Compare commits: mugiten-v0...mugiten-v0 (15 commits)
| SHA1 |
|---|
| 0d3ebc97f5 |
| bb68319527 |
| 2803db9c12 |
| 93b76ed660 |
| 29a3a6aafb |
| 3a2adf0367 |
| eae6e881a7 |
| 0a3387e77a |
| f30465a33c |
| d9006a0767 |
| 1e1761ab4d |
| 37d29fc6ad |
| 60898fe9a2 |
| 5049157b02 |
| 1868c6fb41 |
analysis_options.yaml (new file, 41 lines)
@@ -0,0 +1,41 @@
+# This file configures the analyzer, which statically analyzes Dart code to
+# check for errors, warnings, and lints.
+#
+# The issues identified by the analyzer are surfaced in the UI of Dart-enabled
+# IDEs (https://dart.dev/tools#ides-and-editors). The analyzer can also be
+# invoked from the command line by running `flutter analyze`.
+
+# The following line activates a set of recommended lints for Flutter apps,
+# packages, and plugins designed to encourage good coding practices.
+include:
+  - package:lints/recommended.yaml
+
+linter:
+  # The lint rules applied to this project can be customized in the
+  # section below to disable rules from the `package:flutter_lints/flutter.yaml`
+  # included above or to enable additional rules. A list of all available lints
+  # and their documentation is published at https://dart.dev/lints.
+  #
+  # Instead of disabling a lint rule for the entire project in the
+  # section below, it can also be suppressed for a single line of code
+  # or a specific dart file by using the `// ignore: name_of_lint` and
+  # `// ignore_for_file: name_of_lint` syntax on the line or in the file
+  # producing the lint.
+  rules:
+    always_declare_return_types: true
+    annotate_redeclares: true
+    avoid_print: false
+    avoid_setters_without_getters: true
+    avoid_slow_async_io: true
+    directives_ordering: true
+    eol_at_end_of_file: true
+    prefer_const_declarations: true
+    prefer_contains: true
+    prefer_final_fields: true
+    prefer_final_locals: true
+    prefer_single_quotes: true
+    use_key_in_widget_constructors: true
+    use_null_aware_elements: true
+
+# Additional information about this file can be found at
+# https://dart.dev/guides/language/analysis-options
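The `// ignore` suppression syntax that the comments above describe looks like this in practice. This is a minimal illustrative sketch, not code from this repository; the suppressed lint names are simply examples taken from the rule list above.

```dart
// Applies to every occurrence in this file.
// ignore_for_file: prefer_single_quotes

void main() {
  // Applies only to the diagnostic on the next line.
  // ignore: prefer_final_locals
  var greeting = "hello";
  print(greeting); // avoid_print is disabled in the config above.
}
```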
@@ -9,7 +9,7 @@ import 'package:jadb/cli/commands/query_word.dart';
 Future<void> main(List<String> args) async {
   final runner = CommandRunner(
     'jadb',
-    "CLI tool to help creating and testing the jadb database",
+    'CLI tool to help creating and testing the jadb database',
   );

   runner.addCommand(CreateDb());
flake.lock (generated, 12 lines changed)
@@ -3,7 +3,7 @@
     "jmdict-src": {
       "flake": false,
       "locked": {
-        "narHash": "sha256-sLl+OrVBgc4QCOZ2cvWGLZBerHDLuApyQOQyDyLUHtk=",
+        "narHash": "sha256-5Y4ySJadyNF/Ckjv9rEjIpLnoN0YpbN+cvOawqiuo5Y=",
         "type": "file",
         "url": "http://ftp.edrdg.org/pub/Nihongo/JMdict_e.gz"
       },
@@ -15,7 +15,7 @@
     "jmdict-with-examples-src": {
       "flake": false,
       "locked": {
-        "narHash": "sha256-FQvkYXwgmCJ+ChVkoFzamlG8kyczHAgsJ3zJ6OvRLZc=",
+        "narHash": "sha256-/lOum1C/0zuq9W+g/TajsOgkTeai8vW4ubUdfX8ahX0=",
         "type": "file",
         "url": "http://ftp.edrdg.org/pub/Nihongo/JMdict_e_examp.gz"
       },
@@ -27,7 +27,7 @@
     "kanjidic2-src": {
       "flake": false,
       "locked": {
-        "narHash": "sha256-vyMpRnN9O3vCpvfVDACKdTlapBVx6yXg0X2tgXF2t+U=",
+        "narHash": "sha256-2RCsAsosBjMAgTzmd8YLa5qP9HIVy6wP4DoMNy1LCKM=",
         "type": "file",
         "url": "https://www.edrdg.org/kanjidic/kanjidic2.xml.gz"
       },
@@ -38,11 +38,11 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1751792365,
-        "narHash": "sha256-J1kI6oAj25IG4EdVlg2hQz8NZTBNYvIS0l4wpr9KcUo=",
+        "lastModified": 1752480373,
+        "narHash": "sha256-JHQbm+OcGp32wAsXTE/FLYGNpb+4GLi5oTvCxwSoBOA=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "1fd8bada0b6117e6c7eb54aad5813023eed37ccb",
+        "rev": "62e0f05ede1da0d54515d4ea8ce9c733f12d9f08",
         "type": "github"
       },
       "original": {
@@ -16,14 +16,15 @@ abstract class Element extends SQLWritable {
     this.nf,
   });

   @override
   Map<String, Object?> get sqlValue => {
     'reading': reading,
     'news': news,
     'ichi': ichi,
     'spec': spec,
     'gai': gai,
     'nf': nf,
   };
 }

 class KanjiElement extends Element {
@@ -33,26 +34,19 @@ class KanjiElement extends Element {
   KanjiElement({
     this.info = const [],
     required this.orderNum,
-    required String reading,
-    int? news,
-    int? ichi,
-    int? spec,
-    int? gai,
-    int? nf,
-  }) : super(
-         reading: reading,
-         news: news,
-         ichi: ichi,
-         spec: spec,
-         gai: gai,
-         nf: nf,
-       );
+    required super.reading,
+    super.news,
+    super.ichi,
+    super.spec,
+    super.gai,
+    super.nf,
+  });

   @override
   Map<String, Object?> get sqlValue => {
     ...super.sqlValue,
     'orderNum': orderNum,
   };
 }

 class ReadingElement extends Element {
@@ -66,27 +60,20 @@ class ReadingElement extends Element {
     required this.readingDoesNotMatchKanji,
     this.info = const [],
     this.restrictions = const [],
-    required String reading,
-    int? news,
-    int? ichi,
-    int? spec,
-    int? gai,
-    int? nf,
-  }) : super(
-         reading: reading,
-         news: news,
-         ichi: ichi,
-         spec: spec,
-         gai: gai,
-         nf: nf,
-       );
+    required super.reading,
+    super.news,
+    super.ichi,
+    super.spec,
+    super.gai,
+    super.nf,
+  });

   @override
   Map<String, Object?> get sqlValue => {
     ...super.sqlValue,
     'orderNum': orderNum,
     'readingDoesNotMatchKanji': readingDoesNotMatchKanji,
   };
 }

 class LanguageSource extends SQLWritable {
@@ -104,11 +91,11 @@ class LanguageSource extends SQLWritable {
   @override
   Map<String, Object?> get sqlValue => {
     'language': language,
     'phrase': phrase,
     'fullyDescribesSense': fullyDescribesSense,
     'constructedFromSmallerWords': constructedFromSmallerWords,
   };
 }

 class Glossary extends SQLWritable {
@@ -116,48 +103,41 @@ class Glossary extends SQLWritable {
   final String phrase;
   final String? type;

-  const Glossary({
-    required this.language,
-    required this.phrase,
-    this.type,
-  });
+  const Glossary({required this.language, required this.phrase, this.type});

   @override
   Map<String, Object?> get sqlValue => {
     'language': language,
     'phrase': phrase,
     'type': type,
   };
 }

-final kanaRegex =
-    RegExp(r'^[\p{Script=Katakana}\p{Script=Hiragana}ー]+$', unicode: true);
+final kanaRegex = RegExp(
+  r'^[\p{Script=Katakana}\p{Script=Hiragana}ー]+$',
+  unicode: true,
+);

 class XRefParts {
   final String? kanjiRef;
   final String? readingRef;
   final int? senseOrderNum;

-  const XRefParts({
-    this.kanjiRef,
-    this.readingRef,
-    this.senseOrderNum,
-  }) : assert(kanjiRef != null || readingRef != null);
+  const XRefParts({this.kanjiRef, this.readingRef, this.senseOrderNum})
+    : assert(kanjiRef != null || readingRef != null);

   Map<String, Object?> toJson() => {
     'kanjiRef': kanjiRef,
     'readingRef': readingRef,
     'senseOrderNum': senseOrderNum,
   };
 }

 class XRef {
   final String entryId;
   final String reading;

-  const XRef({
-    required this.entryId,
-    required this.reading,
-  });
+  const XRef({required this.entryId, required this.reading});
 }

 class Sense extends SQLWritable {
@@ -193,9 +173,9 @@ class Sense extends SQLWritable {
   @override
   Map<String, Object?> get sqlValue => {
     'senseId': senseId,
     'orderNum': orderNum,
   };

   bool get isEmpty =>
       antonyms.isEmpty &&
@@ -224,5 +204,6 @@ class Entry extends SQLWritable {
     required this.senses,
   });

+  @override
   Map<String, Object?> get sqlValue => {'entryId': entryId};
 }
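The constructor changes in the hunks above replace explicit `: super(...)` forwarding with Dart super-initializer parameters. A minimal sketch of that pattern, using hypothetical class names (`Base`, `Child`) rather than the classes from this repository:

```dart
class Base {
  final String reading;
  final int? news;
  const Base({required this.reading, this.news});
}

class Child extends Base {
  final int orderNum;

  // `super.reading` and `super.news` forward the arguments directly to the
  // Base constructor, so the parameter types and the explicit `: super(...)`
  // call can both be dropped.
  const Child({required this.orderNum, required super.reading, super.news});
}
```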
@@ -18,18 +18,20 @@ ResolvedXref resolveXref(
   XRefParts xref,
 ) {
   List<Entry> candidateEntries = switch ((xref.kanjiRef, xref.readingRef)) {
-    (null, null) =>
-      throw Exception('Xref $xref has no kanji or reading reference'),
-    (String k, null) => entriesByKanji[k]!.toList(),
-    (null, String r) => entriesByReading[r]!.toList(),
-    (String k, String r) =>
+    (null, null) => throw Exception(
+      'Xref $xref has no kanji or reading reference',
+    ),
+    (final String k, null) => entriesByKanji[k]!.toList(),
+    (null, final String r) => entriesByReading[r]!.toList(),
+    (final String k, final String r) =>
       entriesByKanji[k]!.intersection(entriesByReading[r]!).toList(),
   };

   // Filter out entries that don't have the number of senses specified in the xref
   if (xref.senseOrderNum != null) {
-    candidateEntries
-        .retainWhere((entry) => entry.senses.length >= xref.senseOrderNum!);
+    candidateEntries.retainWhere(
+      (entry) => entry.senses.length >= xref.senseOrderNum!,
+    );
   }

   // If the xref has a reading ref but no kanji ref, and there are multiple
@@ -38,8 +40,9 @@ ResolvedXref resolveXref(
   if (xref.kanjiRef == null &&
       xref.readingRef != null &&
       candidateEntries.length > 1) {
-    final candidatesWithEmptyKanji =
-        candidateEntries.where((entry) => entry.kanji.length == 0).toList();
+    final candidatesWithEmptyKanji = candidateEntries
+        .where((entry) => entry.kanji.isEmpty)
+        .toList();

     if (candidatesWithEmptyKanji.isNotEmpty) {
       candidateEntries = candidatesWithEmptyKanji;
@@ -50,7 +53,7 @@ ResolvedXref resolveXref(
   // entry in case there are multiple candidates left.
   candidateEntries.sortBy<num>((entry) => entry.senses.length);

-  if (candidateEntries.length == 0) {
+  if (candidateEntries.isEmpty) {
     throw Exception(
       'SKIPPING: Xref $xref has ${candidateEntries.length} entries, '
       'kanjiRef: ${xref.kanjiRef}, readingRef: ${xref.readingRef}, '
@@ -80,20 +83,14 @@ Future<void> seedJMDictData(List<Entry> entries, Database db) async {
       elementId++;
       b.insert(
         JMdictTableNames.kanjiElement,
-        k.sqlValue..addAll({
-          'entryId': e.entryId,
-          'elementId': elementId,
-        }),
+        k.sqlValue..addAll({'entryId': e.entryId, 'elementId': elementId}),
       );

       for (final i in k.info) {
-        b.insert(
-          JMdictTableNames.kanjiInfo,
-          {
-            'elementId': elementId,
-            'info': i,
-          },
-        );
+        b.insert(JMdictTableNames.kanjiInfo, {
+          'elementId': elementId,
+          'info': i,
+        });
       }
     }
@@ -101,29 +98,20 @@ Future<void> seedJMDictData(List<Entry> entries, Database db) async {
       elementId++;
       b.insert(
         JMdictTableNames.readingElement,
-        r.sqlValue..addAll({
-          'entryId': e.entryId,
-          'elementId': elementId,
-        }),
+        r.sqlValue..addAll({'entryId': e.entryId, 'elementId': elementId}),
       );

       for (final i in r.info) {
-        b.insert(
-          JMdictTableNames.readingInfo,
-          {
-            'elementId': elementId,
-            'info': i,
-          },
-        );
+        b.insert(JMdictTableNames.readingInfo, {
+          'elementId': elementId,
+          'info': i,
+        });
       }
       for (final res in r.restrictions) {
-        b.insert(
-          JMdictTableNames.readingRestriction,
-          {
-            'elementId': elementId,
-            'restriction': res,
-          },
-        );
+        b.insert(JMdictTableNames.readingRestriction, {
+          'elementId': elementId,
+          'restriction': res,
+        });
       }
     }
   }
@@ -136,16 +124,20 @@ Future<void> seedJMDictData(List<Entry> entries, Database db) async {
   for (final e in entries) {
     for (final s in e.senses) {
-      b.insert(
-          JMdictTableNames.sense, s.sqlValue..addAll({'entryId': e.entryId}));
+      b.insert(
+        JMdictTableNames.sense,
+        s.sqlValue..addAll({'entryId': e.entryId}),
+      );
       for (final d in s.dialects) {
-        b.insert(
-          JMdictTableNames.senseDialect,
-          {'senseId': s.senseId, 'dialect': d},
-        );
+        b.insert(JMdictTableNames.senseDialect, {
+          'senseId': s.senseId,
+          'dialect': d,
+        });
       }
       for (final f in s.fields) {
-        b.insert(
-            JMdictTableNames.senseField, {'senseId': s.senseId, 'field': f});
+        b.insert(JMdictTableNames.senseField, {
+          'senseId': s.senseId,
+          'field': f,
+        });
       }
       for (final i in s.info) {
         b.insert(JMdictTableNames.senseInfo, {'senseId': s.senseId, 'info': i});
@@ -157,16 +149,18 @@ Future<void> seedJMDictData(List<Entry> entries, Database db) async {
         b.insert(JMdictTableNames.sensePOS, {'senseId': s.senseId, 'pos': p});
       }
       for (final rk in s.restrictedToKanji) {
-        b.insert(
-          JMdictTableNames.senseRestrictedToKanji,
-          {'entryId': e.entryId, 'senseId': s.senseId, 'kanji': rk},
-        );
+        b.insert(JMdictTableNames.senseRestrictedToKanji, {
+          'entryId': e.entryId,
+          'senseId': s.senseId,
+          'kanji': rk,
+        });
       }
       for (final rr in s.restrictedToReading) {
-        b.insert(
-          JMdictTableNames.senseRestrictedToReading,
-          {'entryId': e.entryId, 'senseId': s.senseId, 'reading': rr},
-        );
+        b.insert(JMdictTableNames.senseRestrictedToReading, {
+          'entryId': e.entryId,
+          'senseId': s.senseId,
+          'reading': rr,
+        });
       }
       for (final ls in s.languageSource) {
         b.insert(
@@ -186,7 +180,7 @@ Future<void> seedJMDictData(List<Entry> entries, Database db) async {
   await b.commit(noResult: true);

   print(' [JMdict] Building xref trees');
-  SplayTreeMap<String, Set<Entry>> entriesByKanji = SplayTreeMap();
+  final SplayTreeMap<String, Set<Entry>> entriesByKanji = SplayTreeMap();

   for (final entry in entries) {
     for (final kanji in entry.kanji) {
@@ -197,7 +191,7 @@ Future<void> seedJMDictData(List<Entry> entries, Database db) async {
       }
     }
   }
-  SplayTreeMap<String, Set<Entry>> entriesByReading = SplayTreeMap();
+  final SplayTreeMap<String, Set<Entry>> entriesByReading = SplayTreeMap();
   for (final entry in entries) {
     for (final reading in entry.readings) {
       if (entriesByReading.containsKey(reading.reading)) {
@@ -220,17 +214,14 @@ Future<void> seedJMDictData(List<Entry> entries, Database db) async {
           xref,
         );

-        b.insert(
-          JMdictTableNames.senseSeeAlso,
-          {
-            'senseId': s.senseId,
-            'xrefEntryId': resolvedEntry.entry.entryId,
-            'seeAlsoKanji': xref.kanjiRef,
-            'seeAlsoReading': xref.readingRef,
-            'seeAlsoSense': xref.senseOrderNum,
-            'ambiguous': resolvedEntry.ambiguous,
-          },
-        );
+        b.insert(JMdictTableNames.senseSeeAlso, {
+          'senseId': s.senseId,
+          'xrefEntryId': resolvedEntry.entry.entryId,
+          'seeAlsoKanji': xref.kanjiRef,
+          'seeAlsoReading': xref.readingRef,
+          'seeAlsoSense': xref.senseOrderNum,
+          'ambiguous': resolvedEntry.ambiguous,
+        });
       }

       for (final ant in s.antonyms) {
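The seeding code above queues every row with `Batch.insert` and flushes the whole batch with one `commit`. A small self-contained sketch of that pattern, using a placeholder table name rather than the real schema:

```dart
import 'package:sqflite_common/sqlite_api.dart';

Future<void> seedRows(Database db, List<Map<String, Object?>> rows) async {
  final batch = db.batch();
  for (final row in rows) {
    batch.insert('example_table', row); // queued, not executed yet
  }
  // One round trip; noResult avoids collecting a result per queued statement.
  await batch.commit(noResult: true);
}
```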
@@ -8,15 +8,16 @@ List<int?> getPriorityValues(XmlElement e, String prefix) {
   int? news, ichi, spec, gai, nf;
   for (final pri in e.findElements('${prefix}_pri')) {
     final txt = pri.innerText;
-    if (txt.startsWith('news'))
+    if (txt.startsWith('news')) {
       news = int.parse(txt.substring(4));
-    else if (txt.startsWith('ichi'))
+    } else if (txt.startsWith('ichi'))
       ichi = int.parse(txt.substring(4));
     else if (txt.startsWith('spec'))
       spec = int.parse(txt.substring(4));
     else if (txt.startsWith('gai'))
       gai = int.parse(txt.substring(3));
-    else if (txt.startsWith('nf')) nf = int.parse(txt.substring(2));
+    else if (txt.startsWith('nf'))
+      nf = int.parse(txt.substring(2));
   }
   return [news, ichi, spec, gai, nf];
 }
@@ -46,10 +47,7 @@ XRefParts parseXrefParts(String s) {
         );
       }
     } else {
-      result = XRefParts(
-        kanjiRef: parts[0],
-        readingRef: parts[1],
-      );
+      result = XRefParts(kanjiRef: parts[0], readingRef: parts[1]);
     }
     break;
@@ -82,7 +80,7 @@ List<Entry> parseJMDictData(XmlElement root) {
     final List<Sense> senses = [];

     for (final (kanjiNum, k_ele) in entry.findElements('k_ele').indexed) {
-      final ke_pri = getPriorityValues(k_ele, 'ke');
+      final kePri = getPriorityValues(k_ele, 'ke');
       kanjiEls.add(
         KanjiElement(
           orderNum: kanjiNum + 1,
@@ -91,19 +89,20 @@ List<Entry> parseJMDictData(XmlElement root) {
               .map((e) => e.innerText.substring(1, e.innerText.length - 1))
               .toList(),
           reading: k_ele.findElements('keb').first.innerText,
-          news: ke_pri[0],
-          ichi: ke_pri[1],
-          spec: ke_pri[2],
-          gai: ke_pri[3],
-          nf: ke_pri[4],
+          news: kePri[0],
+          ichi: kePri[1],
+          spec: kePri[2],
+          gai: kePri[3],
+          nf: kePri[4],
         ),
       );
     }

     for (final (orderNum, r_ele) in entry.findElements('r_ele').indexed) {
-      final re_pri = getPriorityValues(r_ele, 're');
-      final readingDoesNotMatchKanji =
-          r_ele.findElements('re_nokanji').isNotEmpty;
+      final rePri = getPriorityValues(r_ele, 're');
+      final readingDoesNotMatchKanji = r_ele
+          .findElements('re_nokanji')
+          .isNotEmpty;
       readingEls.add(
         ReadingElement(
           orderNum: orderNum + 1,
@@ -112,14 +111,16 @@ List<Entry> parseJMDictData(XmlElement root) {
               .findElements('re_inf')
               .map((e) => e.innerText.substring(1, e.innerText.length - 1))
               .toList(),
-          restrictions:
-              r_ele.findElements('re_restr').map((e) => e.innerText).toList(),
+          restrictions: r_ele
+              .findElements('re_restr')
+              .map((e) => e.innerText)
+              .toList(),
           reading: r_ele.findElements('reb').first.innerText,
-          news: re_pri[0],
-          ichi: re_pri[1],
-          spec: re_pri[2],
-          gai: re_pri[3],
-          nf: re_pri[4],
+          news: rePri[0],
+          ichi: rePri[1],
+          spec: rePri[2],
+          gai: rePri[3],
+          nf: rePri[4],
         ),
       );
     }
@@ -129,10 +130,14 @@ List<Entry> parseJMDictData(XmlElement root) {
       final result = Sense(
         senseId: senseId,
         orderNum: orderNum + 1,
-        restrictedToKanji:
-            sense.findElements('stagk').map((e) => e.innerText).toList(),
-        restrictedToReading:
-            sense.findElements('stagr').map((e) => e.innerText).toList(),
+        restrictedToKanji: sense
+            .findElements('stagk')
+            .map((e) => e.innerText)
+            .toList(),
+        restrictedToReading: sense
+            .findElements('stagr')
+            .map((e) => e.innerText)
+            .toList(),
         pos: sense
             .findElements('pos')
             .map((e) => e.innerText.substring(1, e.innerText.length - 1))
@@ -13,42 +13,33 @@ class CodePoint extends SQLWritable {
   @override
   Map<String, Object?> get sqlValue => {
     'kanji': kanji,
     'type': type,
     'codepoint': codepoint,
   };
 }

 class Radical extends SQLWritable {
   final String kanji;
   final int radicalId;

-  const Radical({
-    required this.kanji,
-    required this.radicalId,
-  });
+  const Radical({required this.kanji, required this.radicalId});

   @override
-  Map<String, Object?> get sqlValue => {
-    'kanji': kanji,
-    'radicalId': radicalId,
-  };
+  Map<String, Object?> get sqlValue => {'kanji': kanji, 'radicalId': radicalId};
 }

 class StrokeMiscount extends SQLWritable {
   final String kanji;
   final int strokeCount;

-  const StrokeMiscount({
-    required this.kanji,
-    required this.strokeCount,
-  });
+  const StrokeMiscount({required this.kanji, required this.strokeCount});

   @override
   Map<String, Object?> get sqlValue => {
     'kanji': kanji,
     'strokeCount': strokeCount,
   };
 }

 class Variant extends SQLWritable {
@@ -64,10 +55,10 @@ class Variant extends SQLWritable {
   @override
   Map<String, Object?> get sqlValue => {
     'kanji': kanji,
     'type': type,
     'variant': variant,
   };
 }

 class DictionaryReference extends SQLWritable {
@@ -83,10 +74,10 @@ class DictionaryReference extends SQLWritable {
   @override
   Map<String, Object?> get sqlValue => {
     'kanji': kanji,
     'type': type,
     'ref': ref,
   };
 }

 class DictionaryReferenceMoro extends SQLWritable {
@@ -104,11 +95,11 @@ class DictionaryReferenceMoro extends SQLWritable {
   @override
   Map<String, Object?> get sqlValue => {
     'kanji': kanji,
     'ref': ref,
     'volume': volume,
     'page': page,
   };
 }

 class QueryCode extends SQLWritable {
@@ -126,11 +117,11 @@ class QueryCode extends SQLWritable {
   @override
   Map<String, Object?> get sqlValue => {
     'kanji': kanji,
     'code': code,
     'type': type,
     'skipMisclassification': skipMisclassification,
   };
 }

 class Reading extends SQLWritable {
@@ -146,10 +137,10 @@ class Reading extends SQLWritable {
   @override
   Map<String, Object?> get sqlValue => {
     'kanji': kanji,
     'type': type,
     'reading': reading,
   };
 }

 class Kunyomi extends SQLWritable {
@@ -165,10 +156,10 @@ class Kunyomi extends SQLWritable {
   @override
   Map<String, Object?> get sqlValue => {
     'kanji': kanji,
     'yomi': yomi,
     'isJouyou': isJouyou,
   };
 }

 class Onyomi extends SQLWritable {
@@ -186,11 +177,11 @@ class Onyomi extends SQLWritable {
   @override
   Map<String, Object?> get sqlValue => {
     'kanji': kanji,
     'yomi': yomi,
     'isJouyou': isJouyou,
     'type': type,
   };
 }

 class Meaning extends SQLWritable {
@@ -206,10 +197,10 @@ class Meaning extends SQLWritable {
   @override
   Map<String, Object?> get sqlValue => {
     'kanji': kanji,
     'language': language,
     'meaning': meaning,
   };
 }

 class Character extends SQLWritable {
@@ -254,11 +245,12 @@ class Character extends SQLWritable {
     this.nanori = const [],
   });

   @override
   Map<String, Object?> get sqlValue => {
     'literal': literal,
     'grade': grade,
     'strokeCount': strokeCount,
     'frequency': frequency,
     'jlpt': jlpt,
   };
 }
@@ -19,10 +19,7 @@ Future<void> seedKANJIDICData(List<Character> characters, Database db) async {
       assert(c.radical != null, 'Radical name without radical');
       b.insert(
         KANJIDICTableNames.radicalName,
-        {
-          'radicalId': c.radical!.radicalId,
-          'name': n,
-        },
+        {'radicalId': c.radical!.radicalId, 'name': n},
         conflictAlgorithm: ConflictAlgorithm.ignore,
       );
     }
@@ -34,13 +31,10 @@ Future<void> seedKANJIDICData(List<Character> characters, Database db) async {
       b.insert(KANJIDICTableNames.radical, c.radical!.sqlValue);
     }
     for (final sm in c.strokeMiscounts) {
-      b.insert(
-        KANJIDICTableNames.strokeMiscount,
-        {
-          'kanji': c.literal,
-          'strokeCount': sm,
-        },
-      );
+      b.insert(KANJIDICTableNames.strokeMiscount, {
+        'kanji': c.literal,
+        'strokeCount': sm,
+      });
     }
     for (final v in c.variants) {
       b.insert(KANJIDICTableNames.variant, v.sqlValue);
@@ -64,24 +58,24 @@ Future<void> seedKANJIDICData(List<Character> characters, Database db) async {
     }
     for (final (i, y) in c.kunyomi.indexed) {
-      b.insert(
-          KANJIDICTableNames.kunyomi, y.sqlValue..addAll({'orderNum': i + 1}));
+      b.insert(
+        KANJIDICTableNames.kunyomi,
+        y.sqlValue..addAll({'orderNum': i + 1}),
+      );
     }
     for (final (i, y) in c.onyomi.indexed) {
-      b.insert(
-          KANJIDICTableNames.onyomi, y.sqlValue..addAll({'orderNum': i + 1}));
+      b.insert(
+        KANJIDICTableNames.onyomi,
+        y.sqlValue..addAll({'orderNum': i + 1}),
+      );
     }
     for (final (i, m) in c.meanings.indexed) {
-      b.insert(
-          KANJIDICTableNames.meaning, m.sqlValue..addAll({'orderNum': i + 1}));
+      b.insert(
+        KANJIDICTableNames.meaning,
+        m.sqlValue..addAll({'orderNum': i + 1}),
+      );
     }
     for (final n in c.nanori) {
-      b.insert(
-        KANJIDICTableNames.nanori,
-        {
-          'kanji': c.literal,
-          'nanori': n,
-        },
-      );
+      b.insert(KANJIDICTableNames.nanori, {'kanji': c.literal, 'nanori': n});
     }
   }
   await b.commit(noResult: true);
@@ -10,27 +10,33 @@ List<Character> parseKANJIDICData(XmlElement root) {
     final codepoint = c.findElements('codepoint').firstOrNull;
     final radical = c.findElements('radical').firstOrNull;
     final misc = c.findElements('misc').first;
-    final dic_number = c.findElements('dic_number').firstOrNull;
-    final query_code = c.findElements('query_code').first;
-    final reading_meaning = c.findElements('reading_meaning').firstOrNull;
+    final dicNumber = c.findElements('dic_number').firstOrNull;
+    final queryCode = c.findElements('query_code').first;
+    final readingMeaning = c.findElements('reading_meaning').firstOrNull;

     // TODO: Group readings and meanings by their rmgroup parent node.

     result.add(
       Character(
         literal: kanji,
-        strokeCount:
-            int.parse(misc.findElements('stroke_count').first.innerText),
+        strokeCount: int.parse(
+          misc.findElements('stroke_count').first.innerText,
+        ),
         grade: int.tryParse(
-            misc.findElements('grade').firstOrNull?.innerText ?? ''),
+          misc.findElements('grade').firstOrNull?.innerText ?? '',
+        ),
         frequency: int.tryParse(
-            misc.findElements('freq').firstOrNull?.innerText ?? ''),
+          misc.findElements('freq').firstOrNull?.innerText ?? '',
+        ),
         jlpt: int.tryParse(
           misc.findElements('jlpt').firstOrNull?.innerText ?? '',
         ),
-        radicalName:
-            misc.findElements('rad_name').map((e) => e.innerText).toList(),
-        codepoints: codepoint
+        radicalName: misc
+            .findElements('rad_name')
+            .map((e) => e.innerText)
+            .toList(),
+        codepoints:
+            codepoint
                 ?.findElements('cp_value')
                 .map(
                   (e) => CodePoint(
@@ -45,10 +51,7 @@ List<Character> parseKANJIDICData(XmlElement root) {
                 ?.findElements('rad_value')
                 .where((e) => e.getAttribute('rad_type') == 'classical')
                 .map(
-                  (e) => Radical(
-                    kanji: kanji,
-                    radicalId: int.parse(e.innerText),
-                  ),
+                  (e) => Radical(kanji: kanji, radicalId: int.parse(e.innerText)),
                 )
                 .firstOrNull,
         strokeMiscounts: misc
@@ -66,7 +69,8 @@ List<Character> parseKANJIDICData(XmlElement root) {
               ),
             )
             .toList(),
-        dictionaryReferences: dic_number
+        dictionaryReferences:
+            dicNumber
                 ?.findElements('dic_ref')
                 .where((e) => e.getAttribute('dr_type') != 'moro')
                 .map(
@@ -78,7 +82,8 @@ List<Character> parseKANJIDICData(XmlElement root) {
                 )
                 .toList() ??
             [],
-        dictionaryReferencesMoro: dic_number
+        dictionaryReferencesMoro:
+            dicNumber
                 ?.findElements('dic_ref')
                 .where((e) => e.getAttribute('dr_type') == 'moro')
                 .map(
@@ -91,7 +96,7 @@ List<Character> parseKANJIDICData(XmlElement root) {
                 )
                 .toList() ??
             [],
-        querycodes: query_code
+        querycodes: queryCode
             .findElements('q_code')
             .map(
               (e) => QueryCode(
@@ -102,7 +107,8 @@ List<Character> parseKANJIDICData(XmlElement root) {
               ),
             )
             .toList(),
-        readings: reading_meaning
+        readings:
+            readingMeaning
                 ?.findAllElements('reading')
                 .where(
                   (e) =>
@@ -117,7 +123,8 @@ List<Character> parseKANJIDICData(XmlElement root) {
                 )
                 .toList() ??
             [],
-        kunyomi: reading_meaning
+        kunyomi:
+            readingMeaning
                 ?.findAllElements('reading')
                 .where((e) => e.getAttribute('r_type') == 'ja_kun')
                 .map(
@@ -129,19 +136,22 @@ List<Character> parseKANJIDICData(XmlElement root) {
                 )
                 .toList() ??
             [],
-        onyomi: reading_meaning
+        onyomi:
+            readingMeaning
                 ?.findAllElements('reading')
                 .where((e) => e.getAttribute('r_type') == 'ja_on')
                 .map(
                   (e) => Onyomi(
-                      kanji: kanji,
-                      yomi: transliterateKatakanaToHiragana(e.innerText),
-                      isJouyou: e.getAttribute('r_status') == 'jy',
-                      type: e.getAttribute('on_type')),
+                    kanji: kanji,
+                    yomi: transliterateKatakanaToHiragana(e.innerText),
+                    isJouyou: e.getAttribute('r_status') == 'jy',
+                    type: e.getAttribute('on_type'),
+                  ),
                 )
                 .toList() ??
             [],
-        meanings: reading_meaning
+        meanings:
+            readingMeaning
                 ?.findAllElements('meaning')
                 .map(
                   (e) => Meaning(
@@ -152,7 +162,8 @@ List<Character> parseKANJIDICData(XmlElement root) {
                 )
                 .toList() ??
             [],
-        nanori: reading_meaning
+        nanori:
+            readingMeaning
                 ?.findElements('nanori')
                 .map((e) => e.innerText)
                 .toList() ??
@@ -22,32 +22,33 @@ Future<Database> openLocalDb({
   jadbPath = File(jadbPath).resolveSymbolicLinksSync();

   if (libsqlitePath == null) {
-    throw Exception("LIBSQLITE_PATH is not set");
+    throw Exception('LIBSQLITE_PATH is not set');
   }

   if (!File(libsqlitePath).existsSync()) {
-    throw Exception("LIBSQLITE_PATH does not exist: $libsqlitePath");
+    throw Exception('LIBSQLITE_PATH does not exist: $libsqlitePath');
   }

   if (!File(jadbPath).existsSync()) {
-    throw Exception("JADB_PATH does not exist: $jadbPath");
+    throw Exception('JADB_PATH does not exist: $jadbPath');
   }

-  final db = await createDatabaseFactoryFfi(
-    ffiInit: () =>
-        open.overrideForAll(() => DynamicLibrary.open(libsqlitePath!)),
-  ).openDatabase(
-    jadbPath,
-    options: OpenDatabaseOptions(
-      onConfigure: (db) async {
-        if (walMode) {
-          await db.execute("PRAGMA journal_mode = WAL");
-        }
-        await db.execute("PRAGMA foreign_keys = ON");
-      },
-      readOnly: !readWrite,
-    ),
-  );
+  final db =
+      await createDatabaseFactoryFfi(
+        ffiInit: () =>
+            open.overrideForAll(() => DynamicLibrary.open(libsqlitePath!)),
+      ).openDatabase(
+        jadbPath,
+        options: OpenDatabaseOptions(
+          onConfigure: (db) async {
+            if (walMode) {
+              await db.execute('PRAGMA journal_mode = WAL');
+            }
+            await db.execute('PRAGMA foreign_keys = ON');
+          },
+          readOnly: !readWrite,
+        ),
+      );

   if (verifyTablesExist) {
     await db.jadbVerifyTables();
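For context, this is roughly how the calls visible in this hunk fit together when opening a SQLite database through an explicit `libsqlite` path: a hedged sketch with placeholder paths and a hypothetical function name, not the project's actual `openLocalDb` implementation.

```dart
import 'dart:ffi';

import 'package:sqflite_common_ffi/sqflite_ffi.dart';
import 'package:sqlite3/open.dart';

Future<Database> openSketch(String libsqlitePath, String dbPath) {
  final factory = createDatabaseFactoryFfi(
    // Point the sqlite3 bindings at a specific shared library.
    ffiInit: () =>
        open.overrideForAll(() => DynamicLibrary.open(libsqlitePath)),
  );
  return factory.openDatabase(
    dbPath,
    options: OpenDatabaseOptions(
      onConfigure: (db) async {
        // PRAGMAs run once per connection, before any other statements.
        await db.execute('PRAGMA journal_mode = WAL');
        await db.execute('PRAGMA foreign_keys = ON');
      },
    ),
  );
}
```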
@@ -3,8 +3,10 @@ import 'dart:io';
 Iterable<String> parseRADKFILEBlocks(File radkfile) {
   final String content = File('data/tmp/radkfile_utf8').readAsStringSync();

-  final Iterable<String> blocks =
-      content.replaceAll(RegExp(r'^#.*$'), '').split(r'$').skip(2);
+  final Iterable<String> blocks = content
+      .replaceAll(RegExp(r'^#.*$'), '')
+      .split(r'$')
+      .skip(2);

   return blocks;
 }
@@ -1,27 +1,20 @@
 import 'package:jadb/table_names/radkfile.dart';
 import 'package:sqflite_common/sqlite_api.dart';

-Future<void> seedRADKFILEData(
-  Iterable<String> blocks,
-  Database db,
-) async {
+Future<void> seedRADKFILEData(Iterable<String> blocks, Database db) async {
   final b = db.batch();

   for (final block in blocks) {
     final String radical = block[1];
-    final List<String> kanjiList = block
-        .replaceFirst(RegExp(r'.*\n'), '')
-        .split('')
-      ..removeWhere((e) => e == '' || e == '\n');
+    final List<String> kanjiList =
+        block.replaceFirst(RegExp(r'.*\n'), '').split('')
+          ..removeWhere((e) => e == '' || e == '\n');

     for (final kanji in kanjiList.toSet()) {
-      b.insert(
-        RADKFILETableNames.radkfile,
-        {
-          'radical': radical,
-          'kanji': kanji,
-        },
-      );
+      b.insert(RADKFILETableNames.radkfile, {
+        'radical': radical,
+        'kanji': kanji,
+      });
     }
   }
@@ -24,10 +24,10 @@ Future<void> seedData(Database db) async {
 Future<void> parseAndSeedDataFromJMdict(Database db) async {
   print('[JMdict] Reading file content...');
-  String rawXML = File('data/tmp/JMdict.xml').readAsStringSync();
+  final String rawXML = File('data/tmp/JMdict.xml').readAsStringSync();

   print('[JMdict] Parsing XML tags...');
-  XmlElement root = XmlDocument.parse(rawXML).getElement('JMdict')!;
+  final XmlElement root = XmlDocument.parse(rawXML).getElement('JMdict')!;

   print('[JMdict] Parsing XML content...');
   final entries = parseJMDictData(root);
@@ -38,10 +38,10 @@ Future<void> parseAndSeedDataFromJMdict(Database db) async {
 Future<void> parseAndSeedDataFromKANJIDIC(Database db) async {
   print('[KANJIDIC2] Reading file...');
-  String rawXML = File('data/tmp/kanjidic2.xml').readAsStringSync();
+  final String rawXML = File('data/tmp/kanjidic2.xml').readAsStringSync();

   print('[KANJIDIC2] Parsing XML...');
-  XmlElement root = XmlDocument.parse(rawXML).getElement('kanjidic2')!;
+  final XmlElement root = XmlDocument.parse(rawXML).getElement('kanjidic2')!;

   print('[KANJIDIC2] Parsing XML content...');
   final entries = parseKANJIDICData(root);
@@ -52,7 +52,7 @@ Future<void> parseAndSeedDataFromKANJIDIC(Database db) async {
 Future<void> parseAndSeedDataFromRADKFILE(Database db) async {
   print('[RADKFILE] Reading file...');
-  File raw = File('data/tmp/RADKFILE');
+  final File raw = File('data/tmp/RADKFILE');

   print('[RADKFILE] Parsing content...');
   final blocks = parseRADKFILEBlocks(raw);
@@ -63,7 +63,7 @@ Future<void> parseAndSeedDataFromRADKFILE(Database db) async {
 Future<void> parseAndSeedDataFromTanosJLPT(Database db) async {
   print('[TANOS-JLPT] Reading files...');
-  Map<String, File> files = {
+  final Map<String, File> files = {
     'N1': File('data/tanos-jlpt/n1.csv'),
     'N2': File('data/tanos-jlpt/n2.csv'),
     'N3': File('data/tanos-jlpt/n3.csv'),
@@ -14,7 +14,7 @@ Future<List<JLPTRankedWord>> parseJLPTRankedWords(
     final file = entry.value;

     if (!file.existsSync()) {
-      throw Exception("File $jlptLevel does not exist");
+      throw Exception('File $jlptLevel does not exist');
     }

     final rows = await file
@@ -25,29 +25,33 @@ Future<List<JLPTRankedWord>> parseJLPTRankedWords(
     for (final row in rows) {
       if (row.length != 3) {
-        throw Exception("Invalid line in $jlptLevel: $row");
+        throw Exception('Invalid line in $jlptLevel: $row');
       }

       final kanji = (row[0] as String).isEmpty
           ? null
           : (row[0] as String)
-              .replaceFirst(RegExp('^お・'), '')
-              .replaceAll(RegExp(r'(.*)'), '');
+                .replaceFirst(RegExp('^お・'), '')
+                .replaceAll(RegExp(r'(.*)'), '');

       final readings = (row[1] as String)
-          .split(RegExp('[・/、(:?\s+)]'))
+          .split(RegExp('[・/、(:?s+)]'))
           .map((e) => e.trim())
           .toList();

-      final meanings =
-          (row[2] as String).split(',').expand(cleanMeaning).toList();
+      final meanings = (row[2] as String)
+          .split(',')
+          .expand(cleanMeaning)
+          .toList();

-      result.add(JLPTRankedWord(
-        readings: readings,
-        kanji: kanji,
-        jlptLevel: jlptLevel,
-        meanings: meanings,
-      ));
+      result.add(
+        JLPTRankedWord(
+          readings: readings,
+          kanji: kanji,
+          jlptLevel: jlptLevel,
+          meanings: meanings,
+        ),
+      );
     }
   }
@@ -13,5 +13,5 @@ class JLPTRankedWord {
   @override
   String toString() =>
-      '(${jlptLevel},${kanji},"${readings.join(",")}","${meanings.join(",")})';
+      '($jlptLevel,$kanji,"${readings.join(",")}","${meanings.join(",")})';
 }
@@ -1,49 +1,39 @@
-import 'package:jadb/table_names/jmdict.dart';
 import 'package:jadb/_data_ingestion/tanos-jlpt/objects.dart';
 import 'package:jadb/_data_ingestion/tanos-jlpt/overrides.dart';
+import 'package:jadb/table_names/jmdict.dart';
 import 'package:sqflite_common/sqlite_api.dart';

-Future<List<int>> _findReadingCandidates(
-  JLPTRankedWord word,
-  Database db,
-) =>
-    db
-        .query(
-          JMdictTableNames.readingElement,
-          columns: ['entryId'],
-          where:
-              '"reading" IN (${List.filled(word.readings.length, '?').join(',')})',
-          whereArgs: [...word.readings],
-        )
-        .then((rows) => rows.map((row) => row['entryId'] as int).toList());
+Future<List<int>> _findReadingCandidates(JLPTRankedWord word, Database db) => db
+    .query(
+      JMdictTableNames.readingElement,
+      columns: ['entryId'],
+      where:
+          '"reading" IN (${List.filled(word.readings.length, '?').join(',')})',
+      whereArgs: [...word.readings],
+    )
+    .then((rows) => rows.map((row) => row['entryId'] as int).toList());

-Future<List<int>> _findKanjiCandidates(
-  JLPTRankedWord word,
-  Database db,
-) =>
-    db
-        .query(
-          JMdictTableNames.kanjiElement,
-          columns: ['entryId'],
-          where: 'reading = ?',
-          whereArgs: [word.kanji],
-        )
-        .then((rows) => rows.map((row) => row['entryId'] as int).toList());
+Future<List<int>> _findKanjiCandidates(JLPTRankedWord word, Database db) => db
+    .query(
+      JMdictTableNames.kanjiElement,
+      columns: ['entryId'],
+      where: 'reading = ?',
+      whereArgs: [word.kanji],
+    )
+    .then((rows) => rows.map((row) => row['entryId'] as int).toList());

 Future<List<(int, String)>> _findSenseCandidates(
   JLPTRankedWord word,
   Database db,
-) =>
-    db.rawQuery(
+) => db
+    .rawQuery(
       'SELECT entryId, phrase '
       'FROM "${JMdictTableNames.senseGlossary}" '
       'JOIN "${JMdictTableNames.sense}" USING (senseId)'
-      'WHERE phrase IN (${List.filled(
-        word.meanings.length,
-        '?',
-      ).join(',')})',
+      'WHERE phrase IN (${List.filled(word.meanings.length, '?').join(',')})',
       [...word.meanings],
-    ).then(
+    )
+    .then(
       (rows) => rows
           .map((row) => (row['entryId'] as int, row['phrase'] as String))
          .toList(),
@@ -55,8 +45,10 @@ Future<int?> findEntry(
   bool useOverrides = true,
 }) async {
   final List<int> readingCandidates = await _findReadingCandidates(word, db);
-  final List<(int, String)> senseCandidates =
-      await _findSenseCandidates(word, db);
+  final List<(int, String)> senseCandidates = await _findSenseCandidates(
+    word,
+    db,
+  );

   List<int> entryIds;
@@ -71,8 +63,10 @@ Future<int?> findEntry(
       print('No entry found, trying to combine with senses');

       entryIds = readingCandidates
-          .where((readingId) =>
-              senseCandidates.any((sense) => sense.$1 == readingId))
+          .where(
+            (readingId) =>
+                senseCandidates.any((sense) => sense.$1 == readingId),
+          )
           .toList();
     }
   } else {
@@ -88,12 +82,15 @@ Future<int?> findEntry(
     if (overrideEntries.length > 1) {
       throw Exception(
-          'Multiple override entries found for ${word.toString()}: $entryIds');
-    } else if (overrideEntries.length == 0 &&
-        !word.readings.any((reading) =>
-            TANOS_JLPT_OVERRIDES.containsKey((word.kanji, reading)))) {
+        'Multiple override entries found for ${word.toString()}: $entryIds',
+      );
+    } else if (overrideEntries.isEmpty &&
+        !word.readings.any(
+          (reading) => TANOS_JLPT_OVERRIDES.containsKey((word.kanji, reading)),
+        )) {
       throw Exception(
-          'No override entry found for ${word.toString()}: $entryIds');
+        'No override entry found for ${word.toString()}: $entryIds',
+      );
     }

     print('Found override: ${overrideEntries.firstOrNull}');
@@ -103,7 +100,8 @@ Future<int?> findEntry(
   if (entryIds.length > 1) {
     throw Exception(
-        'Multiple override entries found for ${word.toString()}: $entryIds');
+      'Multiple override entries found for ${word.toString()}: $entryIds',
+    );
   } else if (entryIds.isEmpty) {
     throw Exception('No entry found for ${word.toString()}');
   }
@@ -5,20 +5,17 @@ Future<void> seedTanosJLPTData(
   Map<String, Set<int>> resolvedEntries,
   Database db,
 ) async {
-  Batch b = db.batch();
+  final Batch b = db.batch();

   for (final jlptLevel in resolvedEntries.entries) {
     final level = jlptLevel.key;
     final entryIds = jlptLevel.value;

     for (final entryId in entryIds) {
-      b.insert(
-        TanosJLPTTableNames.jlptTag,
-        {
-          'entryId': entryId,
-          'jlptLevel': level,
-        },
-      );
+      b.insert(TanosJLPTTableNames.jlptTag, {
+        'entryId': entryId,
+        'jlptLevel': level,
+      });
     }
   }
@@ -1,14 +1,15 @@
 import 'dart:io';

+import 'package:args/command_runner.dart';
 import 'package:jadb/_data_ingestion/open_local_db.dart';
 import 'package:jadb/_data_ingestion/seed_database.dart';
-
-import 'package:args/command_runner.dart';
 import 'package:jadb/cli/args.dart';

 class CreateDb extends Command {
-  final name = "create-db";
-  final description = "Create the database";
+  @override
+  final name = 'create-db';
+  @override
+  final description = 'Create the database';

   CreateDb() {
     addLibsqliteArg(argParser);
@@ -23,6 +24,7 @@ class CreateDb extends Command {
     );
   }

+  @override
   Future<void> run() async {
     if (argResults!.option('libsqlite') == null) {
       print(argParser.usage);
@@ -36,14 +38,17 @@ class CreateDb extends Command {
     );

     bool failed = false;
-    await seedData(db).then((_) {
-      print("Database created successfully");
-    }).catchError((error) {
-      print("Error creating database: $error");
-      failed = true;
-    }).whenComplete(() {
-      db.close();
-    });
+    await seedData(db)
+        .then((_) {
+          print('Database created successfully');
+        })
+        .catchError((error) {
+          print('Error creating database: $error');
+          failed = true;
+        })
+        .whenComplete(() {
+          db.close();
+        });
     if (failed) {
       exit(1);
     } else {
@@ -1,8 +1,7 @@
 import 'dart:io';

-import 'package:jadb/_data_ingestion/open_local_db.dart';
-
 import 'package:args/command_runner.dart';
+import 'package:jadb/_data_ingestion/open_local_db.dart';
 import 'package:jadb/_data_ingestion/tanos-jlpt/csv_parser.dart';
 import 'package:jadb/_data_ingestion/tanos-jlpt/objects.dart';
 import 'package:jadb/_data_ingestion/tanos-jlpt/resolve.dart';
@@ -10,9 +9,11 @@ import 'package:jadb/cli/args.dart';
 import 'package:sqflite_common/sqlite_api.dart';

 class CreateTanosJlptMappings extends Command {
-  final name = "create-tanos-jlpt-mappings";
+  @override
+  final name = 'create-tanos-jlpt-mappings';
+  @override
   final description =
-      "Resolve Tanos JLPT data against JMDict. This tool is useful to create overrides for ambiguous references";
+      'Resolve Tanos JLPT data against JMDict. This tool is useful to create overrides for ambiguous references';

   CreateTanosJlptMappings() {
     addLibsqliteArg(argParser);
@@ -26,6 +27,7 @@ class CreateTanosJlptMappings extends Command {
     );
   }

+  @override
   Future<void> run() async {
     if (argResults!.option('libsqlite') == null ||
         argResults!.option('jadb') == null) {
@@ -40,7 +42,7 @@ class CreateTanosJlptMappings extends Command {
     final useOverrides = argResults!.flag('overrides');

-    Map<String, File> files = {
+    final Map<String, File> files = {
       'N1': File('data/tanos-jlpt/n1.csv'),
       'N2': File('data/tanos-jlpt/n2.csv'),
       'N3': File('data/tanos-jlpt/n3.csv'),
@@ -59,11 +61,12 @@ Future<void> resolveExisting(
   Database db,
   bool useOverrides,
 ) async {
-  List<JLPTRankedWord> missingWords = [];
+  final List<JLPTRankedWord> missingWords = [];
   for (final (i, word) in rankedWords.indexed) {
     try {
       print(
-          '[${(i + 1).toString().padLeft(4, '0')}/${rankedWords.length}] ${word.toString()}');
+        '[${(i + 1).toString().padLeft(4, '0')}/${rankedWords.length}] ${word.toString()}',
+      );
       await findEntry(word, db, useOverrides: useOverrides);
     } catch (e) {
       print(e);
@@ -78,16 +81,19 @@ Future<void> resolveExisting(
   print('Statistics:');
   for (final jlptLevel in ['N5', 'N4', 'N3', 'N2', 'N1']) {
-    final missingWordCount =
-        missingWords.where((e) => e.jlptLevel == jlptLevel).length;
-    final totalWordCount =
-        rankedWords.where((e) => e.jlptLevel == jlptLevel).length;
+    final missingWordCount = missingWords
+        .where((e) => e.jlptLevel == jlptLevel)
+        .length;
+    final totalWordCount = rankedWords
+        .where((e) => e.jlptLevel == jlptLevel)
+        .length;

-    final failureRate =
-        ((missingWordCount / totalWordCount) * 100).toStringAsFixed(2);
+    final failureRate = ((missingWordCount / totalWordCount) * 100)
+        .toStringAsFixed(2);

     print(
-        '${jlptLevel} failures: [${missingWordCount}/${totalWordCount}] (${failureRate}%)');
+      '$jlptLevel failures: [$missingWordCount/$totalWordCount] ($failureRate%)',
+    );
   }

   print('Not able to determine the entry for ${missingWords.length} words');
@@ -1,14 +1,15 @@
 // import 'dart:io';

+import 'package:args/command_runner.dart';
 // import 'package:jadb/_data_ingestion/open_local_db.dart';
 import 'package:jadb/cli/args.dart';
-
-import 'package:args/command_runner.dart';
 import 'package:jadb/util/lemmatizer/lemmatizer.dart';

 class Lemmatize extends Command {
-  final name = "lemmatize";
-  final description = "Lemmatize a word using the Jadb lemmatizer";
+  @override
+  final name = 'lemmatize';
+  @override
+  final description = 'Lemmatize a word using the Jadb lemmatizer';

   Lemmatize() {
     addLibsqliteArg(argParser);
@@ -21,6 +22,7 @@ class Lemmatize extends Command {
     );
   }

+  @override
   Future<void> run() async {
     // if (argResults!.option('libsqlite') == null ||
     //     argResults!.option('jadb') == null) {
@@ -41,6 +43,6 @@ class Lemmatize extends Command {
     print(result.toString());

-    print("Lemmatization took ${time.elapsedMilliseconds}ms");
+    print('Lemmatization took ${time.elapsedMilliseconds}ms');
   }
 }
@@ -1,22 +1,25 @@
 import 'dart:convert';
 import 'dart:io';

+import 'package:args/command_runner.dart';
 import 'package:jadb/_data_ingestion/open_local_db.dart';
 import 'package:jadb/cli/args.dart';
 import 'package:jadb/search.dart';
-
-import 'package:args/command_runner.dart';

 class QueryKanji extends Command {
-  final name = "query-kanji";
-  final description = "Query the database for kanji data";
-  final invocation = "jadb query-kanji [options] <kanji>";
+  @override
+  final name = 'query-kanji';
+  @override
+  final description = 'Query the database for kanji data';
+  @override
+  final invocation = 'jadb query-kanji [options] <kanji>';

   QueryKanji() {
     addLibsqliteArg(argParser);
     addJadbArg(argParser);
   }

+  @override
   Future<void> run() async {
     if (argResults!.option('libsqlite') == null ||
         argResults!.option('jadb') == null) {
@@ -43,11 +46,11 @@ class QueryKanji extends Command {
     time.stop();

     if (result == null) {
-      print("No such kanji");
+      print('No such kanji');
     } else {
       print(JsonEncoder.withIndent(' ').convert(result.toJson()));
     }

-    print("Query took ${time.elapsedMilliseconds}ms");
+    print('Query took ${time.elapsedMilliseconds}ms');
   }
 }
@@ -1,26 +1,36 @@
|
||||
import 'dart:convert';
|
||||
import 'dart:io';
|
||||
|
||||
import 'package:args/command_runner.dart';
|
||||
import 'package:jadb/_data_ingestion/open_local_db.dart';
|
||||
import 'package:jadb/cli/args.dart';
|
||||
import 'package:jadb/search.dart';
|
||||
|
||||
import 'package:args/command_runner.dart';
|
||||
import 'package:sqflite_common/sqflite.dart';
|
||||
|
||||
class QueryWord extends Command {
|
||||
final name = "query-word";
|
||||
final description = "Query the database for word data";
|
||||
final invocation = "jadb query-word [options] (<word> | <ID>)";
|
||||
@override
|
||||
final name = 'query-word';
|
||||
@override
|
||||
final description = 'Query the database for word data';
|
||||
@override
|
||||
final invocation = 'jadb query-word [options] (<word> | <ID>)';
|
||||
|
||||
QueryWord() {
|
||||
addLibsqliteArg(argParser);
|
||||
addJadbArg(argParser);
|
||||
|
||||
argParser.addFlag('json', abbr: 'j', help: 'Output results in JSON format');
|
||||
|
||||
argParser.addOption('page', abbr: 'p', valueHelp: 'NUM', defaultsTo: '0');
|
||||
|
||||
argParser.addOption('pageSize', valueHelp: 'NUM', defaultsTo: '30');
|
||||
}
|
||||
|
||||
@override
|
||||
Future<void> run() async {
|
||||
if (argResults!.option('libsqlite') == null ||
|
||||
argResults!.option('jadb') == null) {
|
||||
print("You need to provide both libsqlite and jadb paths.");
|
||||
print('You need to provide both libsqlite and jadb paths.');
|
||||
print('');
|
||||
printUsage();
|
||||
exit(64);
|
||||
@@ -38,52 +48,74 @@ class QueryWord extends Command {
|
||||
exit(64);
|
||||
}
|
||||
|
||||
final String searchWord = argResults!.rest.join(" ");
|
||||
final String searchWord = argResults!.rest.join(' ');
|
||||
final int? maybeId = int.tryParse(searchWord);
|
||||
|
||||
if (maybeId != null && maybeId >= 1000000) {
|
||||
await _searchId(db, maybeId);
|
||||
await _searchId(db, maybeId, argResults!.flag('json'));
|
||||
} else {
|
||||
await _searchWord(db, searchWord);
|
||||
await _searchWord(
|
||||
db,
|
||||
searchWord,
|
||||
argResults!.flag('json'),
|
||||
int.parse(argResults!.option('page')!),
|
||||
int.parse(argResults!.option('pageSize')!),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
Future<void> _searchId(DatabaseExecutor db, int id) async {
|
||||
Future<void> _searchId(DatabaseExecutor db, int id, bool jsonOutput) async {
|
||||
final time = Stopwatch()..start();
|
||||
final result = await JaDBConnection(db).jadbGetWordById(id);
|
||||
time.stop();
|
||||
|
||||
if (result == null) {
|
||||
print("Invalid ID");
|
||||
print('Invalid ID');
|
||||
} else {
|
||||
print(result.toString());
|
||||
if (jsonOutput) {
|
||||
print(JsonEncoder.withIndent(' ').convert(result));
|
||||
} else {
|
||||
print(result.toString());
|
||||
}
|
||||
}
|
||||
|
||||
print("Query took ${time.elapsedMilliseconds}ms");
|
||||
print('Query took ${time.elapsedMilliseconds}ms');
|
||||
}
|
||||
|
||||
Future<void> _searchWord(DatabaseExecutor db, String searchWord) async {
|
||||
Future<void> _searchWord(
|
||||
DatabaseExecutor db,
|
||||
String searchWord,
|
||||
bool jsonOutput,
|
||||
int page,
|
||||
int pageSize,
|
||||
) async {
|
||||
final time = Stopwatch()..start();
|
||||
final count = await JaDBConnection(db).jadbSearchWordCount(searchWord);
|
||||
time.stop();
|
||||
|
||||
final time2 = Stopwatch()..start();
|
||||
final result = await JaDBConnection(db).jadbSearchWord(searchWord);
|
||||
final result = await JaDBConnection(
|
||||
db,
|
||||
).jadbSearchWord(searchWord, page: page, pageSize: pageSize);
|
||||
time2.stop();
|
||||
|
||||
if (result == null) {
|
||||
print("Invalid search");
|
||||
print('Invalid search');
|
||||
} else if (result.isEmpty) {
|
||||
print("No matches");
|
||||
print('No matches');
|
||||
} else {
|
||||
for (final e in result) {
|
||||
print(e.toString());
|
||||
print("");
|
||||
if (jsonOutput) {
|
||||
print(JsonEncoder.withIndent(' ').convert(result));
|
||||
} else {
|
||||
for (final e in result) {
|
||||
print(e.toString());
|
||||
print('');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
print("Total count: ${count}");
|
||||
print("Count query took ${time.elapsedMilliseconds}ms");
|
||||
print("Query took ${time2.elapsedMilliseconds}ms");
|
||||
print('Total count: $count');
|
||||
print('Count query took ${time.elapsedMilliseconds}ms');
|
||||
print('Query took ${time2.elapsedMilliseconds}ms');
|
||||
}
|
||||
}
|
||||
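The --json branches above rely on dart:convert's JsonEncoder.withIndent for pretty-printing. A minimal standalone sketch of the same call, using a made-up result map rather than a real WordSearchResult, looks like this:

import 'dart:convert';

void main() {
  // Hypothetical stand-in for a search result; the command passes the
  // actual query result object to the encoder instead.
  final result = {
    'entryId': 1234567,
    'japanese': [
      {'base': '食べる', 'furigana': 'たべる'},
    ],
  };
  // Same single-space indent as used by the query-word command above.
  print(const JsonEncoder.withIndent(' ').convert(result));
}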
|
||||
@@ -1,6 +1,6 @@
|
||||
/// Jouyou kanji, sorted primarily by grade and secondarily by stroke count.
|
||||
const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
{
|
||||
const Map<int, Map<int, List<String>>>
|
||||
JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT = {
|
||||
1: {
|
||||
1: ['一'],
|
||||
2: ['力', '八', '入', '二', '人', '十', '七', '九'],
|
||||
@@ -12,7 +12,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
8: ['林', '青', '空', '金', '学', '雨'],
|
||||
9: ['草', '音'],
|
||||
10: ['校'],
|
||||
12: ['森']
|
||||
12: ['森'],
|
||||
},
|
||||
2: {
|
||||
2: ['刀'],
|
||||
@@ -35,7 +35,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'戸',
|
||||
'元',
|
||||
'牛',
|
||||
'引'
|
||||
'引',
|
||||
],
|
||||
5: ['用', '北', '母', '半', '冬', '台', '矢', '市', '広', '古', '兄', '外'],
|
||||
6: [
|
||||
@@ -58,7 +58,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'交',
|
||||
'会',
|
||||
'回',
|
||||
'羽'
|
||||
'羽',
|
||||
],
|
||||
7: [
|
||||
'里',
|
||||
@@ -78,7 +78,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'近',
|
||||
'汽',
|
||||
'角',
|
||||
'何'
|
||||
'何',
|
||||
],
|
||||
8: [
|
||||
'夜',
|
||||
@@ -95,7 +95,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'国',
|
||||
'京',
|
||||
'岩',
|
||||
'画'
|
||||
'画',
|
||||
],
|
||||
9: [
|
||||
'風',
|
||||
@@ -115,7 +115,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'計',
|
||||
'活',
|
||||
'海',
|
||||
'科'
|
||||
'科',
|
||||
],
|
||||
10: ['馬', '通', '書', '弱', '時', '紙', '高', '原', '帰', '記', '家', '夏'],
|
||||
11: ['理', '野', '鳥', '組', '船', '雪', '週', '細', '黒', '黄', '教', '強', '魚'],
|
||||
@@ -124,7 +124,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
14: ['鳴', '聞', '読', '算', '語', '歌'],
|
||||
15: ['線'],
|
||||
16: ['頭', '親'],
|
||||
18: ['曜', '顔']
|
||||
18: ['曜', '顔'],
|
||||
},
|
||||
3: {
|
||||
2: ['丁'],
|
||||
@@ -146,7 +146,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'皿',
|
||||
'号',
|
||||
'去',
|
||||
'央'
|
||||
'央',
|
||||
],
|
||||
6: ['列', '両', '羊', '有', '全', '州', '守', '式', '次', '死', '向', '血', '曲', '安'],
|
||||
7: ['役', '返', '坂', '豆', '投', '対', '身', '助', '住', '決', '君', '局', '究', '医'],
|
||||
@@ -178,7 +178,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'岸',
|
||||
'泳',
|
||||
'育',
|
||||
'委'
|
||||
'委',
|
||||
],
|
||||
9: [
|
||||
'洋',
|
||||
@@ -210,7 +210,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'急',
|
||||
'客',
|
||||
'界',
|
||||
'屋'
|
||||
'屋',
|
||||
],
|
||||
10: [
|
||||
'旅',
|
||||
@@ -232,7 +232,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'起',
|
||||
'荷',
|
||||
'院',
|
||||
'員'
|
||||
'員',
|
||||
],
|
||||
11: [
|
||||
'問',
|
||||
@@ -253,7 +253,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'終',
|
||||
'祭',
|
||||
'球',
|
||||
'悪'
|
||||
'悪',
|
||||
],
|
||||
12: [
|
||||
'落',
|
||||
@@ -282,13 +282,13 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'開',
|
||||
'温',
|
||||
'運',
|
||||
'飲'
|
||||
'飲',
|
||||
],
|
||||
13: ['路', '福', '農', '鉄', '想', '詩', '業', '漢', '感', '意', '暗'],
|
||||
14: ['練', '緑', '様', '鼻', '銀', '駅'],
|
||||
15: ['箱', '調', '談', '横'],
|
||||
16: ['薬', '整', '橋', '館'],
|
||||
18: ['題']
|
||||
18: ['題'],
|
||||
},
|
||||
4: {
|
||||
4: ['夫', '不', '井', '氏', '欠'],
|
||||
@@ -318,7 +318,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'岐',
|
||||
'完',
|
||||
'改',
|
||||
'位'
|
||||
'位',
|
||||
],
|
||||
8: [
|
||||
'例',
|
||||
@@ -346,7 +346,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'芽',
|
||||
'果',
|
||||
'岡',
|
||||
'英'
|
||||
'英',
|
||||
],
|
||||
9: [
|
||||
'要',
|
||||
@@ -367,7 +367,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'建',
|
||||
'軍',
|
||||
'栄',
|
||||
'茨'
|
||||
'茨',
|
||||
],
|
||||
10: [
|
||||
'連',
|
||||
@@ -389,7 +389,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'訓',
|
||||
'挙',
|
||||
'害',
|
||||
'案'
|
||||
'案',
|
||||
],
|
||||
11: [
|
||||
'陸',
|
||||
@@ -410,7 +410,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'康',
|
||||
'健',
|
||||
'械',
|
||||
'貨'
|
||||
'貨',
|
||||
],
|
||||
12: [
|
||||
'量',
|
||||
@@ -434,7 +434,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'覚',
|
||||
'街',
|
||||
'賀',
|
||||
'媛'
|
||||
'媛',
|
||||
],
|
||||
13: ['働', '置', '続', '戦', '節', '照', '辞', '試', '群', '塩', '愛'],
|
||||
14: ['徳', '説', '静', '種', '察', '熊', '漁', '旗', '関', '管'],
|
||||
@@ -442,7 +442,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
16: ['録', '積', '機'],
|
||||
18: ['類', '験', '観'],
|
||||
19: ['鏡', '願'],
|
||||
20: ['競', '議']
|
||||
20: ['競', '議'],
|
||||
},
|
||||
5: {
|
||||
3: ['士', '久'],
|
||||
@@ -464,7 +464,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'技',
|
||||
'快',
|
||||
'応',
|
||||
'囲'
|
||||
'囲',
|
||||
],
|
||||
8: [
|
||||
'武',
|
||||
@@ -484,7 +484,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'河',
|
||||
'価',
|
||||
'往',
|
||||
'易'
|
||||
'易',
|
||||
],
|
||||
9: ['迷', '保', '独', '則', '祖', '政', '査', '厚', '故', '限', '型', '逆', '紀'],
|
||||
10: [
|
||||
@@ -505,7 +505,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'個',
|
||||
'格',
|
||||
'桜',
|
||||
'益'
|
||||
'益',
|
||||
],
|
||||
11: [
|
||||
'略',
|
||||
@@ -537,7 +537,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'基',
|
||||
'眼',
|
||||
'液',
|
||||
'移'
|
||||
'移',
|
||||
],
|
||||
12: [
|
||||
'貿',
|
||||
@@ -561,7 +561,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'検',
|
||||
'喜',
|
||||
'過',
|
||||
'営'
|
||||
'営',
|
||||
],
|
||||
13: ['夢', '豊', '墓', '損', '勢', '準', '飼', '資', '罪', '鉱', '禁', '義', '幹', '解'],
|
||||
14: [
|
||||
@@ -583,14 +583,14 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'構',
|
||||
'境',
|
||||
'慣',
|
||||
'演'
|
||||
'演',
|
||||
],
|
||||
15: ['暴', '編', '導', '賞', '質', '賛', '潔', '確'],
|
||||
16: ['輸', '燃', '築', '興', '衛'],
|
||||
17: ['績', '謝', '講'],
|
||||
18: ['職', '織', '額'],
|
||||
19: ['識'],
|
||||
20: ['護']
|
||||
20: ['護'],
|
||||
},
|
||||
6: {
|
||||
3: ['亡', '寸', '己', '干'],
|
||||
@@ -618,7 +618,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'供',
|
||||
'拡',
|
||||
'沿',
|
||||
'延'
|
||||
'延',
|
||||
],
|
||||
9: [
|
||||
'律',
|
||||
@@ -641,7 +641,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'巻',
|
||||
'革',
|
||||
'映',
|
||||
'胃'
|
||||
'胃',
|
||||
],
|
||||
10: [
|
||||
'朗',
|
||||
@@ -667,7 +667,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'降',
|
||||
'胸',
|
||||
'株',
|
||||
'恩'
|
||||
'恩',
|
||||
],
|
||||
11: [
|
||||
'翌',
|
||||
@@ -689,7 +689,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'済',
|
||||
'郷',
|
||||
'域',
|
||||
'異'
|
||||
'異',
|
||||
],
|
||||
12: [
|
||||
'棒',
|
||||
@@ -710,7 +710,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'勤',
|
||||
'貴',
|
||||
'揮',
|
||||
'割'
|
||||
'割',
|
||||
],
|
||||
13: [
|
||||
'裏',
|
||||
@@ -727,14 +727,14 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'傷',
|
||||
'署',
|
||||
'源',
|
||||
'絹'
|
||||
'絹',
|
||||
],
|
||||
14: ['模', '暮', '認', '層', '銭', '障', '磁', '誌', '穀', '誤', '疑', '閣'],
|
||||
15: ['論', '敵', '潮', '誕', '蔵', '諸', '熟', '権', '劇', '遺'],
|
||||
16: ['奮', '糖', '操', '縦', '樹', '鋼', '憲', '激'],
|
||||
17: ['覧', '優', '縮', '厳'],
|
||||
18: ['臨', '難', '簡'],
|
||||
19: ['臓', '警']
|
||||
19: ['臓', '警'],
|
||||
},
|
||||
7: {
|
||||
1: ['乙'],
|
||||
@@ -760,7 +760,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'斤',
|
||||
'凶',
|
||||
'刈',
|
||||
'介'
|
||||
'介',
|
||||
],
|
||||
5: [
|
||||
'矛',
|
||||
@@ -790,7 +790,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'且',
|
||||
'瓦',
|
||||
'牙',
|
||||
'凹'
|
||||
'凹',
|
||||
],
|
||||
6: [
|
||||
'劣',
|
||||
@@ -831,7 +831,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'汗',
|
||||
'汚',
|
||||
'芋',
|
||||
'扱'
|
||||
'扱',
|
||||
],
|
||||
7: [
|
||||
'弄',
|
||||
@@ -896,7 +896,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'肝',
|
||||
'戒',
|
||||
'壱',
|
||||
'亜'
|
||||
'亜',
|
||||
],
|
||||
8: [
|
||||
'枠',
|
||||
@@ -989,7 +989,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'押',
|
||||
'炎',
|
||||
'依',
|
||||
'宛'
|
||||
'宛',
|
||||
],
|
||||
9: [
|
||||
'郎',
|
||||
@@ -1081,7 +1081,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'畏',
|
||||
'為',
|
||||
'威',
|
||||
'哀'
|
||||
'哀',
|
||||
],
|
||||
10: [
|
||||
'脇',
|
||||
@@ -1206,7 +1206,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'浦',
|
||||
'畝',
|
||||
'唄',
|
||||
'挨'
|
||||
'挨',
|
||||
],
|
||||
11: [
|
||||
'累',
|
||||
@@ -1323,7 +1323,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'淫',
|
||||
'逸',
|
||||
'萎',
|
||||
'尉'
|
||||
'尉',
|
||||
],
|
||||
12: [
|
||||
'腕',
|
||||
@@ -1435,7 +1435,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'椅',
|
||||
'偉',
|
||||
'嵐',
|
||||
'握'
|
||||
'握',
|
||||
],
|
||||
13: [
|
||||
'賄',
|
||||
@@ -1552,7 +1552,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'猿',
|
||||
'煙',
|
||||
'違',
|
||||
'彙'
|
||||
'彙',
|
||||
],
|
||||
14: [
|
||||
'漏',
|
||||
@@ -1617,7 +1617,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'箇',
|
||||
'寡',
|
||||
'隠',
|
||||
'維'
|
||||
'維',
|
||||
],
|
||||
15: [
|
||||
'霊',
|
||||
@@ -1706,7 +1706,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'謁',
|
||||
'鋭',
|
||||
'影',
|
||||
'慰'
|
||||
'慰',
|
||||
],
|
||||
16: [
|
||||
'錬',
|
||||
@@ -1764,7 +1764,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'壊',
|
||||
'穏',
|
||||
'憶',
|
||||
'緯'
|
||||
'緯',
|
||||
],
|
||||
17: [
|
||||
'齢',
|
||||
@@ -1801,7 +1801,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'轄',
|
||||
'嚇',
|
||||
'臆',
|
||||
'曖'
|
||||
'曖',
|
||||
],
|
||||
18: [
|
||||
'糧',
|
||||
@@ -1830,7 +1830,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'韓',
|
||||
'鎌',
|
||||
'顎',
|
||||
'穫'
|
||||
'穫',
|
||||
],
|
||||
19: [
|
||||
'麓',
|
||||
@@ -1851,13 +1851,13 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
|
||||
'鶏',
|
||||
'繰',
|
||||
'艶',
|
||||
'韻'
|
||||
'韻',
|
||||
],
|
||||
20: ['欄', '騰', '籍', '醸', '譲', '鐘', '懸', '響'],
|
||||
21: ['露', '躍', '魔', '鶴', '顧', '艦'],
|
||||
22: ['籠', '襲', '驚'],
|
||||
23: ['鑑'],
|
||||
29: ['鬱']
|
||||
29: ['鬱'],
|
||||
},
|
||||
};
|
||||
|
||||
@@ -1866,7 +1866,8 @@ final Map<int, List<String>> JOUYOU_KANJI_BY_GRADES =
|
||||
.expand((entry) => entry.value.entries)
|
||||
.map((entry) => MapEntry(entry.key, entry.value))
|
||||
.fold<Map<int, List<String>>>(
|
||||
{},
|
||||
(acc, entry) => acc
|
||||
..putIfAbsent(entry.key, () => [])
|
||||
..update(entry.key, (value) => value..addAll(entry.value)));
|
||||
{},
|
||||
(acc, entry) => acc
|
||||
..putIfAbsent(entry.key, () => [])
|
||||
..update(entry.key, (value) => value..addAll(entry.value)),
|
||||
);
|
||||
|
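The fold above uses a common merge idiom: seed an empty map, make sure a list exists for the key, then append the incoming values. A small self-contained sketch of that idiom with hypothetical data (not the jouyou tables themselves) behaves like this:

void main() {
  // Hypothetical (key, values) pairs containing a duplicate key.
  final entries = [
    MapEntry(1, ['一']),
    MapEntry(2, ['二', '十']),
    MapEntry(1, ['七']),
  ];

  final merged = entries.fold<Map<int, List<String>>>(
    {},
    (acc, entry) => acc
      ..putIfAbsent(entry.key, () => [])
      ..update(entry.key, (value) => value..addAll(entry.value)),
  );

  print(merged); // {1: [一, 七], 2: [二, 十]}
}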
||||
@@ -31,7 +31,7 @@ const Map<int, List<String>> RADICALS = {
|
||||
'九',
|
||||
'ユ',
|
||||
'乃',
|
||||
'𠂉'
|
||||
'𠂉',
|
||||
],
|
||||
3: [
|
||||
'⻌',
|
||||
@@ -78,7 +78,7 @@ const Map<int, List<String>> RADICALS = {
|
||||
'也',
|
||||
'亡',
|
||||
'及',
|
||||
'久'
|
||||
'久',
|
||||
],
|
||||
4: [
|
||||
'⺹',
|
||||
@@ -124,7 +124,7 @@ const Map<int, List<String>> RADICALS = {
|
||||
'五',
|
||||
'屯',
|
||||
'巴',
|
||||
'毋'
|
||||
'毋',
|
||||
],
|
||||
5: [
|
||||
'玄',
|
||||
@@ -154,7 +154,7 @@ const Map<int, List<String>> RADICALS = {
|
||||
'冊',
|
||||
'母',
|
||||
'⺲',
|
||||
'牙'
|
||||
'牙',
|
||||
],
|
||||
6: [
|
||||
'瓜',
|
||||
@@ -181,7 +181,7 @@ const Map<int, List<String>> RADICALS = {
|
||||
'血',
|
||||
'行',
|
||||
'衣',
|
||||
'西'
|
||||
'西',
|
||||
],
|
||||
7: [
|
||||
'臣',
|
||||
@@ -204,7 +204,7 @@ const Map<int, List<String>> RADICALS = {
|
||||
'釆',
|
||||
'里',
|
||||
'舛',
|
||||
'麦'
|
||||
'麦',
|
||||
],
|
||||
8: ['金', '長', '門', '隶', '隹', '雨', '青', '非', '奄', '岡', '免', '斉'],
|
||||
9: ['面', '革', '韭', '音', '頁', '風', '飛', '食', '首', '香', '品'],
|
||||
|
||||
@@ -43,6 +43,7 @@ enum JlptLevel implements Comparable<JlptLevel> {
|
||||
int? get asInt =>
|
||||
this == JlptLevel.none ? null : JlptLevel.values.indexOf(this);
|
||||
|
||||
@override
|
||||
String toString() => toNullableString() ?? 'N/A';
|
||||
|
||||
Object? toJson() => toNullableString();
|
||||
|
||||
@@ -11,7 +11,7 @@ String migrationDirPath() {
|
||||
}
|
||||
|
||||
Future<void> createEmptyDb(DatabaseExecutor db) async {
|
||||
List<String> migrationFiles = [];
|
||||
final List<String> migrationFiles = [];
|
||||
for (final file in Directory(migrationDirPath()).listSync()) {
|
||||
if (file is File && file.path.endsWith('.sql')) {
|
||||
migrationFiles.add(file.path);
|
||||
|
||||
@@ -19,20 +19,14 @@ enum JMdictDialect {
|
||||
final String id;
|
||||
final String description;
|
||||
|
||||
const JMdictDialect({
|
||||
required this.id,
|
||||
required this.description,
|
||||
});
|
||||
const JMdictDialect({required this.id, required this.description});
|
||||
|
||||
static JMdictDialect fromId(String id) => JMdictDialect.values.firstWhere(
|
||||
(e) => e.id == id,
|
||||
orElse: () => throw Exception('Unknown id: $id'),
|
||||
);
|
||||
(e) => e.id == id,
|
||||
orElse: () => throw Exception('Unknown id: $id'),
|
||||
);
|
||||
|
||||
Map<String, Object?> toJson() => {
|
||||
'id': id,
|
||||
'description': description,
|
||||
};
|
||||
Map<String, Object?> toJson() => {'id': id, 'description': description};
|
||||
|
||||
static JMdictDialect fromJson(Map<String, Object?> json) =>
|
||||
JMdictDialect.values.firstWhere(
|
||||
|
||||
@@ -102,20 +102,14 @@ enum JMdictField {
|
||||
final String id;
|
||||
final String description;
|
||||
|
||||
const JMdictField({
|
||||
required this.id,
|
||||
required this.description,
|
||||
});
|
||||
const JMdictField({required this.id, required this.description});
|
||||
|
||||
static JMdictField fromId(String id) => JMdictField.values.firstWhere(
|
||||
(e) => e.id == id,
|
||||
orElse: () => throw Exception('Unknown id: $id'),
|
||||
);
|
||||
(e) => e.id == id,
|
||||
orElse: () => throw Exception('Unknown id: $id'),
|
||||
);
|
||||
|
||||
Map<String, Object?> toJson() => {
|
||||
'id': id,
|
||||
'description': description,
|
||||
};
|
||||
Map<String, Object?> toJson() => {'id': id, 'description': description};
|
||||
|
||||
static JMdictField fromJson(Map<String, Object?> json) =>
|
||||
JMdictField.values.firstWhere(
|
||||
|
||||
@@ -13,20 +13,14 @@ enum JMdictKanjiInfo {
|
||||
final String id;
|
||||
final String description;
|
||||
|
||||
const JMdictKanjiInfo({
|
||||
required this.id,
|
||||
required this.description,
|
||||
});
|
||||
const JMdictKanjiInfo({required this.id, required this.description});
|
||||
|
||||
static JMdictKanjiInfo fromId(String id) => JMdictKanjiInfo.values.firstWhere(
|
||||
(e) => e.id == id,
|
||||
orElse: () => throw Exception('Unknown id: $id'),
|
||||
);
|
||||
(e) => e.id == id,
|
||||
orElse: () => throw Exception('Unknown id: $id'),
|
||||
);
|
||||
|
||||
Map<String, Object?> toJson() => {
|
||||
'id': id,
|
||||
'description': description,
|
||||
};
|
||||
Map<String, Object?> toJson() => {'id': id, 'description': description};
|
||||
|
||||
static JMdictKanjiInfo fromJson(Map<String, Object?> json) =>
|
||||
JMdictKanjiInfo.values.firstWhere(
|
||||
|
||||
@@ -74,20 +74,14 @@ enum JMdictMisc {
|
||||
final String id;
|
||||
final String description;
|
||||
|
||||
const JMdictMisc({
|
||||
required this.id,
|
||||
required this.description,
|
||||
});
|
||||
const JMdictMisc({required this.id, required this.description});
|
||||
|
||||
static JMdictMisc fromId(String id) => JMdictMisc.values.firstWhere(
|
||||
(e) => e.id == id,
|
||||
orElse: () => throw Exception('Unknown id: $id'),
|
||||
);
|
||||
(e) => e.id == id,
|
||||
orElse: () => throw Exception('Unknown id: $id'),
|
||||
);
|
||||
|
||||
Map<String, Object?> toJson() => {
|
||||
'id': id,
|
||||
'description': description,
|
||||
};
|
||||
Map<String, Object?> toJson() => {'id': id, 'description': description};
|
||||
|
||||
static JMdictMisc fromJson(Map<String, Object?> json) =>
|
||||
JMdictMisc.values.firstWhere(
|
||||
|
||||
@@ -202,14 +202,11 @@ enum JMdictPOS {
|
||||
String get shortDescription => _shortDescription ?? description;
|
||||
|
||||
static JMdictPOS fromId(String id) => JMdictPOS.values.firstWhere(
|
||||
(e) => e.id == id,
|
||||
orElse: () => throw Exception('Unknown id: $id'),
|
||||
);
|
||||
(e) => e.id == id,
|
||||
orElse: () => throw Exception('Unknown id: $id'),
|
||||
);
|
||||
|
||||
Map<String, Object?> toJson() => {
|
||||
'id': id,
|
||||
'description': description,
|
||||
};
|
||||
Map<String, Object?> toJson() => {'id': id, 'description': description};
|
||||
|
||||
static JMdictPOS fromJson(Map<String, Object?> json) =>
|
||||
JMdictPOS.values.firstWhere(
|
||||
|
||||
@@ -15,10 +15,7 @@ enum JMdictReadingInfo {
|
||||
final String id;
|
||||
final String description;
|
||||
|
||||
const JMdictReadingInfo({
|
||||
required this.id,
|
||||
required this.description,
|
||||
});
|
||||
const JMdictReadingInfo({required this.id, required this.description});
|
||||
|
||||
static JMdictReadingInfo fromId(String id) =>
|
||||
JMdictReadingInfo.values.firstWhere(
|
||||
@@ -26,10 +23,7 @@ enum JMdictReadingInfo {
|
||||
orElse: () => throw Exception('Unknown id: $id'),
|
||||
);
|
||||
|
||||
Map<String, Object?> toJson() => {
|
||||
'id': id,
|
||||
'description': description,
|
||||
};
|
||||
Map<String, Object?> toJson() => {'id': id, 'description': description};
|
||||
|
||||
static JMdictReadingInfo fromJson(Map<String, Object?> json) =>
|
||||
JMdictReadingInfo.values.firstWhere(
|
||||
|
||||
@@ -26,19 +26,14 @@ class KanjiSearchRadical extends Equatable {
|
||||
});
|
||||
|
||||
@override
|
||||
List<Object> get props => [
|
||||
symbol,
|
||||
this.names,
|
||||
forms,
|
||||
meanings,
|
||||
];
|
||||
List<Object> get props => [symbol, names, forms, meanings];
|
||||
|
||||
Map<String, dynamic> toJson() => {
|
||||
'symbol': symbol,
|
||||
'names': names,
|
||||
'forms': forms,
|
||||
'meanings': meanings,
|
||||
};
|
||||
'symbol': symbol,
|
||||
'names': names,
|
||||
'forms': forms,
|
||||
'meanings': meanings,
|
||||
};
|
||||
|
||||
factory KanjiSearchRadical.fromJson(Map<String, dynamic> json) {
|
||||
return KanjiSearchRadical(
|
||||
|
||||
@@ -89,46 +89,46 @@ class KanjiSearchResult extends Equatable {
|
||||
@override
|
||||
// ignore: public_member_api_docs
|
||||
List<Object?> get props => [
|
||||
taughtIn,
|
||||
jlptLevel,
|
||||
newspaperFrequencyRank,
|
||||
strokeCount,
|
||||
meanings,
|
||||
kunyomi,
|
||||
onyomi,
|
||||
// kunyomiExamples,
|
||||
// onyomiExamples,
|
||||
radical,
|
||||
parts,
|
||||
codepoints,
|
||||
kanji,
|
||||
nanori,
|
||||
alternativeLanguageReadings,
|
||||
strokeMiscounts,
|
||||
queryCodes,
|
||||
dictionaryReferences,
|
||||
];
|
||||
taughtIn,
|
||||
jlptLevel,
|
||||
newspaperFrequencyRank,
|
||||
strokeCount,
|
||||
meanings,
|
||||
kunyomi,
|
||||
onyomi,
|
||||
// kunyomiExamples,
|
||||
// onyomiExamples,
|
||||
radical,
|
||||
parts,
|
||||
codepoints,
|
||||
kanji,
|
||||
nanori,
|
||||
alternativeLanguageReadings,
|
||||
strokeMiscounts,
|
||||
queryCodes,
|
||||
dictionaryReferences,
|
||||
];
|
||||
|
||||
Map<String, dynamic> toJson() => {
|
||||
'kanji': kanji,
|
||||
'taughtIn': taughtIn,
|
||||
'jlptLevel': jlptLevel,
|
||||
'newspaperFrequencyRank': newspaperFrequencyRank,
|
||||
'strokeCount': strokeCount,
|
||||
'meanings': meanings,
|
||||
'kunyomi': kunyomi,
|
||||
'onyomi': onyomi,
|
||||
// 'onyomiExamples': onyomiExamples,
|
||||
// 'kunyomiExamples': kunyomiExamples,
|
||||
'radical': radical?.toJson(),
|
||||
'parts': parts,
|
||||
'codepoints': codepoints,
|
||||
'nanori': nanori,
|
||||
'alternativeLanguageReadings': alternativeLanguageReadings,
|
||||
'strokeMiscounts': strokeMiscounts,
|
||||
'queryCodes': queryCodes,
|
||||
'dictionaryReferences': dictionaryReferences,
|
||||
};
|
||||
'kanji': kanji,
|
||||
'taughtIn': taughtIn,
|
||||
'jlptLevel': jlptLevel,
|
||||
'newspaperFrequencyRank': newspaperFrequencyRank,
|
||||
'strokeCount': strokeCount,
|
||||
'meanings': meanings,
|
||||
'kunyomi': kunyomi,
|
||||
'onyomi': onyomi,
|
||||
// 'onyomiExamples': onyomiExamples,
|
||||
// 'kunyomiExamples': kunyomiExamples,
|
||||
'radical': radical?.toJson(),
|
||||
'parts': parts,
|
||||
'codepoints': codepoints,
|
||||
'nanori': nanori,
|
||||
'alternativeLanguageReadings': alternativeLanguageReadings,
|
||||
'strokeMiscounts': strokeMiscounts,
|
||||
'queryCodes': queryCodes,
|
||||
'dictionaryReferences': dictionaryReferences,
|
||||
};
|
||||
|
||||
factory KanjiSearchResult.fromJson(Map<String, dynamic> json) {
|
||||
return KanjiSearchResult(
|
||||
@@ -156,23 +156,20 @@ class KanjiSearchResult extends Equatable {
|
||||
nanori: (json['nanori'] as List).map((e) => e as String).toList(),
|
||||
alternativeLanguageReadings:
|
||||
(json['alternativeLanguageReadings'] as Map<String, dynamic>).map(
|
||||
(key, value) => MapEntry(
|
||||
key,
|
||||
(value as List).map((e) => e as String).toList(),
|
||||
),
|
||||
),
|
||||
strokeMiscounts:
|
||||
(json['strokeMiscounts'] as List).map((e) => e as int).toList(),
|
||||
(key, value) =>
|
||||
MapEntry(key, (value as List).map((e) => e as String).toList()),
|
||||
),
|
||||
strokeMiscounts: (json['strokeMiscounts'] as List)
|
||||
.map((e) => e as int)
|
||||
.toList(),
|
||||
queryCodes: (json['queryCodes'] as Map<String, dynamic>).map(
|
||||
(key, value) => MapEntry(
|
||||
key,
|
||||
(value as List).map((e) => e as String).toList(),
|
||||
),
|
||||
(key, value) =>
|
||||
MapEntry(key, (value as List).map((e) => e as String).toList()),
|
||||
),
|
||||
dictionaryReferences:
|
||||
(json['dictionaryReferences'] as Map<String, dynamic>).map(
|
||||
(key, value) => MapEntry(key, value as String),
|
||||
),
|
||||
(key, value) => MapEntry(key, value as String),
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,14 +7,14 @@ import 'package:sqflite_common/sqlite_api.dart';
|
||||
Future<void> verifyTablesWithDbConnection(DatabaseExecutor db) async {
|
||||
final Set<String> tables = await db
|
||||
.query(
|
||||
'sqlite_master',
|
||||
columns: ['name'],
|
||||
where: 'type = ?',
|
||||
whereArgs: ['table'],
|
||||
)
|
||||
'sqlite_master',
|
||||
columns: ['name'],
|
||||
where: 'type = ?',
|
||||
whereArgs: ['table'],
|
||||
)
|
||||
.then((result) {
|
||||
return result.map((row) => row['name'] as String).toSet();
|
||||
});
|
||||
return result.map((row) => row['name'] as String).toSet();
|
||||
});
|
||||
|
||||
final Set<String> expectedTables = {
|
||||
...JMdictTableNames.allTables,
|
||||
@@ -26,14 +26,16 @@ Future<void> verifyTablesWithDbConnection(DatabaseExecutor db) async {
|
||||
final missingTables = expectedTables.difference(tables);
|
||||
|
||||
if (missingTables.isNotEmpty) {
|
||||
throw Exception([
|
||||
'Missing tables:',
|
||||
missingTables.map((table) => ' - $table').join('\n'),
|
||||
'',
|
||||
'Found tables:\n',
|
||||
tables.map((table) => ' - $table').join('\n'),
|
||||
'',
|
||||
'Please ensure the database is correctly set up.',
|
||||
].join('\n'));
|
||||
throw Exception(
|
||||
[
|
||||
'Missing tables:',
|
||||
missingTables.map((table) => ' - $table').join('\n'),
|
||||
'',
|
||||
'Found tables:\n',
|
||||
tables.map((table) => ' - $table').join('\n'),
|
||||
'',
|
||||
'Please ensure the database is correctly set up.',
|
||||
].join('\n'),
|
||||
);
|
||||
}
|
||||
}
|
||||
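For context, one way this check could be driven from a small script is sketched below. The ffi-based database factory is an assumption here (the CLI in this repository loads libsqlite from a flag instead); the verify_tables import path is the one that appears later in this diff.

import 'package:jadb/models/verify_tables.dart';
import 'package:sqflite_common_ffi/sqflite_ffi.dart';

Future<void> main() async {
  sqfliteFfiInit();
  // Hypothetical database path; the CLI resolves this from its arguments.
  final db = await databaseFactoryFfi.openDatabase('jadb.sqlite3');
  try {
    await verifyTablesWithDbConnection(db);
    print('All expected tables are present.');
  } on Exception catch (e) {
    // The message lists missing and found tables, as assembled above.
    print(e);
  } finally {
    await db.close();
  }
}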
|
||||
@@ -47,18 +47,18 @@ class WordSearchResult {
|
||||
});
|
||||
|
||||
Map<String, dynamic> toJson() => {
|
||||
'_score': score,
|
||||
'entryId': entryId,
|
||||
'isCommon': isCommon,
|
||||
'japanese': japanese.map((e) => e.toJson()).toList(),
|
||||
'kanjiInfo':
|
||||
kanjiInfo.map((key, value) => MapEntry(key, value.toJson())),
|
||||
'readingInfo':
|
||||
readingInfo.map((key, value) => MapEntry(key, value.toJson())),
|
||||
'senses': senses.map((e) => e.toJson()).toList(),
|
||||
'jlptLevel': jlptLevel.toJson(),
|
||||
'sources': sources.toJson(),
|
||||
};
|
||||
'_score': score,
|
||||
'entryId': entryId,
|
||||
'isCommon': isCommon,
|
||||
'japanese': japanese.map((e) => e.toJson()).toList(),
|
||||
'kanjiInfo': kanjiInfo.map((key, value) => MapEntry(key, value.toJson())),
|
||||
'readingInfo': readingInfo.map(
|
||||
(key, value) => MapEntry(key, value.toJson()),
|
||||
),
|
||||
'senses': senses.map((e) => e.toJson()).toList(),
|
||||
'jlptLevel': jlptLevel.toJson(),
|
||||
'sources': sources.toJson(),
|
||||
};
|
||||
|
||||
factory WordSearchResult.fromJson(Map<String, dynamic> json) =>
|
||||
WordSearchResult(
|
||||
@@ -82,16 +82,16 @@ class WordSearchResult {
|
||||
);
|
||||
|
||||
String _formatJapaneseWord(WordSearchRuby word) =>
|
||||
word.furigana == null ? word.base : "${word.base} (${word.furigana})";
|
||||
word.furigana == null ? word.base : '${word.base} (${word.furigana})';
|
||||
|
||||
@override
|
||||
String toString() {
|
||||
final japaneseWord = _formatJapaneseWord(japanese[0]);
|
||||
final isCommonString = isCommon ? '(C)' : '';
|
||||
final jlptLevelString = "(${jlptLevel.toString()})";
|
||||
final jlptLevelString = '(${jlptLevel.toString()})';
|
||||
|
||||
return '''
|
||||
${score} | [$entryId] $japaneseWord $isCommonString $jlptLevelString
|
||||
$score | [$entryId] $japaneseWord $isCommonString $jlptLevelString
|
||||
Other forms: ${japanese.skip(1).map(_formatJapaneseWord).join(', ')}
|
||||
Senses: ${senses.map((s) => s.englishDefinitions).join(', ')}
|
||||
'''
|
||||
|
||||
@@ -6,18 +6,12 @@ class WordSearchRuby {
|
||||
/// Furigana, if applicable.
|
||||
String? furigana;
|
||||
|
||||
WordSearchRuby({
|
||||
required this.base,
|
||||
this.furigana,
|
||||
});
|
||||
WordSearchRuby({required this.base, this.furigana});
|
||||
|
||||
Map<String, dynamic> toJson() => {
|
||||
'base': base,
|
||||
'furigana': furigana,
|
||||
};
|
||||
Map<String, dynamic> toJson() => {'base': base, 'furigana': furigana};
|
||||
|
||||
factory WordSearchRuby.fromJson(Map<String, dynamic> json) => WordSearchRuby(
|
||||
base: json['base'] as String,
|
||||
furigana: json['furigana'] as String?,
|
||||
);
|
||||
base: json['base'] as String,
|
||||
furigana: json['furigana'] as String?,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -71,18 +71,18 @@ class WordSearchSense {
|
||||
languageSource.isEmpty;
|
||||
|
||||
Map<String, dynamic> toJson() => {
|
||||
'englishDefinitions': englishDefinitions,
|
||||
'partsOfSpeech': partsOfSpeech.map((e) => e.toJson()).toList(),
|
||||
'seeAlso': seeAlso.map((e) => e.toJson()).toList(),
|
||||
'antonyms': antonyms.map((e) => e.toJson()).toList(),
|
||||
'restrictedToReading': restrictedToReading,
|
||||
'restrictedToKanji': restrictedToKanji,
|
||||
'fields': fields.map((e) => e.toJson()).toList(),
|
||||
'dialects': dialects.map((e) => e.toJson()).toList(),
|
||||
'misc': misc.map((e) => e.toJson()).toList(),
|
||||
'info': info,
|
||||
'languageSource': languageSource,
|
||||
};
|
||||
'englishDefinitions': englishDefinitions,
|
||||
'partsOfSpeech': partsOfSpeech.map((e) => e.toJson()).toList(),
|
||||
'seeAlso': seeAlso.map((e) => e.toJson()).toList(),
|
||||
'antonyms': antonyms.map((e) => e.toJson()).toList(),
|
||||
'restrictedToReading': restrictedToReading,
|
||||
'restrictedToKanji': restrictedToKanji,
|
||||
'fields': fields.map((e) => e.toJson()).toList(),
|
||||
'dialects': dialects.map((e) => e.toJson()).toList(),
|
||||
'misc': misc.map((e) => e.toJson()).toList(),
|
||||
'info': info,
|
||||
'languageSource': languageSource,
|
||||
};
|
||||
|
||||
factory WordSearchSense.fromJson(Map<String, dynamic> json) =>
|
||||
WordSearchSense(
|
||||
@@ -104,8 +104,9 @@ class WordSearchSense {
|
||||
dialects: (json['dialects'] as List)
|
||||
.map((e) => JMdictDialect.fromJson(e))
|
||||
.toList(),
|
||||
misc:
|
||||
(json['misc'] as List).map((e) => JMdictMisc.fromJson(e)).toList(),
|
||||
misc: (json['misc'] as List)
|
||||
.map((e) => JMdictMisc.fromJson(e))
|
||||
.toList(),
|
||||
info: List<String>.from(json['info']),
|
||||
languageSource: (json['languageSource'] as List)
|
||||
.map((e) => WordSearchSenseLanguageSource.fromJson(e))
|
||||
|
||||
@@ -13,11 +13,11 @@ class WordSearchSenseLanguageSource {
|
||||
});
|
||||
|
||||
Map<String, Object?> toJson() => {
|
||||
'language': language,
|
||||
'phrase': phrase,
|
||||
'fullyDescribesSense': fullyDescribesSense,
|
||||
'constructedFromSmallerWords': constructedFromSmallerWords,
|
||||
};
|
||||
'language': language,
|
||||
'phrase': phrase,
|
||||
'fullyDescribesSense': fullyDescribesSense,
|
||||
'constructedFromSmallerWords': constructedFromSmallerWords,
|
||||
};
|
||||
|
||||
factory WordSearchSenseLanguageSource.fromJson(Map<String, dynamic> json) =>
|
||||
WordSearchSenseLanguageSource(
|
||||
|
||||
@@ -7,20 +7,11 @@ class WordSearchSources {
|
||||
/// Whether JMnedict was used.
|
||||
final bool jmnedict;
|
||||
|
||||
const WordSearchSources({
|
||||
this.jmdict = true,
|
||||
this.jmnedict = false,
|
||||
});
|
||||
const WordSearchSources({this.jmdict = true, this.jmnedict = false});
|
||||
|
||||
Map<String, Object?> get sqlValue => {
|
||||
'jmdict': jmdict,
|
||||
'jmnedict': jmnedict,
|
||||
};
|
||||
Map<String, Object?> get sqlValue => {'jmdict': jmdict, 'jmnedict': jmnedict};
|
||||
|
||||
Map<String, dynamic> toJson() => {
|
||||
'jmdict': jmdict,
|
||||
'jmnedict': jmnedict,
|
||||
};
|
||||
Map<String, dynamic> toJson() => {'jmdict': jmdict, 'jmnedict': jmnedict};
|
||||
|
||||
factory WordSearchSources.fromJson(Map<String, dynamic> json) =>
|
||||
WordSearchSources(
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import 'package:jadb/models/word_search/word_search_result.dart';
|
||||
|
||||
/// A cross-reference entry from one word-result to another entry.
|
||||
class WordSearchXrefEntry {
|
||||
/// The ID of the entry that this entry cross-references to.
|
||||
@@ -13,19 +15,24 @@ class WordSearchXrefEntry {
|
||||
/// database (and hence might be incorrect).
|
||||
final bool ambiguous;
|
||||
|
||||
/// The result of the cross-reference; it may or may not be included in the query.
|
||||
final WordSearchResult? xrefResult;
|
||||
|
||||
const WordSearchXrefEntry({
|
||||
required this.entryId,
|
||||
required this.ambiguous,
|
||||
required this.baseWord,
|
||||
required this.furigana,
|
||||
required this.xrefResult,
|
||||
});
|
||||
|
||||
Map<String, dynamic> toJson() => {
|
||||
'entryId': entryId,
|
||||
'ambiguous': ambiguous,
|
||||
'baseWord': baseWord,
|
||||
'furigana': furigana,
|
||||
};
|
||||
'entryId': entryId,
|
||||
'ambiguous': ambiguous,
|
||||
'baseWord': baseWord,
|
||||
'furigana': furigana,
|
||||
'xrefResult': xrefResult?.toJson(),
|
||||
};
|
||||
|
||||
factory WordSearchXrefEntry.fromJson(Map<String, dynamic> json) =>
|
||||
WordSearchXrefEntry(
|
||||
@@ -33,5 +40,6 @@ class WordSearchXrefEntry {
|
||||
ambiguous: json['ambiguous'] as bool,
|
||||
baseWord: json['baseWord'] as String,
|
||||
furigana: json['furigana'] as String?,
|
||||
xrefResult: null,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,12 +1,10 @@
|
||||
import 'package:jadb/models/kanji_search/kanji_search_result.dart';
|
||||
import 'package:jadb/models/verify_tables.dart';
|
||||
import 'package:jadb/models/word_search/word_search_result.dart';
|
||||
import 'package:jadb/models/kanji_search/kanji_search_result.dart';
|
||||
import 'package:jadb/search/filter_kanji.dart';
|
||||
import 'package:jadb/search/kanji_search.dart';
|
||||
import 'package:jadb/search/radical_search.dart';
|
||||
import 'package:jadb/search/word_search/word_search.dart';
|
||||
|
||||
import 'package:jadb/search/kanji_search.dart';
|
||||
|
||||
import 'package:sqflite_common/sqlite_api.dart';
|
||||
|
||||
extension JaDBConnection on DatabaseExecutor {
|
||||
@@ -19,38 +17,45 @@ extension JaDBConnection on DatabaseExecutor {
|
||||
Future<KanjiSearchResult?> jadbSearchKanji(String kanji) =>
|
||||
searchKanjiWithDbConnection(this, kanji);
|
||||
|
||||
/// Look up multiple kanji in the database at once.
|
||||
Future<Map<String, KanjiSearchResult>> jadbGetManyKanji(Set<String> kanji) =>
|
||||
searchManyKanjiWithDbConnection(this, kanji);
|
||||
|
||||
/// Filter a list of characters, and return the ones that are listed in the kanji dictionary.
|
||||
Future<List<String>> filterKanji(
|
||||
List<String> kanji, {
|
||||
bool deduplicate = false,
|
||||
}) =>
|
||||
filterKanjiWithDbConnection(this, kanji, deduplicate);
|
||||
}) => filterKanjiWithDbConnection(this, kanji, deduplicate);
|
||||
|
||||
/// Search for a word in the database.
|
||||
Future<List<WordSearchResult>?> jadbSearchWord(
|
||||
String word, {
|
||||
SearchMode searchMode = SearchMode.Auto,
|
||||
int page = 0,
|
||||
int pageSize = 10,
|
||||
}) =>
|
||||
searchWordWithDbConnection(
|
||||
this,
|
||||
word,
|
||||
searchMode,
|
||||
page,
|
||||
pageSize,
|
||||
);
|
||||
int? pageSize,
|
||||
}) => searchWordWithDbConnection(
|
||||
this,
|
||||
word,
|
||||
searchMode: searchMode,
|
||||
page: page,
|
||||
pageSize: pageSize,
|
||||
);
|
||||
|
||||
/// Get a single word entry by its ID, or null if no entry matches.
|
||||
Future<WordSearchResult?> jadbGetWordById(int id) =>
|
||||
getWordByIdWithDbConnection(this, id);
|
||||
|
||||
/// Get a list of words by their IDs.
|
||||
///
|
||||
/// IDs for which no result is found are omitted from the returned value.
|
||||
Future<Map<int, WordSearchResult>> jadbGetManyWordsByIds(Set<int> ids) =>
|
||||
getWordsByIdsWithDbConnection(this, ids);
|
||||
|
||||
/// Search for a word in the database, and return the count of results.
|
||||
Future<int?> jadbSearchWordCount(
|
||||
String word, {
|
||||
SearchMode searchMode = SearchMode.Auto,
|
||||
}) =>
|
||||
searchWordCountWithDbConnection(this, word, searchMode);
|
||||
}) => searchWordCountWithDbConnection(this, word, searchMode: searchMode);
|
||||
|
||||
/// Given a list of radicals, search which kanji contains all
|
||||
/// of the radicals, find their other radicals, and return those.
|
||||
|
||||
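Taken together, the extension gives callers one method per lookup. A hedged usage sketch, assuming `db` is an open DatabaseExecutor with the JaDBConnection extension imported (its exact import path is not shown in this diff) and an arbitrary search term:

import 'package:sqflite_common/sqlite_api.dart';

Future<void> demo(DatabaseExecutor db) async {
  final total = await db.jadbSearchWordCount('食べる');
  final firstPage = await db.jadbSearchWord('食べる', page: 0, pageSize: 10);
  print('$total matches, showing ${firstPage?.length ?? 0}');

  // Kanji details for a character used in the results.
  final kanjiInfo = await db.jadbGetManyKanji({'食'});
  print(kanjiInfo['食']?.meanings);
}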
@@ -6,14 +6,13 @@ Future<List<String>> filterKanjiWithDbConnection(
|
||||
List<String> kanji,
|
||||
bool deduplicate,
|
||||
) async {
|
||||
final Set<String> filteredKanji = await connection.rawQuery(
|
||||
'''
|
||||
final Set<String> filteredKanji = await connection
|
||||
.rawQuery('''
|
||||
SELECT "literal"
|
||||
FROM "${KANJIDICTableNames.character}"
|
||||
WHERE "literal" IN (${kanji.map((_) => '?').join(',')})
|
||||
''',
|
||||
kanji,
|
||||
).then((value) => value.map((e) => e['literal'] as String).toSet());
|
||||
''', kanji)
|
||||
.then((value) => value.map((e) => e['literal'] as String).toSet());
|
||||
|
||||
if (deduplicate) {
|
||||
return filteredKanji.toList();
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import 'package:collection/collection.dart';
|
||||
import 'package:jadb/table_names/kanjidic.dart';
|
||||
import 'package:jadb/table_names/radkfile.dart';
|
||||
import 'package:jadb/models/kanji_search/kanji_search_radical.dart';
|
||||
import 'package:jadb/models/kanji_search/kanji_search_result.dart';
|
||||
import 'package:jadb/table_names/kanjidic.dart';
|
||||
import 'package:jadb/table_names/radkfile.dart';
|
||||
import 'package:jadb/util/romaji_transliteration.dart';
|
||||
import 'package:sqflite_common/sqflite.dart';
|
||||
|
||||
@@ -11,66 +11,66 @@ Future<KanjiSearchResult?> searchKanjiWithDbConnection(
|
||||
String kanji,
|
||||
) async {
|
||||
late final List<Map<String, Object?>> characters;
|
||||
final characters_query = connection.query(
|
||||
final charactersQuery = connection.query(
|
||||
KANJIDICTableNames.character,
|
||||
where: "literal = ?",
|
||||
where: 'literal = ?',
|
||||
whereArgs: [kanji],
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> codepoints;
|
||||
final codepoints_query = connection.query(
|
||||
final codepointsQuery = connection.query(
|
||||
KANJIDICTableNames.codepoint,
|
||||
where: "kanji = ?",
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> kunyomis;
|
||||
final kunyomis_query = connection.query(
|
||||
final kunyomisQuery = connection.query(
|
||||
KANJIDICTableNames.kunyomi,
|
||||
where: "kanji = ?",
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
orderBy: "orderNum",
|
||||
orderBy: 'orderNum',
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> onyomis;
|
||||
final onyomis_query = connection.query(
|
||||
final onyomisQuery = connection.query(
|
||||
KANJIDICTableNames.onyomi,
|
||||
where: "kanji = ?",
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
orderBy: "orderNum",
|
||||
orderBy: 'orderNum',
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> meanings;
|
||||
final meanings_query = connection.query(
|
||||
final meaningsQuery = connection.query(
|
||||
KANJIDICTableNames.meaning,
|
||||
where: "kanji = ? AND language = ?",
|
||||
where: 'kanji = ? AND language = ?',
|
||||
whereArgs: [kanji, 'eng'],
|
||||
orderBy: "orderNum",
|
||||
orderBy: 'orderNum',
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> nanoris;
|
||||
final nanoris_query = connection.query(
|
||||
final nanorisQuery = connection.query(
|
||||
KANJIDICTableNames.nanori,
|
||||
where: "kanji = ?",
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> dictionary_references;
|
||||
final dictionary_references_query = connection.query(
|
||||
late final List<Map<String, Object?>> dictionaryReferences;
|
||||
final dictionaryReferencesQuery = connection.query(
|
||||
KANJIDICTableNames.dictionaryReference,
|
||||
where: "kanji = ?",
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> query_codes;
|
||||
final query_codes_query = connection.query(
|
||||
late final List<Map<String, Object?>> queryCodes;
|
||||
final queryCodesQuery = connection.query(
|
||||
KANJIDICTableNames.queryCode,
|
||||
where: "kanji = ?",
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> radicals;
|
||||
final radicals_query = connection.rawQuery(
|
||||
final radicalsQuery = connection.rawQuery(
|
||||
'''
|
||||
SELECT DISTINCT
|
||||
"XREF__KANJIDIC_Radical__RADKFILE"."radicalSymbol" AS "symbol",
|
||||
@@ -88,23 +88,23 @@ Future<KanjiSearchResult?> searchKanjiWithDbConnection(
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> parts;
|
||||
final parts_query = connection.query(
|
||||
final partsQuery = connection.query(
|
||||
RADKFILETableNames.radkfile,
|
||||
where: "kanji = ?",
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> readings;
|
||||
final readings_query = connection.query(
|
||||
final readingsQuery = connection.query(
|
||||
KANJIDICTableNames.reading,
|
||||
where: "kanji = ?",
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> stroke_miscounts;
|
||||
final stroke_miscounts_query = connection.query(
|
||||
late final List<Map<String, Object?>> strokeMiscounts;
|
||||
final strokeMiscountsQuery = connection.query(
|
||||
KANJIDICTableNames.strokeMiscount,
|
||||
where: "kanji = ?",
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
);
|
||||
|
||||
@@ -116,29 +116,29 @@ Future<KanjiSearchResult?> searchKanjiWithDbConnection(
|
||||
// whereArgs: [kanji],
|
||||
// );
|
||||
|
||||
// TODO: Search for kunyomi and onyomi usage of the characters
|
||||
// from JMDict. We'll need to fuzzy-query JMDict_KanjiElement for matches,
|
||||
// filter JMdict_ReadingElement for kunyomi/onyomi, and then sort the main entry
|
||||
// by JLPT, news frequency, etc.
|
||||
// TODO: Search for kunyomi and onyomi usage of the characters
|
||||
// from JMDict. We'll need to fuzzy-query JMDict_KanjiElement for matches,
|
||||
// filter JMdict_ReadingElement for kunyomi/onyomi, and then sort the main entry
|
||||
// by JLPT, news frequency, etc.
|
||||
|
||||
await characters_query.then((value) => characters = value);
|
||||
await charactersQuery.then((value) => characters = value);
|
||||
|
||||
if (characters.isEmpty) {
|
||||
return null;
|
||||
}
|
||||
|
||||
await Future.wait({
|
||||
codepoints_query.then((value) => codepoints = value),
|
||||
kunyomis_query.then((value) => kunyomis = value),
|
||||
onyomis_query.then((value) => onyomis = value),
|
||||
meanings_query.then((value) => meanings = value),
|
||||
nanoris_query.then((value) => nanoris = value),
|
||||
dictionary_references_query.then((value) => dictionary_references = value),
|
||||
query_codes_query.then((value) => query_codes = value),
|
||||
radicals_query.then((value) => radicals = value),
|
||||
parts_query.then((value) => parts = value),
|
||||
readings_query.then((value) => readings = value),
|
||||
stroke_miscounts_query.then((value) => stroke_miscounts = value),
|
||||
codepointsQuery.then((value) => codepoints = value),
|
||||
kunyomisQuery.then((value) => kunyomis = value),
|
||||
onyomisQuery.then((value) => onyomis = value),
|
||||
meaningsQuery.then((value) => meanings = value),
|
||||
nanorisQuery.then((value) => nanoris = value),
|
||||
dictionaryReferencesQuery.then((value) => dictionaryReferences = value),
|
||||
queryCodesQuery.then((value) => queryCodes = value),
|
||||
radicalsQuery.then((value) => radicals = value),
|
||||
partsQuery.then((value) => parts = value),
|
||||
readingsQuery.then((value) => readings = value),
|
||||
strokeMiscountsQuery.then((value) => strokeMiscounts = value),
|
||||
// variants_query.then((value) => variants = value),
|
||||
});
|
||||
|
||||
@@ -157,9 +157,7 @@ Future<KanjiSearchResult?> searchKanjiWithDbConnection(
|
||||
: null;
|
||||
|
||||
final alternativeLanguageReadings = readings
|
||||
.groupListsBy(
|
||||
(item) => item['type'] as String,
|
||||
)
|
||||
.groupListsBy((item) => item['type'] as String)
|
||||
.map(
|
||||
(key, value) => MapEntry(
|
||||
key,
|
||||
@@ -168,20 +166,16 @@ Future<KanjiSearchResult?> searchKanjiWithDbConnection(
|
||||
);
|
||||
|
||||
// TODO: Add `SKIPMisclassification` to the entries
|
||||
final queryCodes = query_codes
|
||||
.groupListsBy(
|
||||
(item) => item['type'] as String,
|
||||
)
|
||||
final queryCodes_ = queryCodes
|
||||
.groupListsBy((item) => item['type'] as String)
|
||||
.map(
|
||||
(key, value) => MapEntry(
|
||||
key,
|
||||
value.map((item) => item['code'] as String).toList(),
|
||||
),
|
||||
(key, value) =>
|
||||
MapEntry(key, value.map((item) => item['code'] as String).toList()),
|
||||
);
|
||||
|
||||
// TODO: Add `volume` and `page` to the entries
|
||||
final dictionaryReferences = {
|
||||
for (final entry in dictionary_references)
|
||||
final dictionaryReferences_ = {
|
||||
for (final entry in dictionaryReferences)
|
||||
entry['type'] as String: entry['ref'] as String,
|
||||
};
|
||||
|
||||
@@ -213,9 +207,32 @@ Future<KanjiSearchResult?> searchKanjiWithDbConnection(
|
||||
},
|
||||
nanori: nanoris.map((item) => item['nanori'] as String).toList(),
|
||||
alternativeLanguageReadings: alternativeLanguageReadings,
|
||||
strokeMiscounts:
|
||||
stroke_miscounts.map((item) => item['strokeCount'] as int).toList(),
|
||||
queryCodes: queryCodes,
|
||||
dictionaryReferences: dictionaryReferences,
|
||||
strokeMiscounts: strokeMiscounts
|
||||
.map((item) => item['strokeCount'] as int)
|
||||
.toList(),
|
||||
queryCodes: queryCodes_,
|
||||
dictionaryReferences: dictionaryReferences_,
|
||||
);
|
||||
}
|
||||
|
||||
// TODO: Batch these per-kanji lookups with `IN` clauses to reduce the number of queries
|
||||
|
||||
Future<Map<String, KanjiSearchResult>> searchManyKanjiWithDbConnection(
|
||||
DatabaseExecutor connection,
|
||||
Set<String> kanji,
|
||||
) async {
|
||||
if (kanji.isEmpty) {
|
||||
return {};
|
||||
}
|
||||
|
||||
final results = <String, KanjiSearchResult>{};
|
||||
|
||||
for (final k in kanji) {
|
||||
final result = await searchKanjiWithDbConnection(connection, k);
|
||||
if (result != null) {
|
||||
results[k] = result;
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
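The helper above still issues one full lookup per character, which the TODO acknowledges. A hedged sketch of that direction, not part of this change, would prefetch the base rows with a single IN query against the "literal" column already used by the filter query earlier, and only then hydrate each result:

import 'package:jadb/table_names/kanjidic.dart';
import 'package:sqflite_common/sqlite_api.dart';

// Sketch only: batch the base character rows for a set of kanji.
Future<List<Map<String, Object?>>> prefetchCharacterRows(
  DatabaseExecutor connection,
  Set<String> kanji,
) {
  final placeholders = List.filled(kanji.length, '?').join(',');
  return connection.query(
    KANJIDICTableNames.character,
    where: 'literal IN ($placeholders)',
    whereArgs: kanji.toList(),
  );
}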
|
||||
@@ -19,14 +19,12 @@ Future<List<String>> searchRemainingRadicalsWithDbConnection(
|
||||
HAVING COUNT(DISTINCT "radical") = ?
|
||||
)
|
||||
''',
|
||||
[
|
||||
...radicals,
|
||||
radicals.length,
|
||||
],
|
||||
[...radicals, radicals.length],
|
||||
);
|
||||
|
||||
final remainingRadicals =
|
||||
queryResult.map((row) => row['radical'] as String).toList();
|
||||
final remainingRadicals = queryResult
|
||||
.map((row) => row['radical'] as String)
|
||||
.toList();
|
||||
|
||||
return remainingRadicals;
|
||||
}
|
||||
@@ -43,10 +41,7 @@ Future<List<String>> searchKanjiByRadicalsWithDbConnection(
|
||||
GROUP BY "kanji"
|
||||
HAVING COUNT(DISTINCT "radical") = ?
|
||||
''',
|
||||
[
|
||||
...radicals,
|
||||
radicals.length,
|
||||
],
|
||||
[...radicals, radicals.length],
|
||||
);
|
||||
|
||||
final kanji = queryResult.map((row) => row['kanji'] as String).toList();
|
||||
|
||||
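In both queries above, the bound parameters are the radicals followed by their count, so the HAVING COUNT(DISTINCT "radical") = ? clause keeps only kanji matched by every requested radical. A hedged call site, assuming the radicals argument is a List<String> and using arbitrary radicals:

import 'package:jadb/search/radical_search.dart';
import 'package:sqflite_common/sqlite_api.dart';

Future<void> radicalDemo(DatabaseExecutor db) async {
  // Kanji that contain both radicals.
  final kanji = await searchKanjiByRadicalsWithDbConnection(db, ['氵', '日']);

  // Radicals that still co-occur with that selection, e.g. to grey out
  // impossible choices in a radical picker.
  final remaining = await searchRemainingRadicalsWithDbConnection(db, ['氵', '日']);

  print('${kanji.length} kanji, ${remaining.length} radicals remain selectable');
}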
@@ -1,6 +1,5 @@
|
||||
import 'package:jadb/table_names/jmdict.dart';
|
||||
import 'package:jadb/table_names/tanos_jlpt.dart';
|
||||
import 'package:jadb/util/sqlite_utils.dart';
|
||||
import 'package:sqflite_common/sqflite.dart';
|
||||
|
||||
class LinearWordQueryData {
|
||||
@@ -25,6 +24,9 @@ class LinearWordQueryData {
|
||||
final List<Map<String, Object?>> readingElementRestrictions;
|
||||
final List<Map<String, Object?>> kanjiElementInfos;
|
||||
|
||||
final LinearWordQueryData? senseAntonymData;
|
||||
final LinearWordQueryData? senseSeeAlsoData;
|
||||
|
||||
const LinearWordQueryData({
|
||||
required this.senses,
|
||||
required this.readingElements,
|
||||
@@ -46,59 +48,62 @@ class LinearWordQueryData {
|
||||
required this.readingElementInfos,
|
||||
required this.readingElementRestrictions,
|
||||
required this.kanjiElementInfos,
|
||||
required this.senseAntonymData,
|
||||
required this.senseSeeAlsoData,
|
||||
});
|
||||
}
|
||||
|
||||
Future<LinearWordQueryData> fetchLinearWordQueryData(
|
||||
DatabaseExecutor connection,
|
||||
List<int> entryIds,
|
||||
) async {
|
||||
List<int> entryIds, {
|
||||
bool fetchXrefData = true,
|
||||
}) async {
|
||||
late final List<Map<String, Object?>> senses;
|
||||
final Future<List<Map<String, Object?>>> senses_query = connection.query(
|
||||
final Future<List<Map<String, Object?>>> sensesQuery = connection.query(
|
||||
JMdictTableNames.sense,
|
||||
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
|
||||
whereArgs: entryIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> readingElements;
|
||||
final Future<List<Map<String, Object?>>> readingElements_query =
|
||||
connection.query(
|
||||
JMdictTableNames.readingElement,
|
||||
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
|
||||
whereArgs: entryIds,
|
||||
orderBy: 'orderNum',
|
||||
);
|
||||
final Future<List<Map<String, Object?>>> readingelementsQuery = connection
|
||||
.query(
|
||||
JMdictTableNames.readingElement,
|
||||
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
|
||||
whereArgs: entryIds,
|
||||
orderBy: 'orderNum',
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> kanjiElements;
|
||||
final Future<List<Map<String, Object?>>> kanjiElements_query =
|
||||
connection.query(
|
||||
JMdictTableNames.kanjiElement,
|
||||
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
|
||||
whereArgs: entryIds,
|
||||
orderBy: 'orderNum',
|
||||
);
|
||||
final Future<List<Map<String, Object?>>> kanjielementsQuery = connection
|
||||
.query(
|
||||
JMdictTableNames.kanjiElement,
|
||||
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
|
||||
whereArgs: entryIds,
|
||||
orderBy: 'orderNum',
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> jlptTags;
|
||||
final Future<List<Map<String, Object?>>> jlptTags_query = connection.query(
|
||||
final Future<List<Map<String, Object?>>> jlpttagsQuery = connection.query(
|
||||
TanosJLPTTableNames.jlptTag,
|
||||
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
|
||||
whereArgs: entryIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> commonEntries;
|
||||
final Future<List<Map<String, Object?>>> commonEntries_query =
|
||||
connection.query(
|
||||
'JMdict_EntryCommon',
|
||||
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
|
||||
whereArgs: entryIds,
|
||||
);
|
||||
final Future<List<Map<String, Object?>>> commonentriesQuery = connection
|
||||
.query(
|
||||
'JMdict_EntryCommon',
|
||||
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
|
||||
whereArgs: entryIds,
|
||||
);
|
||||
|
||||
await Future.wait([
|
||||
senses_query.then((value) => senses = value),
|
||||
readingElements_query.then((value) => readingElements = value),
|
||||
kanjiElements_query.then((value) => kanjiElements = value),
|
||||
jlptTags_query.then((value) => jlptTags = value),
|
||||
commonEntries_query.then((value) => commonEntries = value),
|
||||
sensesQuery.then((value) => senses = value),
|
||||
readingelementsQuery.then((value) => readingElements = value),
|
||||
kanjielementsQuery.then((value) => kanjiElements = value),
|
||||
jlpttagsQuery.then((value) => jlptTags = value),
|
||||
commonentriesQuery.then((value) => commonEntries = value),
|
||||
]);
|
||||
|
||||
// Sense queries
|
||||
@@ -106,9 +111,9 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
|
||||
final senseIds = senses.map((sense) => sense['senseId'] as int).toList();
|
||||
|
||||
late final List<Map<String, Object?>> senseAntonyms;
|
||||
final Future<List<Map<String, Object?>>> senseAntonyms_query =
|
||||
connection.rawQuery(
|
||||
"""
|
||||
final Future<List<Map<String, Object?>>> senseantonymsQuery = connection
|
||||
.rawQuery(
|
||||
"""
|
||||
SELECT
|
||||
"${JMdictTableNames.senseAntonyms}".senseId,
|
||||
"${JMdictTableNames.senseAntonyms}".ambiguous,
|
||||
@@ -125,81 +130,81 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
|
||||
"${JMdictTableNames.senseAntonyms}"."senseId",
|
||||
"${JMdictTableNames.senseAntonyms}"."xrefEntryId"
|
||||
""",
|
||||
[...senseIds],
|
||||
);
|
||||
[...senseIds],
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> senseDialects;
|
||||
final Future<List<Map<String, Object?>>> senseDialects_query =
|
||||
connection.query(
|
||||
JMdictTableNames.senseDialect,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
final Future<List<Map<String, Object?>>> sensedialectsQuery = connection
|
||||
.query(
|
||||
JMdictTableNames.senseDialect,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> senseFields;
|
||||
final Future<List<Map<String, Object?>>> senseFields_query = connection.query(
|
||||
final Future<List<Map<String, Object?>>> sensefieldsQuery = connection.query(
|
||||
JMdictTableNames.senseField,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> senseGlossaries;
|
||||
final Future<List<Map<String, Object?>>> senseGlossaries_query =
|
||||
connection.query(
|
||||
JMdictTableNames.senseGlossary,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
final Future<List<Map<String, Object?>>> senseglossariesQuery = connection
|
||||
.query(
|
||||
JMdictTableNames.senseGlossary,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> senseInfos;
|
||||
final Future<List<Map<String, Object?>>> senseInfos_query = connection.query(
|
||||
final Future<List<Map<String, Object?>>> senseinfosQuery = connection.query(
|
||||
JMdictTableNames.senseInfo,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> senseLanguageSources;
|
||||
final Future<List<Map<String, Object?>>> senseLanguageSources_query =
|
||||
final Future<List<Map<String, Object?>>> senselanguagesourcesQuery =
|
||||
connection.query(
|
||||
JMdictTableNames.senseLanguageSource,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
JMdictTableNames.senseLanguageSource,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> senseMiscs;
|
||||
final Future<List<Map<String, Object?>>> senseMiscs_query = connection.query(
|
||||
final Future<List<Map<String, Object?>>> sensemiscsQuery = connection.query(
|
||||
JMdictTableNames.senseMisc,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> sensePOSs;
|
||||
final Future<List<Map<String, Object?>>> sensePOSs_query = connection.query(
|
||||
final Future<List<Map<String, Object?>>> sensepossQuery = connection.query(
|
||||
JMdictTableNames.sensePOS,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> senseRestrictedToKanjis;
|
||||
final Future<List<Map<String, Object?>>> senseRestrictedToKanjis_query =
|
||||
final Future<List<Map<String, Object?>>> senserestrictedtokanjisQuery =
|
||||
connection.query(
|
||||
JMdictTableNames.senseRestrictedToKanji,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
JMdictTableNames.senseRestrictedToKanji,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> senseRestrictedToReadings;
|
||||
final Future<List<Map<String, Object?>>> senseRestrictedToReadings_query =
|
||||
final Future<List<Map<String, Object?>>> senserestrictedtoreadingsQuery =
|
||||
connection.query(
|
||||
JMdictTableNames.senseRestrictedToReading,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
JMdictTableNames.senseRestrictedToReading,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> senseSeeAlsos;
|
||||
final Future<List<Map<String, Object?>>> senseSeeAlsos_query =
|
||||
connection.rawQuery(
|
||||
"""
|
||||
final Future<List<Map<String, Object?>>> senseseealsosQuery = connection
|
||||
.rawQuery(
|
||||
"""
|
||||
SELECT
|
||||
"${JMdictTableNames.senseSeeAlso}"."senseId",
|
||||
"${JMdictTableNames.senseSeeAlso}"."ambiguous",
|
||||
@@ -216,16 +221,16 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
|
||||
"${JMdictTableNames.senseSeeAlso}"."senseId",
|
||||
"${JMdictTableNames.senseSeeAlso}"."xrefEntryId"
|
||||
""",
|
||||
[...senseIds],
|
||||
);
|
||||
[...senseIds],
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> exampleSentences;
|
||||
final Future<List<Map<String, Object?>>> exampleSentences_query =
|
||||
connection.query(
|
||||
'JMdict_ExampleSentence',
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
final Future<List<Map<String, Object?>>> examplesentencesQuery = connection
|
||||
.query(
|
||||
'JMdict_ExampleSentence',
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
// Reading queries
|
||||
|
||||
@@ -234,20 +239,22 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
|
||||
.toList();
|
||||
|
||||
late final List<Map<String, Object?>> readingElementInfos;
|
||||
final Future<List<Map<String, Object?>>> readingElementInfos_query =
|
||||
final Future<List<Map<String, Object?>>> readingelementinfosQuery =
|
||||
connection.query(
|
||||
JMdictTableNames.readingInfo,
|
||||
where: '(elementId) IN (${List.filled(readingIds.length, '?').join(',')})',
|
||||
whereArgs: readingIds,
|
||||
);
|
||||
JMdictTableNames.readingInfo,
|
||||
where:
|
||||
'(elementId) IN (${List.filled(readingIds.length, '?').join(',')})',
|
||||
whereArgs: readingIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> readingElementRestrictions;
|
||||
final Future<List<Map<String, Object?>>> readingElementRestrictions_query =
|
||||
final Future<List<Map<String, Object?>>> readingelementrestrictionsQuery =
|
||||
connection.query(
|
||||
JMdictTableNames.readingRestriction,
|
||||
where: '(elementId) IN (${List.filled(readingIds.length, '?').join(',')})',
|
||||
whereArgs: readingIds,
|
||||
);
|
||||
JMdictTableNames.readingRestriction,
|
||||
where:
|
||||
'(elementId) IN (${List.filled(readingIds.length, '?').join(',')})',
|
||||
whereArgs: readingIds,
|
||||
);
|
||||
|
||||
// Kanji queries
|
||||
|
||||
@@ -256,32 +263,64 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
|
||||
.toList();
|
||||
|
||||
late final List<Map<String, Object?>> kanjiElementInfos;
|
||||
final Future<List<Map<String, Object?>>> kanjiElementInfos_query =
|
||||
connection.query(
|
||||
JMdictTableNames.kanjiInfo,
|
||||
where: '(elementId) IN (${List.filled(kanjiIds.length, '?').join(',')})',
|
||||
whereArgs: kanjiIds,
|
||||
);
|
||||
final Future<List<Map<String, Object?>>> kanjielementinfosQuery = connection
|
||||
.query(
|
||||
JMdictTableNames.kanjiInfo,
|
||||
where:
|
||||
'(elementId) IN (${List.filled(kanjiIds.length, '?').join(',')})',
|
||||
whereArgs: kanjiIds,
|
||||
);
|
||||
|
||||
// Xref data queries
|
||||
await Future.wait([
|
||||
senseantonymsQuery.then((value) => senseAntonyms = value),
|
||||
senseseealsosQuery.then((value) => senseSeeAlsos = value),
|
||||
]);
|
||||
|
||||
late final LinearWordQueryData? senseAntonymData;
|
||||
final Future<LinearWordQueryData?> senseantonymdataQuery =
|
||||
fetchXrefData
|
||||
? fetchLinearWordQueryData(
|
||||
connection,
|
||||
senseAntonyms
|
||||
.map((antonym) => antonym['xrefEntryId'] as int)
|
||||
.toList(),
|
||||
fetchXrefData: false,
|
||||
)
|
||||
: Future.value(null);
|
||||
|
||||
late final LinearWordQueryData? senseSeeAlsoData;
|
||||
final Future<LinearWordQueryData?> senseseealsodataQuery =
|
||||
fetchXrefData
|
||||
? fetchLinearWordQueryData(
|
||||
connection,
|
||||
senseSeeAlsos.map((seeAlso) => seeAlso['xrefEntryId'] as int).toList(),
|
||||
fetchXrefData: false,
|
||||
)
|
||||
: Future.value(null);
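Both xref queries above call back into fetchLinearWordQueryData with fetchXrefData: false, so cross-referenced entries are resolved exactly one level deep and a see-also chain (or an entry that references itself) cannot recurse indefinitely. A rough sketch of the guard, assuming the same signature as shown above:

// Illustrative: resolve nested xref data only when this call is the top level.
Future<LinearWordQueryData?> xrefDataFor(
  DatabaseExecutor connection,
  List<int> xrefEntryIds, {
  required bool fetchXrefData,
}) =>
    fetchXrefData
        ? fetchLinearWordQueryData(connection, xrefEntryIds, fetchXrefData: false)
        : Future.value(null);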
|
||||
|
||||
await Future.wait([
|
||||
senseAntonyms_query.then((value) => senseAntonyms = value),
|
||||
senseDialects_query.then((value) => senseDialects = value),
|
||||
senseFields_query.then((value) => senseFields = value),
|
||||
senseGlossaries_query.then((value) => senseGlossaries = value),
|
||||
senseInfos_query.then((value) => senseInfos = value),
|
||||
senseLanguageSources_query.then((value) => senseLanguageSources = value),
|
||||
senseMiscs_query.then((value) => senseMiscs = value),
|
||||
sensePOSs_query.then((value) => sensePOSs = value),
|
||||
senseRestrictedToKanjis_query
|
||||
.then((value) => senseRestrictedToKanjis = value),
|
||||
senseRestrictedToReadings_query
|
||||
.then((value) => senseRestrictedToReadings = value),
|
||||
senseSeeAlsos_query.then((value) => senseSeeAlsos = value),
|
||||
exampleSentences_query.then((value) => exampleSentences = value),
|
||||
readingElementInfos_query.then((value) => readingElementInfos = value),
|
||||
readingElementRestrictions_query
|
||||
.then((value) => readingElementRestrictions = value),
|
||||
kanjiElementInfos_query.then((value) => kanjiElementInfos = value),
|
||||
sensedialectsQuery.then((value) => senseDialects = value),
|
||||
sensefieldsQuery.then((value) => senseFields = value),
|
||||
senseglossariesQuery.then((value) => senseGlossaries = value),
|
||||
senseinfosQuery.then((value) => senseInfos = value),
|
||||
senselanguagesourcesQuery.then((value) => senseLanguageSources = value),
|
||||
sensemiscsQuery.then((value) => senseMiscs = value),
|
||||
sensepossQuery.then((value) => sensePOSs = value),
|
||||
senserestrictedtokanjisQuery.then(
|
||||
(value) => senseRestrictedToKanjis = value,
|
||||
),
|
||||
senserestrictedtoreadingsQuery.then(
|
||||
(value) => senseRestrictedToReadings = value,
|
||||
),
|
||||
examplesentencesQuery.then((value) => exampleSentences = value),
|
||||
readingelementinfosQuery.then((value) => readingElementInfos = value),
|
||||
readingelementrestrictionsQuery.then(
|
||||
(value) => readingElementRestrictions = value,
|
||||
),
|
||||
kanjielementinfosQuery.then((value) => kanjiElementInfos = value),
|
||||
senseantonymdataQuery.then((value) => senseAntonymData = value),
|
||||
senseseealsodataQuery.then((value) => senseSeeAlsoData = value),
|
||||
]);
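Each table read above is started as a Future first and only awaited here, with the results written into `late final` locals from the `.then` callbacks, so the per-table round-trips are issued together rather than one after another (how much actually overlaps depends on the underlying sqflite backend). The shape of the pattern, reduced to two queries with placeholder table names:

import 'package:sqflite_common/sqlite_api.dart';

// Illustrative only: 'TableA' / 'TableB' stand in for real table names.
Future<(List<Map<String, Object?>>, List<Map<String, Object?>>)> loadBoth(
  DatabaseExecutor connection,
) async {
  late final List<Map<String, Object?>> a;
  late final List<Map<String, Object?>> b;

  // Both queries are created (and therefore started) before either is awaited.
  final aQuery = connection.query('TableA');
  final bQuery = connection.query('TableB');

  await Future.wait([
    aQuery.then((value) => a = value),
    bQuery.then((value) => b = value),
  ]);

  return (a, b);
}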
|
||||
|
||||
return LinearWordQueryData(
|
||||
@@ -305,5 +344,7 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
|
||||
readingElementInfos: readingElementInfos,
|
||||
readingElementRestrictions: readingElementRestrictions,
|
||||
kanjiElementInfos: kanjiElementInfos,
|
||||
senseAntonymData: senseAntonymData,
|
||||
senseSeeAlsoData: senseSeeAlsoData,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import 'package:jadb/table_names/jmdict.dart';
|
||||
import 'package:jadb/search/word_search/word_search.dart';
|
||||
import 'package:jadb/table_names/jmdict.dart';
|
||||
import 'package:jadb/util/text_filtering.dart';
|
||||
import 'package:sqflite_common/sqlite_api.dart';
|
||||
|
||||
@@ -37,90 +37,105 @@ String _filterFTSSensitiveCharacters(String word) {
|
||||
.replaceAll('(', '')
|
||||
.replaceAll(')', '')
|
||||
.replaceAll('^', '')
|
||||
.replaceAll('\"', '');
|
||||
.replaceAll('"', '');
|
||||
}
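Characters that carry meaning in SQLite FTS query syntax are stripped here before the user's term is handed to the `MATCH ? || '*'` prefix query below, so stray quotes or parentheses cannot change the query structure. An equivalent single-regex form of the replacements visible in this hunk (the full chain above may strip a few more characters than are shown here):

// Covers only the replacements visible above: ( ) ^ and the double quote.
String filterFtsSensitive(String word) =>
    word.replaceAll(RegExp(r'[()^"]'), '');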
|
||||
|
||||
(String, List<Object?>) _kanjiReadingTemplate(
|
||||
String tableName,
|
||||
String word, {
|
||||
int pageSize = 10,
|
||||
int? pageSize,
|
||||
int? offset,
|
||||
bool countOnly = false,
|
||||
}) =>
|
||||
(
|
||||
'''
|
||||
}) {
|
||||
assert(
|
||||
tableName == JMdictTableNames.kanjiElement ||
|
||||
tableName == JMdictTableNames.readingElement,
|
||||
);
|
||||
assert(!countOnly || pageSize == null);
|
||||
assert(!countOnly || offset == null);
|
||||
assert(pageSize == null || pageSize > 0);
|
||||
assert(offset == null || offset >= 0);
|
||||
assert(
|
||||
offset == null || pageSize != null,
|
||||
'Offset should only be used with pageSize set',
|
||||
);
|
||||
|
||||
return (
|
||||
'''
|
||||
WITH
|
||||
fts_results AS (
|
||||
SELECT DISTINCT
|
||||
"${tableName}"."entryId",
|
||||
"$tableName"."entryId",
|
||||
100
|
||||
+ (("${tableName}FTS"."reading" = ?) * 10000)
|
||||
+ "JMdict_EntryScore"."score"
|
||||
AS "score"
|
||||
FROM "${tableName}FTS"
|
||||
JOIN "${tableName}" USING ("elementId")
|
||||
JOIN "$tableName" USING ("elementId")
|
||||
JOIN "JMdict_EntryScore" USING ("elementId")
|
||||
WHERE "${tableName}FTS"."reading" MATCH ? || '*'
|
||||
AND "JMdict_EntryScore"."type" = '${tableName == JMdictTableNames.kanjiElement ? 'k' : 'r'}'
|
||||
${!countOnly ? 'LIMIT ?' : ''}
|
||||
),
|
||||
non_fts_results AS (
|
||||
SELECT DISTINCT
|
||||
"${tableName}"."entryId",
|
||||
"$tableName"."entryId",
|
||||
50
|
||||
+ "JMdict_EntryScore"."score"
|
||||
AS "score"
|
||||
FROM "${tableName}"
|
||||
FROM "$tableName"
|
||||
JOIN "JMdict_EntryScore" USING ("elementId")
|
||||
WHERE "reading" LIKE '%' || ? || '%'
|
||||
AND "${tableName}"."entryId" NOT IN (SELECT "entryId" FROM "fts_results")
|
||||
AND "$tableName"."entryId" NOT IN (SELECT "entryId" FROM "fts_results")
|
||||
AND "JMdict_EntryScore"."type" = '${tableName == JMdictTableNames.kanjiElement ? 'k' : 'r'}'
|
||||
${!countOnly ? 'LIMIT ?' : ''}
|
||||
)
|
||||
|
||||
${countOnly ? 'SELECT COUNT("entryId") AS count' : 'SELECT "entryId", MAX("score") AS "score"'}
|
||||
SELECT ${countOnly ? 'COUNT(DISTINCT "entryId") AS count' : '"entryId", MAX("score") AS "score"'}
|
||||
FROM (
|
||||
SELECT * FROM fts_results
|
||||
SELECT * FROM "fts_results"
|
||||
UNION
|
||||
SELECT * FROM non_fts_results
|
||||
SELECT * FROM "non_fts_results"
|
||||
)
|
||||
GROUP BY "entryId"
|
||||
ORDER BY
|
||||
"score" DESC,
|
||||
"entryId" ASC
|
||||
${!countOnly ? 'GROUP BY "entryId"' : ''}
|
||||
${!countOnly ? 'ORDER BY "score" DESC, "entryId" ASC' : ''}
|
||||
${pageSize != null ? 'LIMIT ?' : ''}
|
||||
${offset != null ? 'OFFSET ?' : ''}
|
||||
'''
|
||||
.trim(),
|
||||
[
|
||||
_filterFTSSensitiveCharacters(word),
|
||||
_filterFTSSensitiveCharacters(word),
|
||||
if (!countOnly) pageSize,
|
||||
_filterFTSSensitiveCharacters(word),
|
||||
if (!countOnly) pageSize,
|
||||
]
|
||||
);
|
||||
.trim(),
|
||||
[
|
||||
_filterFTSSensitiveCharacters(word),
|
||||
_filterFTSSensitiveCharacters(word),
|
||||
_filterFTSSensitiveCharacters(word),
|
||||
?pageSize,
|
||||
?offset,
|
||||
],
|
||||
);
|
||||
}
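The argument list returned alongside the SQL above has to line up with the `?` placeholders that survive the string interpolation: the filtered word for the placeholders that are always present, plus `pageSize` and `offset` only when the corresponding `LIMIT ?` / `OFFSET ?` fragments were emitted. The `?pageSize` / `?offset` entries use Dart 3.8's null-aware collection elements, which drop the element entirely when the expression is null. A reduced, illustrative sketch of keeping SQL fragments and bind values in sync:

// Illustrative: append LIMIT/OFFSET fragments and their bind values together.
(String, List<Object?>) withPaging(
  String baseSql,
  List<Object?> baseArgs, {
  int? pageSize,
  int? offset,
}) =>
    (
      '$baseSql '
          '${pageSize != null ? 'LIMIT ?' : ''} '
          '${offset != null ? 'OFFSET ?' : ''}',
      [...baseArgs, ?pageSize, ?offset], // null-aware elements: omitted when null
    );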
|
||||
|
||||
Future<List<ScoredEntryId>> _queryKanji(
|
||||
DatabaseExecutor connection,
|
||||
String word,
|
||||
int pageSize,
|
||||
int? pageSize,
|
||||
int? offset,
|
||||
) {
|
||||
final (query, args) = _kanjiReadingTemplate(
|
||||
JMdictTableNames.kanjiElement,
|
||||
word,
|
||||
pageSize: pageSize,
|
||||
offset: offset,
|
||||
);
|
||||
return connection.rawQuery(query, args).then((result) => result
|
||||
.map((row) => ScoredEntryId(
|
||||
row['entryId'] as int,
|
||||
row['score'] as int,
|
||||
))
|
||||
.toList());
|
||||
return connection
|
||||
.rawQuery(query, args)
|
||||
.then(
|
||||
(result) => result
|
||||
.map(
|
||||
(row) =>
|
||||
ScoredEntryId(row['entryId'] as int, row['score'] as int),
|
||||
)
|
||||
.toList(),
|
||||
);
|
||||
}
|
||||
|
||||
Future<int> _queryKanjiCount(
|
||||
DatabaseExecutor connection,
|
||||
String word,
|
||||
) {
|
||||
Future<int> _queryKanjiCount(DatabaseExecutor connection, String word) {
|
||||
final (query, args) = _kanjiReadingTemplate(
|
||||
JMdictTableNames.kanjiElement,
|
||||
word,
|
||||
@@ -128,32 +143,34 @@ Future<int> _queryKanjiCount(
|
||||
);
|
||||
return connection
|
||||
.rawQuery(query, args)
|
||||
.then((result) => result.first['count'] as int);
|
||||
.then((result) => result.firstOrNull?['count'] as int? ?? 0);
|
||||
}
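The count queries now read the single aggregate row defensively: `firstOrNull` returns null instead of throwing a StateError if the result set ever comes back empty, and the trailing `?? 0` keeps the return type a plain int. The same guard in isolation:

// Safe read of a one-row COUNT(...) result; an empty result set yields 0.
int countFromRows(List<Map<String, Object?>> rows) =>
    rows.firstOrNull?['count'] as int? ?? 0;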
|
||||
|
||||
Future<List<ScoredEntryId>> _queryKana(
|
||||
DatabaseExecutor connection,
|
||||
String word,
|
||||
int pageSize,
|
||||
int? pageSize,
|
||||
int? offset,
|
||||
) {
|
||||
final (query, args) = _kanjiReadingTemplate(
|
||||
JMdictTableNames.readingElement,
|
||||
word,
|
||||
pageSize: pageSize,
|
||||
offset: offset,
|
||||
);
|
||||
return connection.rawQuery(query, args).then((result) => result
|
||||
.map((row) => ScoredEntryId(
|
||||
row['entryId'] as int,
|
||||
row['score'] as int,
|
||||
))
|
||||
.toList());
|
||||
return connection
|
||||
.rawQuery(query, args)
|
||||
.then(
|
||||
(result) => result
|
||||
.map(
|
||||
(row) =>
|
||||
ScoredEntryId(row['entryId'] as int, row['score'] as int),
|
||||
)
|
||||
.toList(),
|
||||
);
|
||||
}
|
||||
|
||||
Future<int> _queryKanaCount(
|
||||
DatabaseExecutor connection,
|
||||
String word,
|
||||
) {
|
||||
Future<int> _queryKanaCount(DatabaseExecutor connection, String word) {
|
||||
final (query, args) = _kanjiReadingTemplate(
|
||||
JMdictTableNames.readingElement,
|
||||
word,
|
||||
@@ -161,15 +178,22 @@ Future<int> _queryKanaCount(
|
||||
);
|
||||
return connection
|
||||
.rawQuery(query, args)
|
||||
.then((result) => result.first['count'] as int);
|
||||
.then((result) => result.firstOrNull?['count'] as int? ?? 0);
|
||||
}
|
||||
|
||||
Future<List<ScoredEntryId>> _queryEnglish(
|
||||
DatabaseExecutor connection,
|
||||
String word,
|
||||
int pageSize,
|
||||
int? pageSize,
|
||||
int? offset,
|
||||
) async {
|
||||
assert(pageSize == null || pageSize > 0);
|
||||
assert(offset == null || offset >= 0);
|
||||
assert(
|
||||
offset == null || pageSize != null,
|
||||
'Offset should only be used with pageSize set',
|
||||
);
|
||||
|
||||
final result = await connection.rawQuery(
|
||||
'''
|
||||
SELECT
|
||||
@@ -191,41 +215,25 @@ Future<List<ScoredEntryId>> _queryEnglish(
|
||||
OFFSET ?
|
||||
'''
|
||||
.trim(),
|
||||
[
|
||||
word,
|
||||
word,
|
||||
word,
|
||||
'%${word.replaceAll('%', '')}%',
|
||||
pageSize,
|
||||
offset,
|
||||
],
|
||||
[word, word, word, '%${word.replaceAll('%', '')}%', pageSize, offset],
|
||||
);
|
||||
|
||||
return result
|
||||
.map((row) => ScoredEntryId(
|
||||
row['entryId'] as int,
|
||||
row['score'] as int,
|
||||
))
|
||||
.map((row) => ScoredEntryId(row['entryId'] as int, row['score'] as int))
|
||||
.toList();
|
||||
}
|
||||
|
||||
Future<int> _queryEnglishCount(
|
||||
DatabaseExecutor connection,
|
||||
String word,
|
||||
) async {
|
||||
Future<int> _queryEnglishCount(DatabaseExecutor connection, String word) async {
|
||||
final result = await connection.rawQuery(
|
||||
'''
|
||||
|
||||
SELECT
|
||||
COUNT(DISTINCT "${JMdictTableNames.sense}"."entryId") AS "count"
|
||||
FROM "${JMdictTableNames.senseGlossary}"
|
||||
JOIN "${JMdictTableNames.sense}" USING ("senseId")
|
||||
WHERE "${JMdictTableNames.senseGlossary}"."phrase" LIKE ?
|
||||
'''
|
||||
SELECT
|
||||
COUNT(DISTINCT "${JMdictTableNames.sense}"."entryId") AS "count"
|
||||
FROM "${JMdictTableNames.senseGlossary}"
|
||||
JOIN "${JMdictTableNames.sense}" USING ("senseId")
|
||||
WHERE "${JMdictTableNames.senseGlossary}"."phrase" LIKE ?
|
||||
'''
|
||||
.trim(),
|
||||
[
|
||||
'%$word%',
|
||||
],
|
||||
['%$word%'],
|
||||
);
|
||||
|
||||
return result.first['count'] as int;
|
||||
@@ -235,55 +243,34 @@ Future<List<ScoredEntryId>> fetchEntryIds(
|
||||
DatabaseExecutor connection,
|
||||
String word,
|
||||
SearchMode searchMode,
|
||||
int pageSize,
|
||||
int? pageSize,
|
||||
int? offset,
|
||||
) async {
|
||||
if (searchMode == SearchMode.Auto) {
|
||||
searchMode = _determineSearchMode(word);
|
||||
}
|
||||
|
||||
assert(
|
||||
word.isNotEmpty,
|
||||
'Word should not be empty when fetching entry IDs',
|
||||
);
|
||||
assert(word.isNotEmpty, 'Word should not be empty when fetching entry IDs');
|
||||
|
||||
late final List<ScoredEntryId> entryIds;
|
||||
switch (searchMode) {
|
||||
case SearchMode.Kanji:
|
||||
entryIds = await _queryKanji(
|
||||
connection,
|
||||
word,
|
||||
pageSize,
|
||||
offset,
|
||||
);
|
||||
entryIds = await _queryKanji(connection, word, pageSize, offset);
|
||||
break;
|
||||
|
||||
case SearchMode.Kana:
|
||||
entryIds = await _queryKana(
|
||||
connection,
|
||||
word,
|
||||
pageSize,
|
||||
offset,
|
||||
);
|
||||
entryIds = await _queryKana(connection, word, pageSize, offset);
|
||||
break;
|
||||
|
||||
case SearchMode.English:
|
||||
entryIds = await _queryEnglish(
|
||||
connection,
|
||||
word,
|
||||
pageSize,
|
||||
offset,
|
||||
);
|
||||
entryIds = await _queryEnglish(connection, word, pageSize, offset);
|
||||
break;
|
||||
|
||||
case SearchMode.MixedKana:
|
||||
case SearchMode.MixedKanji:
|
||||
default:
|
||||
throw UnimplementedError(
|
||||
'Search mode $searchMode is not implemented',
|
||||
);
|
||||
throw UnimplementedError('Search mode $searchMode is not implemented');
|
||||
}
|
||||
;
|
||||
|
||||
return entryIds;
|
||||
}
|
||||
@@ -297,41 +284,27 @@ Future<int?> fetchEntryIdCount(
|
||||
searchMode = _determineSearchMode(word);
|
||||
}
|
||||
|
||||
assert(
|
||||
word.isNotEmpty,
|
||||
'Word should not be empty when fetching entry IDs',
|
||||
);
|
||||
assert(word.isNotEmpty, 'Word should not be empty when fetching entry IDs');
|
||||
|
||||
late final int? entryIdCount;
|
||||
|
||||
switch (searchMode) {
|
||||
case SearchMode.Kanji:
|
||||
entryIdCount = await _queryKanjiCount(
|
||||
connection,
|
||||
word,
|
||||
);
|
||||
entryIdCount = await _queryKanjiCount(connection, word);
|
||||
break;
|
||||
|
||||
case SearchMode.Kana:
|
||||
entryIdCount = await _queryKanaCount(
|
||||
connection,
|
||||
word,
|
||||
);
|
||||
entryIdCount = await _queryKanaCount(connection, word);
|
||||
break;
|
||||
|
||||
case SearchMode.English:
|
||||
entryIdCount = await _queryEnglishCount(
|
||||
connection,
|
||||
word,
|
||||
);
|
||||
entryIdCount = await _queryEnglishCount(connection, word);
|
||||
break;
|
||||
|
||||
case SearchMode.MixedKana:
|
||||
case SearchMode.MixedKanji:
|
||||
default:
|
||||
throw UnimplementedError(
|
||||
'Search mode $searchMode is not implemented',
|
||||
);
|
||||
throw UnimplementedError('Search mode $searchMode is not implemented');
|
||||
}
|
||||
|
||||
return entryIdCount;
|
||||
|
||||
@@ -12,50 +12,37 @@ import 'package:jadb/models/word_search/word_search_sense.dart';
|
||||
import 'package:jadb/models/word_search/word_search_sense_language_source.dart';
|
||||
import 'package:jadb/models/word_search/word_search_sources.dart';
|
||||
import 'package:jadb/models/word_search/word_search_xref_entry.dart';
|
||||
import 'package:jadb/search/word_search/data_query.dart';
|
||||
import 'package:jadb/search/word_search/entry_id_query.dart';
|
||||
|
||||
List<WordSearchResult> regroupWordSearchResults({
|
||||
required List<ScoredEntryId> entryIds,
|
||||
required List<Map<String, Object?>> readingElements,
|
||||
required List<Map<String, Object?>> kanjiElements,
|
||||
required List<Map<String, Object?>> jlptTags,
|
||||
required List<Map<String, Object?>> commonEntries,
|
||||
required List<Map<String, Object?>> senses,
|
||||
required List<Map<String, Object?>> senseAntonyms,
|
||||
required List<Map<String, Object?>> senseDialects,
|
||||
required List<Map<String, Object?>> senseFields,
|
||||
required List<Map<String, Object?>> senseGlossaries,
|
||||
required List<Map<String, Object?>> senseInfos,
|
||||
required List<Map<String, Object?>> senseLanguageSources,
|
||||
required List<Map<String, Object?>> senseMiscs,
|
||||
required List<Map<String, Object?>> sensePOSs,
|
||||
required List<Map<String, Object?>> senseRestrictedToKanjis,
|
||||
required List<Map<String, Object?>> senseRestrictedToReadings,
|
||||
required List<Map<String, Object?>> senseSeeAlsos,
|
||||
required List<Map<String, Object?>> exampleSentences,
|
||||
required List<Map<String, Object?>> readingElementInfos,
|
||||
required List<Map<String, Object?>> readingElementRestrictions,
|
||||
required List<Map<String, Object?>> kanjiElementInfos,
|
||||
required LinearWordQueryData linearWordQueryData,
|
||||
}) {
|
||||
final List<WordSearchResult> results = [];
|
||||
|
||||
final commonEntryIds =
|
||||
commonEntries.map((entry) => entry['entryId'] as int).toSet();
|
||||
final commonEntryIds = linearWordQueryData.commonEntries
|
||||
.map((entry) => entry['entryId'] as int)
|
||||
.toSet();
|
||||
|
||||
for (final scoredEntryId in entryIds) {
|
||||
final List<Map<String, Object?>> entryReadingElements = readingElements
|
||||
final List<Map<String, Object?>> entryReadingElements = linearWordQueryData
|
||||
.readingElements
|
||||
.where((element) => element['entryId'] == scoredEntryId.entryId)
|
||||
.toList();
|
||||
|
||||
final List<Map<String, Object?>> entryKanjiElements = kanjiElements
|
||||
final List<Map<String, Object?>> entryKanjiElements = linearWordQueryData
|
||||
.kanjiElements
|
||||
.where((element) => element['entryId'] == scoredEntryId.entryId)
|
||||
.toList();
|
||||
|
||||
final List<Map<String, Object?>> entryJlptTags = jlptTags
|
||||
final List<Map<String, Object?>> entryJlptTags = linearWordQueryData
|
||||
.jlptTags
|
||||
.where((element) => element['entryId'] == scoredEntryId.entryId)
|
||||
.toList();
|
||||
|
||||
final jlptLevel = entryJlptTags
|
||||
final jlptLevel =
|
||||
entryJlptTags
|
||||
.map((e) => JlptLevel.fromString(e['jlptLevel'] as String?))
|
||||
.sorted((a, b) => b.compareTo(a))
|
||||
.firstOrNull ??
|
||||
@@ -63,7 +50,7 @@ List<WordSearchResult> regroupWordSearchResults({
|
||||
|
||||
final isCommon = commonEntryIds.contains(scoredEntryId.entryId);
|
||||
|
||||
final List<Map<String, Object?>> entrySenses = senses
|
||||
final List<Map<String, Object?>> entrySenses = linearWordQueryData.senses
|
||||
.where((element) => element['entryId'] == scoredEntryId.entryId)
|
||||
.toList();
|
||||
|
||||
@@ -71,25 +58,28 @@ List<WordSearchResult> regroupWordSearchResults({
|
||||
entryId: scoredEntryId.entryId,
|
||||
readingElements: entryReadingElements,
|
||||
kanjiElements: entryKanjiElements,
|
||||
readingElementInfos: readingElementInfos,
|
||||
readingElementRestrictions: readingElementRestrictions,
|
||||
kanjiElementInfos: kanjiElementInfos,
|
||||
readingElementInfos: linearWordQueryData.readingElementInfos,
|
||||
readingElementRestrictions:
|
||||
linearWordQueryData.readingElementRestrictions,
|
||||
kanjiElementInfos: linearWordQueryData.kanjiElementInfos,
|
||||
);
|
||||
|
||||
final List<WordSearchSense> entrySensesGrouped = _regroup_senses(
|
||||
senses: entrySenses,
|
||||
senseAntonyms: senseAntonyms,
|
||||
senseDialects: senseDialects,
|
||||
senseFields: senseFields,
|
||||
senseGlossaries: senseGlossaries,
|
||||
senseInfos: senseInfos,
|
||||
senseLanguageSources: senseLanguageSources,
|
||||
senseMiscs: senseMiscs,
|
||||
sensePOSs: sensePOSs,
|
||||
senseRestrictedToKanjis: senseRestrictedToKanjis,
|
||||
senseRestrictedToReadings: senseRestrictedToReadings,
|
||||
senseSeeAlsos: senseSeeAlsos,
|
||||
exampleSentences: exampleSentences,
|
||||
senseAntonyms: linearWordQueryData.senseAntonyms,
|
||||
senseDialects: linearWordQueryData.senseDialects,
|
||||
senseFields: linearWordQueryData.senseFields,
|
||||
senseGlossaries: linearWordQueryData.senseGlossaries,
|
||||
senseInfos: linearWordQueryData.senseInfos,
|
||||
senseLanguageSources: linearWordQueryData.senseLanguageSources,
|
||||
senseMiscs: linearWordQueryData.senseMiscs,
|
||||
sensePOSs: linearWordQueryData.sensePOSs,
|
||||
senseRestrictedToKanjis: linearWordQueryData.senseRestrictedToKanjis,
|
||||
senseRestrictedToReadings: linearWordQueryData.senseRestrictedToReadings,
|
||||
senseSeeAlsos: linearWordQueryData.senseSeeAlsos,
|
||||
exampleSentences: linearWordQueryData.exampleSentences,
|
||||
senseSeeAlsosXrefData: linearWordQueryData.senseSeeAlsoData,
|
||||
senseAntonymsXrefData: linearWordQueryData.senseAntonymData,
|
||||
);
|
||||
|
||||
results.add(
|
||||
@@ -102,10 +92,7 @@ List<WordSearchResult> regroupWordSearchResults({
|
||||
readingInfo: entryReadingElementsGrouped.readingInfos,
|
||||
senses: entrySensesGrouped,
|
||||
jlptLevel: jlptLevel,
|
||||
sources: const WordSearchSources(
|
||||
jmdict: true,
|
||||
jmnedict: false,
|
||||
),
|
||||
sources: const WordSearchSources(jmdict: true, jmnedict: false),
|
||||
),
|
||||
);
|
||||
}
|
||||
@@ -135,8 +122,9 @@ GroupedWordResult _regroup_words({
|
||||
}) {
|
||||
final List<WordSearchRuby> rubys = [];
|
||||
|
||||
final kanjiElements_ =
|
||||
kanjiElements.where((element) => element['entryId'] == entryId).toList();
|
||||
final kanjiElements_ = kanjiElements
|
||||
.where((element) => element['entryId'] == entryId)
|
||||
.toList();
|
||||
|
||||
final readingElements_ = readingElements
|
||||
.where((element) => element['entryId'] == entryId)
|
||||
@@ -148,9 +136,7 @@ GroupedWordResult _regroup_words({
|
||||
|
||||
for (final readingElement in readingElements_) {
|
||||
if (readingElement['doesNotMatchKanji'] == 1 || kanjiElements_.isEmpty) {
|
||||
final ruby = WordSearchRuby(
|
||||
base: readingElement['reading'] as String,
|
||||
);
|
||||
final ruby = WordSearchRuby(base: readingElement['reading'] as String);
|
||||
rubys.add(ruby);
|
||||
|
||||
continue;
|
||||
@@ -169,18 +155,12 @@ GroupedWordResult _regroup_words({
|
||||
continue;
|
||||
}
|
||||
|
||||
final ruby = WordSearchRuby(
|
||||
base: kanji,
|
||||
furigana: reading,
|
||||
);
|
||||
final ruby = WordSearchRuby(base: kanji, furigana: reading);
|
||||
rubys.add(ruby);
|
||||
}
|
||||
}
|
||||
|
||||
assert(
|
||||
rubys.isNotEmpty,
|
||||
'No readings found for entryId: $entryId',
|
||||
);
|
||||
assert(rubys.isNotEmpty, 'No readings found for entryId: $entryId');
|
||||
|
||||
final Map<int, String> readingElementIdsToReading = {
|
||||
for (final element in readingElements_)
|
||||
@@ -210,7 +190,7 @@ GroupedWordResult _regroup_words({
|
||||
kanjiInfos: {
|
||||
for (final kei in kanjiElementInfos_)
|
||||
kanjiElementIdsToReading[kei['elementId'] as int]!:
|
||||
JMdictKanjiInfo.fromId(kei['info'] as String),
|
||||
JMdictKanjiInfo.fromId(kei['info'] as String),
|
||||
},
|
||||
);
|
||||
}
|
||||
@@ -229,29 +209,41 @@ List<WordSearchSense> _regroup_senses({
|
||||
required List<Map<String, Object?>> senseRestrictedToReadings,
|
||||
required List<Map<String, Object?>> senseSeeAlsos,
|
||||
required List<Map<String, Object?>> exampleSentences,
|
||||
required LinearWordQueryData? senseSeeAlsosXrefData,
|
||||
required LinearWordQueryData? senseAntonymsXrefData,
|
||||
}) {
|
||||
final groupedSenseAntonyms =
|
||||
senseAntonyms.groupListsBy((element) => element['senseId'] as int);
|
||||
final groupedSenseDialects =
|
||||
senseDialects.groupListsBy((element) => element['senseId'] as int);
|
||||
final groupedSenseFields =
|
||||
senseFields.groupListsBy((element) => element['senseId'] as int);
|
||||
final groupedSenseGlossaries =
|
||||
senseGlossaries.groupListsBy((element) => element['senseId'] as int);
|
||||
final groupedSenseInfos =
|
||||
senseInfos.groupListsBy((element) => element['senseId'] as int);
|
||||
final groupedSenseLanguageSources =
|
||||
senseLanguageSources.groupListsBy((element) => element['senseId'] as int);
|
||||
final groupedSenseMiscs =
|
||||
senseMiscs.groupListsBy((element) => element['senseId'] as int);
|
||||
final groupedSensePOSs =
|
||||
sensePOSs.groupListsBy((element) => element['senseId'] as int);
|
||||
final groupedSenseRestrictedToKanjis = senseRestrictedToKanjis
|
||||
.groupListsBy((element) => element['senseId'] as int);
|
||||
final groupedSenseAntonyms = senseAntonyms.groupListsBy(
|
||||
(element) => element['senseId'] as int,
|
||||
);
|
||||
final groupedSenseDialects = senseDialects.groupListsBy(
|
||||
(element) => element['senseId'] as int,
|
||||
);
|
||||
final groupedSenseFields = senseFields.groupListsBy(
|
||||
(element) => element['senseId'] as int,
|
||||
);
|
||||
final groupedSenseGlossaries = senseGlossaries.groupListsBy(
|
||||
(element) => element['senseId'] as int,
|
||||
);
|
||||
final groupedSenseInfos = senseInfos.groupListsBy(
|
||||
(element) => element['senseId'] as int,
|
||||
);
|
||||
final groupedSenseLanguageSources = senseLanguageSources.groupListsBy(
|
||||
(element) => element['senseId'] as int,
|
||||
);
|
||||
final groupedSenseMiscs = senseMiscs.groupListsBy(
|
||||
(element) => element['senseId'] as int,
|
||||
);
|
||||
final groupedSensePOSs = sensePOSs.groupListsBy(
|
||||
(element) => element['senseId'] as int,
|
||||
);
|
||||
final groupedSenseRestrictedToKanjis = senseRestrictedToKanjis.groupListsBy(
|
||||
(element) => element['senseId'] as int,
|
||||
);
|
||||
final groupedSenseRestrictedToReadings = senseRestrictedToReadings
|
||||
.groupListsBy((element) => element['senseId'] as int);
|
||||
final groupedSenseSeeAlsos =
|
||||
senseSeeAlsos.groupListsBy((element) => element['senseId'] as int);
|
||||
final groupedSenseSeeAlsos = senseSeeAlsos.groupListsBy(
|
||||
(element) => element['senseId'] as int,
|
||||
);
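All of the flat per-sense rows are bucketed by senseId up front with `groupListsBy` from package:collection, so the loop below does map lookups instead of re-scanning each list with `where` for every sense. The idiom in isolation:

import 'package:collection/collection.dart';

// Bucket flat rows by their senseId once; look them up per sense afterwards.
Map<int, List<Map<String, Object?>>> bySenseId(
  List<Map<String, Object?>> rows,
) =>
    rows.groupListsBy((row) => row['senseId'] as int);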
|
||||
|
||||
final List<WordSearchSense> result = [];
|
||||
for (final sense in senses) {
|
||||
@@ -270,45 +262,82 @@ List<WordSearchSense> _regroup_senses({
|
||||
groupedSenseRestrictedToReadings[senseId] ?? [];
|
||||
final seeAlsos = groupedSenseSeeAlsos[senseId] ?? [];
|
||||
|
||||
final List<WordSearchResult> seeAlsosWordResults =
|
||||
senseSeeAlsosXrefData != null
|
||||
? regroupWordSearchResults(
|
||||
entryIds: seeAlsos
|
||||
.map((e) => ScoredEntryId(e['xrefEntryId'] as int, 0))
|
||||
.toList(),
|
||||
linearWordQueryData: senseSeeAlsosXrefData,
|
||||
)
|
||||
: [];
|
||||
final List<WordSearchResult> antonymsWordResults =
|
||||
senseAntonymsXrefData != null
|
||||
? regroupWordSearchResults(
|
||||
entryIds: antonyms
|
||||
.map((e) => ScoredEntryId(e['xrefEntryId'] as int, 0))
|
||||
.toList(),
|
||||
linearWordQueryData: senseAntonymsXrefData,
|
||||
)
|
||||
: [];
|
||||
|
||||
final resultSense = WordSearchSense(
|
||||
englishDefinitions: glossaries.map((e) => e['phrase'] as String).toList(),
|
||||
partsOfSpeech:
|
||||
pos.map((e) => JMdictPOS.fromId(e['pos'] as String)).toList(),
|
||||
seeAlso: seeAlsos
|
||||
.map((e) => WordSearchXrefEntry(
|
||||
entryId: e['xrefEntryId'] as int,
|
||||
baseWord: e['base'] as String,
|
||||
furigana: e['furigana'] as String?,
|
||||
ambiguous: e['ambiguous'] == 1,
|
||||
))
|
||||
partsOfSpeech: pos
|
||||
.map((e) => JMdictPOS.fromId(e['pos'] as String))
|
||||
.toList(),
|
||||
antonyms: antonyms
|
||||
.map((e) => WordSearchXrefEntry(
|
||||
entryId: e['xrefEntryId'] as int,
|
||||
baseWord: e['base'] as String,
|
||||
furigana: e['furigana'] as String?,
|
||||
ambiguous: e['ambiguous'] == 1,
|
||||
))
|
||||
seeAlso: seeAlsos.asMap().entries.map<WordSearchXrefEntry>((mapEntry) {
|
||||
final i = mapEntry.key;
|
||||
final e = mapEntry.value;
|
||||
|
||||
return WordSearchXrefEntry(
|
||||
entryId: e['xrefEntryId'] as int,
|
||||
baseWord: e['base'] as String,
|
||||
furigana: e['furigana'] as String?,
|
||||
ambiguous: e['ambiguous'] == 1,
|
||||
xrefResult: seeAlsosWordResults.isNotEmpty
|
||||
? seeAlsosWordResults[i]
|
||||
: null,
|
||||
);
|
||||
}).toList(),
|
||||
antonyms: antonyms.asMap().entries.map<WordSearchXrefEntry>((mapEntry) {
|
||||
final i = mapEntry.key;
|
||||
final e = mapEntry.value;
|
||||
|
||||
return WordSearchXrefEntry(
|
||||
entryId: e['xrefEntryId'] as int,
|
||||
baseWord: e['base'] as String,
|
||||
furigana: e['furigana'] as String?,
|
||||
ambiguous: e['ambiguous'] == 1,
|
||||
xrefResult: antonymsWordResults.isNotEmpty
|
||||
? antonymsWordResults[i]
|
||||
: null,
|
||||
);
|
||||
}).toList(),
|
||||
restrictedToReading: restrictedToReadings
|
||||
.map((e) => e['reading'] as String)
|
||||
.toList(),
|
||||
restrictedToKanji: restrictedToKanjis
|
||||
.map((e) => e['kanji'] as String)
|
||||
.toList(),
|
||||
fields: fields
|
||||
.map((e) => JMdictField.fromId(e['field'] as String))
|
||||
.toList(),
|
||||
restrictedToReading:
|
||||
restrictedToReadings.map((e) => e['reading'] as String).toList(),
|
||||
restrictedToKanji:
|
||||
restrictedToKanjis.map((e) => e['kanji'] as String).toList(),
|
||||
fields:
|
||||
fields.map((e) => JMdictField.fromId(e['field'] as String)).toList(),
|
||||
dialects: dialects
|
||||
.map((e) => JMdictDialect.fromId(e['dialect'] as String))
|
||||
.toList(),
|
||||
misc: miscs.map((e) => JMdictMisc.fromId(e['misc'] as String)).toList(),
|
||||
info: infos.map((e) => e['info'] as String).toList(),
|
||||
languageSource: languageSources
|
||||
.map((e) => WordSearchSenseLanguageSource(
|
||||
language: e['language'] as String,
|
||||
phrase: e['phrase'] as String?,
|
||||
fullyDescribesSense: e['fullyDescribesSense'] == 1,
|
||||
constructedFromSmallerWords:
|
||||
e['constructedFromSmallerWords'] == 1,
|
||||
))
|
||||
.map(
|
||||
(e) => WordSearchSenseLanguageSource(
|
||||
language: e['language'] as String,
|
||||
phrase: e['phrase'] as String?,
|
||||
fullyDescribesSense: e['fullyDescribesSense'] == 1,
|
||||
constructedFromSmallerWords:
|
||||
e['constructedFromSmallerWords'] == 1,
|
||||
),
|
||||
)
|
||||
.toList(),
|
||||
);
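The see-also and antonym xref rows above are paired with their positions via `asMap().entries`, so each row can pick up the matching element of seeAlsosWordResults / antonymsWordResults by index; this relies on regroupWordSearchResults returning its results in the same order as the entry ids it was given. A generic form of the index-pairing idiom (names are illustrative; package:collection's `mapIndexed` expresses the same thing more directly):

// Map over a list together with each element's index.
List<R> mapWithIndex<T, R>(List<T> items, R Function(int index, T item) f) =>
    items.asMap().entries.map((entry) => f(entry.key, entry.value)).toList();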
|
||||
|
||||
|
||||
@@ -13,27 +13,20 @@ import 'package:jadb/search/word_search/regrouping.dart';
|
||||
import 'package:jadb/table_names/jmdict.dart';
|
||||
import 'package:sqflite_common/sqlite_api.dart';
|
||||
|
||||
enum SearchMode {
|
||||
Auto,
|
||||
English,
|
||||
Kanji,
|
||||
MixedKanji,
|
||||
Kana,
|
||||
MixedKana,
|
||||
}
|
||||
enum SearchMode { Auto, English, Kanji, MixedKanji, Kana, MixedKana }
|
||||
|
||||
Future<List<WordSearchResult>?> searchWordWithDbConnection(
|
||||
DatabaseExecutor connection,
|
||||
String word,
|
||||
SearchMode searchMode,
|
||||
int page,
|
||||
int pageSize,
|
||||
) async {
|
||||
String word, {
|
||||
SearchMode searchMode = SearchMode.Auto,
|
||||
int page = 0,
|
||||
int? pageSize,
|
||||
}) async {
|
||||
if (word.isEmpty) {
|
||||
return null;
|
||||
}
|
||||
|
||||
final offset = page * pageSize;
|
||||
final int? offset = pageSize != null ? page * pageSize : null;
|
||||
final List<ScoredEntryId> entryIds = await fetchEntryIds(
|
||||
connection,
|
||||
word,
|
||||
@@ -49,32 +42,13 @@ Future<List<WordSearchResult>?> searchWordWithDbConnection(
|
||||
|
||||
final LinearWordQueryData linearWordQueryData =
|
||||
await fetchLinearWordQueryData(
|
||||
connection,
|
||||
entryIds.map((e) => e.entryId).toList(),
|
||||
);
|
||||
connection,
|
||||
entryIds.map((e) => e.entryId).toList(),
|
||||
);
|
||||
|
||||
final result = regroupWordSearchResults(
|
||||
entryIds: entryIds,
|
||||
readingElements: linearWordQueryData.readingElements,
|
||||
kanjiElements: linearWordQueryData.kanjiElements,
|
||||
jlptTags: linearWordQueryData.jlptTags,
|
||||
commonEntries: linearWordQueryData.commonEntries,
|
||||
senses: linearWordQueryData.senses,
|
||||
senseAntonyms: linearWordQueryData.senseAntonyms,
|
||||
senseDialects: linearWordQueryData.senseDialects,
|
||||
senseFields: linearWordQueryData.senseFields,
|
||||
senseGlossaries: linearWordQueryData.senseGlossaries,
|
||||
senseInfos: linearWordQueryData.senseInfos,
|
||||
senseLanguageSources: linearWordQueryData.senseLanguageSources,
|
||||
senseMiscs: linearWordQueryData.senseMiscs,
|
||||
sensePOSs: linearWordQueryData.sensePOSs,
|
||||
senseRestrictedToKanjis: linearWordQueryData.senseRestrictedToKanjis,
|
||||
senseRestrictedToReadings: linearWordQueryData.senseRestrictedToReadings,
|
||||
senseSeeAlsos: linearWordQueryData.senseSeeAlsos,
|
||||
exampleSentences: linearWordQueryData.exampleSentences,
|
||||
readingElementInfos: linearWordQueryData.readingElementInfos,
|
||||
readingElementRestrictions: linearWordQueryData.readingElementRestrictions,
|
||||
kanjiElementInfos: linearWordQueryData.kanjiElementInfos,
|
||||
linearWordQueryData: linearWordQueryData,
|
||||
);
|
||||
|
||||
return result;
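Paging is optional end to end in this entry point: a null pageSize means no LIMIT/OFFSET is emitted at all, and when a page size is given the offset is simply page * pageSize with a zero-based page index. Reduced to an illustrative helper:

// page is zero-based; a null pageSize disables paging entirely.
int? offsetFor({required int page, int? pageSize}) =>
    pageSize != null ? page * pageSize : null;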
|
||||
@@ -82,9 +56,9 @@ Future<List<WordSearchResult>?> searchWordWithDbConnection(
|
||||
|
||||
Future<int?> searchWordCountWithDbConnection(
|
||||
DatabaseExecutor connection,
|
||||
String word,
|
||||
SearchMode searchMode,
|
||||
) async {
|
||||
String word, {
|
||||
SearchMode searchMode = SearchMode.Auto,
|
||||
}) async {
|
||||
if (word.isEmpty) {
|
||||
return null;
|
||||
}
|
||||
@@ -106,43 +80,23 @@ Future<WordSearchResult?> getWordByIdWithDbConnection(
|
||||
return null;
|
||||
}
|
||||
|
||||
final exists = await connection.rawQuery(
|
||||
'SELECT EXISTS(SELECT 1 FROM "${JMdictTableNames.entry}" WHERE "entryId" = ?)',
|
||||
[id],
|
||||
).then((value) => value.isNotEmpty && value.first.values.first == 1);
|
||||
final exists = await connection
|
||||
.rawQuery(
|
||||
'SELECT EXISTS(SELECT 1 FROM "${JMdictTableNames.entry}" WHERE "entryId" = ?)',
|
||||
[id],
|
||||
)
|
||||
.then((value) => value.isNotEmpty && value.first.values.first == 1);
|
||||
|
||||
if (!exists) {
|
||||
return null;
|
||||
}
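Before doing the heavy linear fetch, the lookup-by-id path runs a cheap `SELECT EXISTS(...)` probe: SQLite returns a single row whose single column is 1 when a matching entry exists and 0 otherwise, letting the function bail out with null on a miss. Wrapped as a helper (a sketch that reuses the table-name constant from the code above):

import 'package:jadb/table_names/jmdict.dart';
import 'package:sqflite_common/sqlite_api.dart';

Future<bool> entryExists(DatabaseExecutor connection, int id) => connection
    .rawQuery(
      'SELECT EXISTS(SELECT 1 FROM "${JMdictTableNames.entry}" WHERE "entryId" = ?)',
      [id],
    )
    .then((rows) => rows.isNotEmpty && rows.first.values.first == 1);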
|
||||
|
||||
final LinearWordQueryData linearWordQueryData =
|
||||
await fetchLinearWordQueryData(
|
||||
connection,
|
||||
[id],
|
||||
);
|
||||
await fetchLinearWordQueryData(connection, [id]);
|
||||
|
||||
final result = regroupWordSearchResults(
|
||||
entryIds: [ScoredEntryId(id, 0)],
|
||||
readingElements: linearWordQueryData.readingElements,
|
||||
kanjiElements: linearWordQueryData.kanjiElements,
|
||||
jlptTags: linearWordQueryData.jlptTags,
|
||||
commonEntries: linearWordQueryData.commonEntries,
|
||||
senses: linearWordQueryData.senses,
|
||||
senseAntonyms: linearWordQueryData.senseAntonyms,
|
||||
senseDialects: linearWordQueryData.senseDialects,
|
||||
senseFields: linearWordQueryData.senseFields,
|
||||
senseGlossaries: linearWordQueryData.senseGlossaries,
|
||||
senseInfos: linearWordQueryData.senseInfos,
|
||||
senseLanguageSources: linearWordQueryData.senseLanguageSources,
|
||||
senseMiscs: linearWordQueryData.senseMiscs,
|
||||
sensePOSs: linearWordQueryData.sensePOSs,
|
||||
senseRestrictedToKanjis: linearWordQueryData.senseRestrictedToKanjis,
|
||||
senseRestrictedToReadings: linearWordQueryData.senseRestrictedToReadings,
|
||||
senseSeeAlsos: linearWordQueryData.senseSeeAlsos,
|
||||
exampleSentences: linearWordQueryData.exampleSentences,
|
||||
readingElementInfos: linearWordQueryData.readingElementInfos,
|
||||
readingElementRestrictions: linearWordQueryData.readingElementRestrictions,
|
||||
kanjiElementInfos: linearWordQueryData.kanjiElementInfos,
|
||||
linearWordQueryData: linearWordQueryData,
|
||||
);
|
||||
|
||||
assert(
|
||||
@@ -152,3 +106,26 @@ Future<WordSearchResult?> getWordByIdWithDbConnection(
|
||||
|
||||
return result.firstOrNull;
|
||||
}
|
||||
|
||||
Future<Map<int, WordSearchResult>> getWordsByIdsWithDbConnection(
|
||||
DatabaseExecutor connection,
|
||||
Set<int> ids,
|
||||
) async {
|
||||
if (ids.isEmpty) {
|
||||
return {};
|
||||
}
|
||||
|
||||
final LinearWordQueryData linearWordQueryData =
|
||||
await fetchLinearWordQueryData(connection, ids.toList());
|
||||
|
||||
final List<ScoredEntryId> entryIds = ids
|
||||
.map((id) => ScoredEntryId(id, 0)) // Score is not used here
|
||||
.toList();
|
||||
|
||||
final results = regroupWordSearchResults(
|
||||
entryIds: entryIds,
|
||||
linearWordQueryData: linearWordQueryData,
|
||||
);
|
||||
|
||||
return {for (var r in results) r.entryId: r};
|
||||
}
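The final collection-for builds an entryId → result map in a single pass over the regrouped results, which is what lets callers batch several ids into one database round-trip and still do constant-time lookups afterwards. The same idiom as a generic helper (illustrative):

// Index any list by a derived key; later duplicates overwrite earlier ones.
Map<K, V> indexBy<K, V>(Iterable<V> items, K Function(V item) keyOf) =>
    {for (final item in items) keyOf(item): item};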
|
||||
|
||||
@@ -20,23 +20,23 @@ abstract class JMdictTableNames {
|
||||
static const String senseSeeAlso = 'JMdict_SenseSeeAlso';
|
||||
|
||||
static Set<String> get allTables => {
|
||||
entry,
|
||||
kanjiElement,
|
||||
kanjiInfo,
|
||||
readingElement,
|
||||
readingInfo,
|
||||
readingRestriction,
|
||||
sense,
|
||||
senseAntonyms,
|
||||
senseDialect,
|
||||
senseField,
|
||||
senseGlossary,
|
||||
senseInfo,
|
||||
senseMisc,
|
||||
sensePOS,
|
||||
senseLanguageSource,
|
||||
senseRestrictedToKanji,
|
||||
senseRestrictedToReading,
|
||||
senseSeeAlso
|
||||
};
|
||||
entry,
|
||||
kanjiElement,
|
||||
kanjiInfo,
|
||||
readingElement,
|
||||
readingInfo,
|
||||
readingRestriction,
|
||||
sense,
|
||||
senseAntonyms,
|
||||
senseDialect,
|
||||
senseField,
|
||||
senseGlossary,
|
||||
senseInfo,
|
||||
senseMisc,
|
||||
sensePOS,
|
||||
senseLanguageSource,
|
||||
senseRestrictedToKanji,
|
||||
senseRestrictedToReading,
|
||||
senseSeeAlso,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -17,19 +17,19 @@ abstract class KANJIDICTableNames {
|
||||
static const String nanori = 'KANJIDIC_Nanori';
|
||||
|
||||
static Set<String> get allTables => {
|
||||
character,
|
||||
radicalName,
|
||||
codepoint,
|
||||
radical,
|
||||
strokeMiscount,
|
||||
variant,
|
||||
dictionaryReference,
|
||||
dictionaryReferenceMoro,
|
||||
queryCode,
|
||||
reading,
|
||||
kunyomi,
|
||||
onyomi,
|
||||
meaning,
|
||||
nanori
|
||||
};
|
||||
character,
|
||||
radicalName,
|
||||
codepoint,
|
||||
radical,
|
||||
strokeMiscount,
|
||||
variant,
|
||||
dictionaryReference,
|
||||
dictionaryReferenceMoro,
|
||||
queryCode,
|
||||
reading,
|
||||
kunyomi,
|
||||
onyomi,
|
||||
meaning,
|
||||
nanori,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
abstract class RADKFILETableNames {
|
||||
static const String radkfile = 'RADKFILE';
|
||||
|
||||
static Set<String> get allTables => {
|
||||
radkfile,
|
||||
};
|
||||
static Set<String> get allTables => {radkfile};
|
||||
}
|
||||
|
||||
@@ -276,29 +276,22 @@ extension on DateTime {
|
||||
/// See more info here:
|
||||
/// - https://en.wikipedia.org/wiki/Nanboku-ch%C5%8D_period
|
||||
/// - http://www.kumamotokokufu-h.ed.jp/kumamoto/bungaku/nengoui.html
|
||||
String? japaneseEra({bool nanbokuchouPeriodUsesNorth = true}) {
|
||||
String? japaneseEra() {
|
||||
throw UnimplementedError('This function is not implemented yet.');
|
||||
|
||||
if (this.year < 645) {
|
||||
if (year < 645) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (this.year < periodsNanbokuchouNorth.keys.first.$1) {
|
||||
if (year < periodsNanbokuchouNorth.keys.first.$1) {
|
||||
// TODO: find first where year <= this.year and jump one period back.
|
||||
}
|
||||
}
|
||||
|
||||
String get japaneseWeekdayPrefix => [
|
||||
'月',
|
||||
'火',
|
||||
'水',
|
||||
'木',
|
||||
'金',
|
||||
'土',
|
||||
'日',
|
||||
][weekday - 1];
|
||||
String get japaneseWeekdayPrefix =>
|
||||
['月', '火', '水', '木', '金', '土', '日'][weekday - 1];
|
||||
|
||||
/// Returns the date in Japanese format.
|
||||
String japaneseDate({bool showWeekday = false}) =>
|
||||
'$month月$day日' + (showWeekday ? '($japaneseWeekdayPrefix)' : '');
|
||||
'$month月$day日${showWeekday ? '($japaneseWeekdayPrefix)' : ''}';
|
||||
}
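DateTime.weekday is 1 for Monday through 7 for Sunday, which is why the kanji list starts at 月 and is indexed with `weekday - 1`. A standalone equivalent:

const weekdayKanji = ['月', '火', '水', '木', '金', '土', '日'];

// DateTime.weekday: Monday == 1 ... Sunday == 7.
String weekdayPrefixFor(DateTime date) => weekdayKanji[date.weekday - 1];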
|
||||
|
||||
@@ -12,10 +12,7 @@ enum WordClass {
|
||||
input,
|
||||
}
|
||||
|
||||
enum LemmatizationRuleType {
|
||||
prefix,
|
||||
suffix,
|
||||
}
|
||||
enum LemmatizationRuleType { prefix, suffix }
|
||||
|
||||
class LemmatizationRule {
|
||||
final String name;
|
||||
@@ -46,18 +43,18 @@ class LemmatizationRule {
|
||||
lookAheadBehind = const [''],
|
||||
LemmatizationRuleType type = LemmatizationRuleType.suffix,
|
||||
}) : this(
|
||||
name: name,
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
pattern: replacement != null ? [replacement] : null
|
||||
},
|
||||
type: type,
|
||||
lookAheadBehind: lookAheadBehind,
|
||||
),
|
||||
validChildClasses: validChildClasses,
|
||||
terminal: terminal,
|
||||
wordClass: wordClass,
|
||||
);
|
||||
name: name,
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
pattern: replacement != null ? [replacement] : null,
|
||||
},
|
||||
type: type,
|
||||
lookAheadBehind: lookAheadBehind,
|
||||
),
|
||||
validChildClasses: validChildClasses,
|
||||
terminal: terminal,
|
||||
wordClass: wordClass,
|
||||
);
|
||||
}
|
||||
|
||||
/// Represents a set of patterns for matching allomorphs in a word.
|
||||
@@ -132,8 +129,8 @@ class AllomorphPattern {
|
||||
if (word.startsWith(p as String)) {
|
||||
return patterns[affix] != null
|
||||
? patterns[affix]!
|
||||
.map((s) => s + word.substring(affix.length))
|
||||
.toList()
|
||||
.map((s) => s + word.substring(affix.length))
|
||||
.toList()
|
||||
: [word.substring(affix.length)];
|
||||
}
|
||||
break;
|
||||
@@ -186,7 +183,7 @@ class Lemmatized {
|
||||
@override
|
||||
String toString() {
|
||||
final childrenString = children
|
||||
.map((c) => ' - ' + c.toString().split('\n').join('\n '))
|
||||
.map((c) => ' - ${c.toString().split('\n').join('\n ')}')
|
||||
.join('\n');
|
||||
|
||||
if (children.isEmpty) {
|
||||
@@ -239,9 +236,6 @@ Lemmatized lemmatize(String word) {
|
||||
return Lemmatized(
|
||||
original: word,
|
||||
rule: inputRule,
|
||||
children: _lemmatize(
|
||||
inputRule,
|
||||
word,
|
||||
),
|
||||
children: _lemmatize(inputRule, word),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -481,9 +481,9 @@ const Map<String, String> latin_to_hiragana = {
|
||||
'#~': '〜',
|
||||
};
|
||||
|
||||
bool _smallTsu(String for_conversion) => for_conversion == hiragana_small_tsu;
|
||||
bool _nFollowedByYuYeYo(String for_conversion, String kana) =>
|
||||
for_conversion == hiragana_syllabic_n &&
|
||||
bool _smallTsu(String forConversion) => forConversion == hiragana_small_tsu;
|
||||
bool _nFollowedByYuYeYo(String forConversion, String kana) =>
|
||||
forConversion == hiragana_syllabic_n &&
|
||||
kana.length > 1 &&
|
||||
'やゆよ'.contains(kana.substring(1, 2));
|
||||
|
||||
@@ -495,17 +495,17 @@ String transliterateHiraganaToLatin(String hiragana) {
|
||||
while (kana.isNotEmpty) {
|
||||
final lengths = [if (kana.length > 1) 2, 1];
|
||||
for (final length in lengths) {
|
||||
final String for_conversion = kana.substring(0, length);
|
||||
final String forConversion = kana.substring(0, length);
|
||||
String? mora;
|
||||
|
||||
if (_smallTsu(for_conversion)) {
|
||||
if (_smallTsu(forConversion)) {
|
||||
geminate = true;
|
||||
kana = kana.replaceRange(0, length, '');
|
||||
break;
|
||||
} else if (_nFollowedByYuYeYo(for_conversion, kana)) {
|
||||
} else if (_nFollowedByYuYeYo(forConversion, kana)) {
|
||||
mora = "n'";
|
||||
}
|
||||
mora ??= hiragana_to_latin[for_conversion];
|
||||
mora ??= hiragana_to_latin[forConversion];
|
||||
|
||||
if (mora != null) {
|
||||
if (geminate) {
|
||||
@@ -516,7 +516,7 @@ String transliterateHiraganaToLatin(String hiragana) {
|
||||
kana = kana.replaceRange(0, length, '');
|
||||
break;
|
||||
} else if (length == 1) {
|
||||
romaji += for_conversion;
|
||||
romaji += forConversion;
|
||||
kana = kana.replaceRange(0, length, '');
|
||||
}
|
||||
}
|
||||
@@ -524,48 +524,46 @@ String transliterateHiraganaToLatin(String hiragana) {
|
||||
return romaji;
|
||||
}
|
||||
|
||||
bool _doubleNFollowedByAIUEO(String for_conversion) =>
|
||||
RegExp(r'^nn[aiueo]$').hasMatch(for_conversion);
|
||||
bool _hasTableMatch(String for_conversion) =>
|
||||
latin_to_hiragana[for_conversion] != null;
|
||||
bool _hasDoubleConsonant(String for_conversion, int length) =>
|
||||
for_conversion == 'tch' ||
|
||||
bool _doubleNFollowedByAIUEO(String forConversion) =>
|
||||
RegExp(r'^nn[aiueo]$').hasMatch(forConversion);
|
||||
bool _hasTableMatch(String forConversion) =>
|
||||
latin_to_hiragana[forConversion] != null;
|
||||
bool _hasDoubleConsonant(String forConversion, int length) =>
|
||||
forConversion == 'tch' ||
|
||||
(length == 2 &&
|
||||
RegExp(r'^([kgsztdnbpmyrlwchf])\1$').hasMatch(for_conversion));
|
||||
RegExp(r'^([kgsztdnbpmyrlwchf])\1$').hasMatch(forConversion));
|
||||
|
||||
String transliterateLatinToHiragana(String latin) {
|
||||
String romaji =
|
||||
latin.toLowerCase().replaceAll('mb', 'nb').replaceAll('mp', 'np');
|
||||
String romaji = latin
|
||||
.toLowerCase()
|
||||
.replaceAll('mb', 'nb')
|
||||
.replaceAll('mp', 'np');
|
||||
String kana = '';
|
||||
|
||||
while (romaji.isNotEmpty) {
|
||||
final lengths = [
|
||||
if (romaji.length > 2) 3,
|
||||
if (romaji.length > 1) 2,
|
||||
1,
|
||||
];
|
||||
final lengths = [if (romaji.length > 2) 3, if (romaji.length > 1) 2, 1];
|
||||
|
||||
for (final length in lengths) {
|
||||
String? mora;
|
||||
int for_removal = length;
|
||||
final String for_conversion = romaji.substring(0, length);
|
||||
int forRemoval = length;
|
||||
final String forConversion = romaji.substring(0, length);
|
||||
|
||||
if (_doubleNFollowedByAIUEO(for_conversion)) {
|
||||
if (_doubleNFollowedByAIUEO(forConversion)) {
|
||||
mora = hiragana_syllabic_n;
|
||||
for_removal = 1;
|
||||
} else if (_hasTableMatch(for_conversion)) {
|
||||
mora = latin_to_hiragana[for_conversion];
|
||||
} else if (_hasDoubleConsonant(for_conversion, length)) {
|
||||
forRemoval = 1;
|
||||
} else if (_hasTableMatch(forConversion)) {
|
||||
mora = latin_to_hiragana[forConversion];
|
||||
} else if (_hasDoubleConsonant(forConversion, length)) {
|
||||
mora = hiragana_small_tsu;
|
||||
for_removal = 1;
|
||||
forRemoval = 1;
|
||||
}
|
||||
|
||||
if (mora != null) {
|
||||
kana += mora;
|
||||
romaji = romaji.replaceRange(0, for_removal, '');
|
||||
romaji = romaji.replaceRange(0, forRemoval, '');
|
||||
break;
|
||||
} else if (length == 1) {
|
||||
kana += for_conversion;
|
||||
kana += forConversion;
|
||||
romaji = romaji.replaceRange(0, 1, '');
|
||||
}
|
||||
}
|
||||
@@ -579,11 +577,11 @@ String _transposeCodepointsInRange(
|
||||
int distance,
|
||||
int rangeStart,
|
||||
int rangeEnd,
|
||||
) =>
|
||||
String.fromCharCodes(
|
||||
text.codeUnits
|
||||
.map((c) => c + ((rangeStart <= c && c <= rangeEnd) ? distance : 0)),
|
||||
);
|
||||
) => String.fromCharCodes(
|
||||
text.codeUnits.map(
|
||||
(c) => c + ((rangeStart <= c && c <= rangeEnd) ? distance : 0),
|
||||
),
|
||||
);
|
||||
|
||||
String transliterateKanaToLatin(String kana) =>
|
||||
transliterateHiraganaToLatin(transliterateKatakanaToHiragana(kana));
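The katakana↔hiragana and fullwidth↔halfwidth conversions all reduce to shifting code points by a fixed distance inside one Unicode block, which is what _transposeCodepointsInRange does; for instance the hiragana block (U+3041–U+3096) and the matching katakana block differ by a constant 0x60. A self-contained sketch of that one direction (the offsets here are standard Unicode facts, not values read from this diff):

// Shift hiragana code points up by 0x60 to reach the matching katakana.
String hiraganaToKatakana(String hiragana) => String.fromCharCodes(
      hiragana.codeUnits.map(
        (c) => (0x3041 <= c && c <= 0x3096) ? c + 0x60 : c,
      ),
    );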
|
||||
@@ -599,12 +597,7 @@ String transliterateHiraganaToKatakana(String hiragana) =>
|
||||
|
||||
String transliterateFullwidthRomajiToHalfwidth(String halfwidth) =>
|
||||
_transposeCodepointsInRange(
|
||||
_transposeCodepointsInRange(
|
||||
halfwidth,
|
||||
-65248,
|
||||
65281,
|
||||
65374,
|
||||
),
|
||||
_transposeCodepointsInRange(halfwidth, -65248, 65281, 65374),
|
||||
-12256,
|
||||
12288,
|
||||
12288,
|
||||
@@ -612,12 +605,7 @@ String transliterateFullwidthRomajiToHalfwidth(String halfwidth) =>
|
||||
|
||||
String transliterateHalfwidthRomajiToFullwidth(String halfwidth) =>
|
||||
_transposeCodepointsInRange(
|
||||
_transposeCodepointsInRange(
|
||||
halfwidth,
|
||||
65248,
|
||||
33,
|
||||
126,
|
||||
),
|
||||
_transposeCodepointsInRange(halfwidth, 65248, 33, 126),
|
||||
12256,
|
||||
32,
|
||||
32,
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
String escapeStringValue(String value) {
|
||||
return "'" + value.replaceAll("'", "''") + "'";
|
||||
return "'${value.replaceAll("'", "''")}'";
|
||||
}
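Doubling embedded single quotes is the standard SQL way of escaping a string literal; this helper is only appropriate for generated statements where a `?` placeholder cannot be used. A quick usage check:

// Same body as above, repeated so the example is self-contained.
String escapeStringValue(String value) => "'${value.replaceAll("'", "''")}'";

void main() {
  print(escapeStringValue("it's")); // prints: 'it''s'
}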
|
||||
|
||||
58 pubspec.lock
@@ -5,18 +5,18 @@ packages:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: _fe_analyzer_shared
|
||||
sha256: e55636ed79578b9abca5fecf9437947798f5ef7456308b5cb85720b793eac92f
|
||||
sha256: da0d9209ca76bde579f2da330aeb9df62b6319c834fa7baae052021b0462401f
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "82.0.0"
|
||||
version: "85.0.0"
|
||||
analyzer:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: analyzer
|
||||
sha256: "904ae5bb474d32c38fb9482e2d925d5454cda04ddd0e55d2e6826bc72f6ba8c0"
|
||||
sha256: b1ade5707ab7a90dfd519eaac78a7184341d19adb6096c68d499b59c7c6cf880
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "7.4.5"
|
||||
version: "7.7.0"
|
||||
args:
|
||||
dependency: "direct main"
|
||||
description:
|
||||
@@ -69,10 +69,10 @@ packages:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: coverage
|
||||
sha256: "802bd084fb82e55df091ec8ad1553a7331b61c08251eef19a508b6f3f3a9858d"
|
||||
sha256: "5da775aa218eaf2151c721b16c01c7676fbfdd99cebba2bf64e8b807a28ff94d"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "1.13.1"
|
||||
version: "1.15.0"
|
||||
crypto:
|
||||
dependency: transitive
|
||||
description:
|
||||
@@ -165,10 +165,10 @@ packages:
|
||||
dependency: "direct dev"
|
||||
description:
|
||||
name: lints
|
||||
sha256: c35bb79562d980e9a453fc715854e1ed39e24e7d0297a880ef54e17f9874a9d7
|
||||
sha256: a5e2b223cb7c9c8efdc663ef484fdd95bb243bff242ef5b13e26883547fce9a0
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "5.1.1"
|
||||
version: "6.0.0"
|
||||
logging:
|
||||
dependency: transitive
|
||||
description:
|
||||
@@ -218,7 +218,7 @@ packages:
|
||||
source: hosted
|
||||
version: "2.2.0"
|
||||
path:
|
||||
dependency: transitive
|
||||
dependency: "direct main"
|
||||
description:
|
||||
name: path
|
||||
sha256: "75cca69d1490965be98c73ceaea117e8a04dd21217b37b292c9ddbec0d955bc5"
|
||||
@@ -229,10 +229,10 @@ packages:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: petitparser
|
||||
sha256: "07c8f0b1913bcde1ff0d26e57ace2f3012ccbf2b204e070290dad3bb22797646"
|
||||
sha256: "9436fe11f82d7cc1642a8671e5aa4149ffa9ae9116e6cf6dd665fc0653e3825c"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "6.1.0"
|
||||
version: "7.0.0"
|
||||
pool:
|
||||
dependency: transitive
|
||||
description:
|
||||
@@ -317,18 +317,18 @@ packages:
|
||||
dependency: "direct main"
|
||||
description:
|
||||
name: sqflite_common_ffi
|
||||
sha256: "1f3ef3888d3bfbb47785cc1dda0dc7dd7ebd8c1955d32a9e8e9dae1e38d1c4c1"
|
||||
sha256: "9faa2fedc5385ef238ce772589f7718c24cdddd27419b609bb9c6f703ea27988"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "2.3.5"
|
||||
version: "2.3.6"
|
||||
sqlite3:
|
||||
dependency: transitive
|
||||
dependency: "direct main"
|
||||
description:
|
||||
name: sqlite3
|
||||
sha256: "310af39c40dd0bb2058538333c9d9840a2725ae0b9f77e4fd09ad6696aa8f66e"
|
||||
sha256: "608b56d594e4c8498c972c8f1507209f9fd74939971b948ddbbfbfd1c9cb3c15"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "2.7.5"
|
||||
version: "2.7.7"
|
||||
stack_trace:
|
||||
dependency: transitive
|
||||
description:
|
||||
@@ -357,10 +357,10 @@ packages:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: synchronized
|
||||
sha256: "0669c70faae6270521ee4f05bffd2919892d42d1276e6c495be80174b6bc0ef6"
|
||||
sha256: c254ade258ec8282947a0acbbc90b9575b4f19673533ee46f2f6e9b3aeefd7c0
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "3.3.1"
|
||||
version: "3.4.0"
|
||||
term_glyph:
|
||||
dependency: transitive
|
||||
description:
|
||||
@@ -373,10 +373,10 @@ packages:
|
||||
dependency: "direct dev"
|
||||
description:
|
||||
name: test
|
||||
sha256: "0561f3a2cfd33d10232360f16dfcab9351cfb7ad9b23e6cd6e8c7fb0d62c7ac3"
|
||||
sha256: "65e29d831719be0591f7b3b1a32a3cda258ec98c58c7b25f7b84241bc31215bb"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "1.26.1"
|
||||
version: "1.26.2"
|
||||
test_api:
|
||||
dependency: transitive
|
||||
description:
|
||||
@@ -389,10 +389,10 @@ packages:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: test_core
|
||||
sha256: "8619a9a45be044b71fe2cd6b77b54fd60f1c67904c38d48706e2852a2bda1c60"
|
||||
sha256: "80bf5a02b60af04b09e14f6fe68b921aad119493e26e490deaca5993fef1b05a"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "0.6.10"
|
||||
version: "0.6.11"
|
||||
typed_data:
|
||||
dependency: transitive
|
||||
description:
|
||||
@@ -405,18 +405,18 @@ packages:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: vm_service
|
||||
sha256: ddfa8d30d89985b96407efce8acbdd124701f96741f2d981ca860662f1c0dc02
|
||||
sha256: "45caa6c5917fa127b5dbcfbd1fa60b14e583afdc08bfc96dda38886ca252eb60"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "15.0.0"
|
||||
version: "15.0.2"
|
||||
watcher:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: watcher
|
||||
sha256: "69da27e49efa56a15f8afe8f4438c4ec02eff0a117df1b22ea4aad194fe1c104"
|
||||
sha256: "0b7fd4a0bbc4b92641dbf20adfd7e3fd1398fe17102d94b674234563e110088a"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "1.1.1"
|
||||
version: "1.1.2"
|
||||
web:
|
||||
dependency: transitive
|
||||
description:
|
||||
@@ -453,10 +453,10 @@ packages:
|
||||
dependency: "direct main"
|
||||
description:
|
||||
name: xml
|
||||
sha256: b015a8ad1c488f66851d762d3090a21c600e479dc75e68328c52774040cf9226
|
||||
sha256: "3202a47961c1a0af6097c9f8c1b492d705248ba309e6f7a72410422c05046851"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "6.5.0"
|
||||
version: "6.6.0"
|
||||
yaml:
|
||||
dependency: transitive
|
||||
description:
|
||||
@@ -466,4 +466,4 @@ packages:
|
||||
source: hosted
|
||||
version: "3.1.3"
|
||||
sdks:
|
||||
dart: ">=3.7.0 <4.0.0"
|
||||
dart: ">=3.8.0 <4.0.0"
|
||||
|
||||
@@ -4,19 +4,21 @@ version: 1.0.0
homepage: https://git.pvv.ntnu.no/oysteikt/jadb

environment:
sdk: '>=3.2.0 <4.0.0'
sdk: '^3.8.0'

dependencies:
args: ^2.7.0
collection: ^1.19.0
csv: ^6.0.0
equatable: ^2.0.0
path: ^1.9.1
sqflite_common: ^2.5.0
sqflite_common_ffi: ^2.3.0
sqlite3: ^2.7.7
xml: ^6.5.0

dev_dependencies:
lints: ^5.0.0
lints: ^6.0.0
test: ^1.25.15

executables:

@@ -3,7 +3,7 @@ import 'package:jadb/const_data/kanji_grades.dart';
import 'package:test/test.dart';

void main() {
test("Assert 2136 kanji in jouyou set", () {
test('Assert 2136 kanji in jouyou set', () {
expect(JOUYOU_KANJI_BY_GRADES.values.flattenedToSet.length, 2136);
});
}

@@ -4,26 +4,26 @@ import 'dart:io';
import 'package:jadb/models/create_empty_db.dart';
import 'package:jadb/search.dart';
import 'package:sqflite_common_ffi/sqflite_ffi.dart';
import 'package:test/test.dart';
import 'package:sqlite3/open.dart';
import 'package:test/test.dart';

Future<DatabaseExecutor> setup_inmemory_database() async {
final libsqlitePath = Platform.environment['LIBSQLITE_PATH'];

if (libsqlitePath == null) {
throw Exception("LIBSQLITE_PATH is not set");
throw Exception('LIBSQLITE_PATH is not set');
}

final db_connection = await createDatabaseFactoryFfi(
final dbConnection = await createDatabaseFactoryFfi(
ffiInit: () =>
open.overrideForAll(() => DynamicLibrary.open(libsqlitePath)),
).openDatabase(':memory:');

return db_connection;
return dbConnection;
}

void main() {
test("Create empty db", () async {
test('Create empty db', () async {
final db = await setup_inmemory_database();

await createEmptyDb(db);

@@ -4,29 +4,26 @@ import 'package:test/test.dart';
import 'setup_database_connection.dart';

void main() {
test("Filter kanji", () async {
test('Filter kanji', () async {
final connection = await setup_database_connection();

final result = await connection.filterKanji(
[
"a",
"b",
"c",
"漢",
"字",
"地",
"字",
"か",
"な",
".",
"!",
"@",
";",
"々",
],
deduplicate: false,
);
final result = await connection.filterKanji([
'a',
'b',
'c',
'漢',
'字',
'地',
'字',
'か',
'な',
'.',
'!',
'@',
';',
'々',
], deduplicate: false);

expect(result.join(), "漢字地字");
expect(result.join(), '漢字地字');
});
}

@@ -5,16 +5,16 @@ import 'package:test/test.dart';
import 'setup_database_connection.dart';

void main() {
test("Search a kanji", () async {
test('Search a kanji', () async {
final connection = await setup_database_connection();

final result = await connection.jadbSearchKanji('漢');
expect(result, isNotNull);
});

group("Search all jouyou kanji", () {
group('Search all jouyou kanji', () {
JOUYOU_KANJI_BY_GRADES.forEach((grade, characters) {
test("Search all kanji in grade $grade", () async {
test('Search all kanji in grade $grade', () async {
final connection = await setup_database_connection();

for (final character in characters) {

@@ -4,21 +4,21 @@ import 'package:jadb/_data_ingestion/open_local_db.dart';
import 'package:sqflite_common/sqlite_api.dart';

Future<Database> setup_database_connection() async {
final lib_sqlite_path = Platform.environment['LIBSQLITE_PATH'];
final jadb_path = Platform.environment['JADB_PATH'];
final libSqlitePath = Platform.environment['LIBSQLITE_PATH'];
final jadbPath = Platform.environment['JADB_PATH'];

if (lib_sqlite_path == null) {
throw Exception("LIBSQLITE_PATH is not set");
if (libSqlitePath == null) {
throw Exception('LIBSQLITE_PATH is not set');
}

if (jadb_path == null) {
throw Exception("JADB_PATH is not set");
if (jadbPath == null) {
throw Exception('JADB_PATH is not set');
}

final db_connection = await openLocalDb(
libsqlitePath: lib_sqlite_path,
jadbPath: jadb_path,
final dbConnection = await openLocalDb(
libsqlitePath: libSqlitePath,
jadbPath: jadbPath,
);

return db_connection;
return dbConnection;
}

@@ -4,59 +4,59 @@ import 'package:test/test.dart';
import 'setup_database_connection.dart';

void main() {
test("Search a word - english - auto", () async {
test('Search a word - english - auto', () async {
final connection = await setup_database_connection();
final result = await connection.jadbSearchWord("kana");
final result = await connection.jadbSearchWord('kana');
expect(result, isNotNull);
});

test("Get word search count - english - auto", () async {
test('Get word search count - english - auto', () async {
final connection = await setup_database_connection();
final result = await connection.jadbSearchWordCount("kana");
final result = await connection.jadbSearchWordCount('kana');
expect(result, isNotNull);
});

test("Search a word - japanese kana - auto", () async {
test('Search a word - japanese kana - auto', () async {
final connection = await setup_database_connection();
final result = await connection.jadbSearchWord("かな");
final result = await connection.jadbSearchWord('かな');
expect(result, isNotNull);
});

test("Get word search count - japanese kana - auto", () async {
test('Get word search count - japanese kana - auto', () async {
final connection = await setup_database_connection();
final result = await connection.jadbSearchWordCount("かな");
final result = await connection.jadbSearchWordCount('かな');
expect(result, isNotNull);
});

test("Search a word - japanese kanji - auto", () async {
test('Search a word - japanese kanji - auto', () async {
final connection = await setup_database_connection();
final result = await connection.jadbSearchWord("仮名");
final result = await connection.jadbSearchWord('仮名');
expect(result, isNotNull);
});

test("Get word search count - japanese kanji - auto", () async {
test('Get word search count - japanese kanji - auto', () async {
final connection = await setup_database_connection();
final result = await connection.jadbSearchWordCount("仮名");
final result = await connection.jadbSearchWordCount('仮名');
expect(result, isNotNull);
});

test("Get a word by id", () async {
test('Get a word by id', () async {
final connection = await setup_database_connection();
final result = await connection.jadbGetWordById(1577090);
expect(result, isNotNull);
});

test(
"Serialize all words",
'Serialize all words',
() async {
final connection = await setup_database_connection();

// Test serializing all words
for (final letter in "aiueoksthnmyrw".split("")) {
for (final letter in 'aiueoksthnmyrw'.split('')) {
await connection.jadbSearchWord(letter);
}
},
timeout: Timeout.factor(100),
skip: "Very slow test",
skip: 'Very slow test',
);
}

@@ -2,65 +2,65 @@ import 'package:jadb/util/romaji_transliteration.dart';
import 'package:test/test.dart';

void main() {
group("Romaji -> Hiragana", () {
test("Basic test", () {
final result = transliterateLatinToHiragana("katamari");
expect(result, "かたまり");
group('Romaji -> Hiragana', () {
test('Basic test', () {
final result = transliterateLatinToHiragana('katamari');
expect(result, 'かたまり');
});

test("Basic test with diacritics", () {
final result = transliterateLatinToHiragana("gadamari");
expect(result, "がだまり");
test('Basic test with diacritics', () {
final result = transliterateLatinToHiragana('gadamari');
expect(result, 'がだまり');
});

test("wi and we", () {
final result = transliterateLatinToHiragana("wiwe");
expect(result, "うぃうぇ");
test('wi and we', () {
final result = transliterateLatinToHiragana('wiwe');
expect(result, 'うぃうぇ');
});

test("nb = mb", () {
final result = transliterateLatinToHiragana("kanpai");
expect(result, "かんぱい");
test('nb = mb', () {
final result = transliterateLatinToHiragana('kanpai');
expect(result, 'かんぱい');

final result2 = transliterateLatinToHiragana("kampai");
expect(result2, "かんぱい");
final result2 = transliterateLatinToHiragana('kampai');
expect(result2, 'かんぱい');
});

test("Double n", () {
final result = transliterateLatinToHiragana("konnichiha");
expect(result, "こんにちは");
test('Double n', () {
final result = transliterateLatinToHiragana('konnichiha');
expect(result, 'こんにちは');
});

test("Double consonant", () {
final result = transliterateLatinToHiragana("kappa");
expect(result, "かっぱ");
test('Double consonant', () {
final result = transliterateLatinToHiragana('kappa');
expect(result, 'かっぱ');
});
});

group("Hiragana -> Romaji", () {
test("Basic test", () {
final result = transliterateHiraganaToLatin("かたまり");
expect(result, "katamari");
group('Hiragana -> Romaji', () {
test('Basic test', () {
final result = transliterateHiraganaToLatin('かたまり');
expect(result, 'katamari');
});

test("Basic test with diacritics", () {
final result = transliterateHiraganaToLatin("がだまり");
expect(result, "gadamari");
test('Basic test with diacritics', () {
final result = transliterateHiraganaToLatin('がだまり');
expect(result, 'gadamari');
});

test("whi and whe", () {
final result = transliterateHiraganaToLatin("うぃうぇ");
expect(result, "whiwhe");
test('whi and whe', () {
final result = transliterateHiraganaToLatin('うぃうぇ');
expect(result, 'whiwhe');
});

test("Double n", () {
final result = transliterateHiraganaToLatin("こんにちは");
expect(result, "konnichiha");
test('Double n', () {
final result = transliterateHiraganaToLatin('こんにちは');
expect(result, 'konnichiha');
});

test("Double consonant", () {
final result = transliterateHiraganaToLatin("かっぱ");
expect(result, "kappa");
test('Double consonant', () {
final result = transliterateHiraganaToLatin('かっぱ');
expect(result, 'kappa');
});
});
}