treewide: dart format
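For context, a minimal sketch of the style this pass applies (assuming it reflects the newer Dart "tall" formatter style, which the diff itself does not state): short parameter and argument lists collapse onto a single line, while multi-line argument and collection literals keep one entry per line and gain a trailing comma so the closing bracket sits on its own line. The Glossary class below is simplified from the repository (its SQLWritable base class and other members are omitted so the snippet stands alone).

// Illustrative only -- a simplified Glossary, not the repository's full class.
class Glossary {
  final String language;
  final String phrase;
  final String? type;

  // Short parameter lists are collapsed onto one line by the formatter.
  const Glossary({required this.language, required this.phrase, this.type});

  // Longer map literals keep one entry per line, with a trailing comma.
  Map<String, Object?> get sqlValue => {
    'language': language,
    'phrase': phrase,
    'type': type,
  };
}

void main() {
  // Prints {language: eng, phrase: dictionary, type: null}.
  print(const Glossary(language: 'eng', phrase: 'dictionary').sqlValue);
}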
@@ -17,13 +17,13 @@ abstract class Element extends SQLWritable {
});

Map<String, Object?> get sqlValue => {
'reading': reading,
'news': news,
'ichi': ichi,
'spec': spec,
'gai': gai,
'nf': nf,
};
'reading': reading,
'news': news,
'ichi': ichi,
'spec': spec,
'gai': gai,
'nf': nf,
};
}

class KanjiElement extends Element {
@@ -40,19 +40,19 @@ class KanjiElement extends Element {
int? gai,
int? nf,
}) : super(
reading: reading,
news: news,
ichi: ichi,
spec: spec,
gai: gai,
nf: nf,
);
reading: reading,
news: news,
ichi: ichi,
spec: spec,
gai: gai,
nf: nf,
);

@override
Map<String, Object?> get sqlValue => {
...super.sqlValue,
'orderNum': orderNum,
};
...super.sqlValue,
'orderNum': orderNum,
};
}

class ReadingElement extends Element {
@@ -73,20 +73,20 @@ class ReadingElement extends Element {
int? gai,
int? nf,
}) : super(
reading: reading,
news: news,
ichi: ichi,
spec: spec,
gai: gai,
nf: nf,
);
reading: reading,
news: news,
ichi: ichi,
spec: spec,
gai: gai,
nf: nf,
);

@override
Map<String, Object?> get sqlValue => {
...super.sqlValue,
'orderNum': orderNum,
'readingDoesNotMatchKanji': readingDoesNotMatchKanji,
};
...super.sqlValue,
'orderNum': orderNum,
'readingDoesNotMatchKanji': readingDoesNotMatchKanji,
};
}

class LanguageSource extends SQLWritable {
@@ -104,11 +104,11 @@ class LanguageSource extends SQLWritable {

@override
Map<String, Object?> get sqlValue => {
'language': language,
'phrase': phrase,
'fullyDescribesSense': fullyDescribesSense,
'constructedFromSmallerWords': constructedFromSmallerWords,
};
'language': language,
'phrase': phrase,
'fullyDescribesSense': fullyDescribesSense,
'constructedFromSmallerWords': constructedFromSmallerWords,
};
}

class Glossary extends SQLWritable {
@@ -116,48 +116,40 @@ class Glossary extends SQLWritable {
final String phrase;
final String? type;

const Glossary({
required this.language,
required this.phrase,
this.type,
});
const Glossary({required this.language, required this.phrase, this.type});

Map<String, Object?> get sqlValue => {
'language': language,
'phrase': phrase,
'type': type,
};
'language': language,
'phrase': phrase,
'type': type,
};
}

final kanaRegex =
RegExp(r'^[\p{Script=Katakana}\p{Script=Hiragana}ー]+$', unicode: true);
final kanaRegex = RegExp(
r'^[\p{Script=Katakana}\p{Script=Hiragana}ー]+$',
unicode: true,
);

class XRefParts {
final String? kanjiRef;
final String? readingRef;
final int? senseOrderNum;

const XRefParts({
this.kanjiRef,
this.readingRef,
this.senseOrderNum,
}) : assert(kanjiRef != null || readingRef != null);
const XRefParts({this.kanjiRef, this.readingRef, this.senseOrderNum})
: assert(kanjiRef != null || readingRef != null);

Map<String, Object?> toJson() => {
'kanjiRef': kanjiRef,
'readingRef': readingRef,
'senseOrderNum': senseOrderNum,
};
'kanjiRef': kanjiRef,
'readingRef': readingRef,
'senseOrderNum': senseOrderNum,
};
}

class XRef {
final String entryId;
final String reading;

const XRef({
required this.entryId,
required this.reading,
});
const XRef({required this.entryId, required this.reading});
}

class Sense extends SQLWritable {
@@ -193,9 +185,9 @@ class Sense extends SQLWritable {

@override
Map<String, Object?> get sqlValue => {
'senseId': senseId,
'orderNum': orderNum,
};
'senseId': senseId,
'orderNum': orderNum,
};

bool get isEmpty =>
antonyms.isEmpty &&
@@ -18,8 +18,9 @@ ResolvedXref resolveXref(
XRefParts xref,
) {
List<Entry> candidateEntries = switch ((xref.kanjiRef, xref.readingRef)) {
(null, null) =>
throw Exception('Xref $xref has no kanji or reading reference'),
(null, null) => throw Exception(
'Xref $xref has no kanji or reading reference',
),
(String k, null) => entriesByKanji[k]!.toList(),
(null, String r) => entriesByReading[r]!.toList(),
(String k, String r) =>
@@ -28,8 +29,9 @@ ResolvedXref resolveXref(

// Filter out entries that don't have the number of senses specified in the xref
if (xref.senseOrderNum != null) {
candidateEntries
.retainWhere((entry) => entry.senses.length >= xref.senseOrderNum!);
candidateEntries.retainWhere(
(entry) => entry.senses.length >= xref.senseOrderNum!,
);
}

// If the xref has a reading ref but no kanji ref, and there are multiple
@@ -38,8 +40,9 @@ ResolvedXref resolveXref(
if (xref.kanjiRef == null &&
xref.readingRef != null &&
candidateEntries.length > 1) {
final candidatesWithEmptyKanji =
candidateEntries.where((entry) => entry.kanji.length == 0).toList();
final candidatesWithEmptyKanji = candidateEntries
.where((entry) => entry.kanji.length == 0)
.toList();

if (candidatesWithEmptyKanji.isNotEmpty) {
candidateEntries = candidatesWithEmptyKanji;
@@ -80,20 +83,14 @@ Future<void> seedJMDictData(List<Entry> entries, Database db) async {
elementId++;
b.insert(
JMdictTableNames.kanjiElement,
k.sqlValue..addAll({
'entryId': e.entryId,
'elementId': elementId,
}),
k.sqlValue..addAll({'entryId': e.entryId, 'elementId': elementId}),
);

for (final i in k.info) {
b.insert(
JMdictTableNames.kanjiInfo,
{
'elementId': elementId,
'info': i,
},
);
b.insert(JMdictTableNames.kanjiInfo, {
'elementId': elementId,
'info': i,
});
}
}

@@ -101,29 +98,20 @@ Future<void> seedJMDictData(List<Entry> entries, Database db) async {
elementId++;
b.insert(
JMdictTableNames.readingElement,
r.sqlValue..addAll({
'entryId': e.entryId,
'elementId': elementId,
}),
r.sqlValue..addAll({'entryId': e.entryId, 'elementId': elementId}),
);

for (final i in r.info) {
b.insert(
JMdictTableNames.readingInfo,
{
'elementId': elementId,
'info': i,
},
);
b.insert(JMdictTableNames.readingInfo, {
'elementId': elementId,
'info': i,
});
}
for (final res in r.restrictions) {
b.insert(
JMdictTableNames.readingRestriction,
{
'elementId': elementId,
'restriction': res,
},
);
b.insert(JMdictTableNames.readingRestriction, {
'elementId': elementId,
'restriction': res,
});
}
}
}
@@ -136,16 +124,20 @@ Future<void> seedJMDictData(List<Entry> entries, Database db) async {
for (final e in entries) {
for (final s in e.senses) {
b.insert(
JMdictTableNames.sense, s.sqlValue..addAll({'entryId': e.entryId}));
JMdictTableNames.sense,
s.sqlValue..addAll({'entryId': e.entryId}),
);
for (final d in s.dialects) {
b.insert(
JMdictTableNames.senseDialect,
{'senseId': s.senseId, 'dialect': d},
);
b.insert(JMdictTableNames.senseDialect, {
'senseId': s.senseId,
'dialect': d,
});
}
for (final f in s.fields) {
b.insert(
JMdictTableNames.senseField, {'senseId': s.senseId, 'field': f});
b.insert(JMdictTableNames.senseField, {
'senseId': s.senseId,
'field': f,
});
}
for (final i in s.info) {
b.insert(JMdictTableNames.senseInfo, {'senseId': s.senseId, 'info': i});
@@ -157,16 +149,18 @@ Future<void> seedJMDictData(List<Entry> entries, Database db) async {
b.insert(JMdictTableNames.sensePOS, {'senseId': s.senseId, 'pos': p});
}
for (final rk in s.restrictedToKanji) {
b.insert(
JMdictTableNames.senseRestrictedToKanji,
{'entryId': e.entryId, 'senseId': s.senseId, 'kanji': rk},
);
b.insert(JMdictTableNames.senseRestrictedToKanji, {
'entryId': e.entryId,
'senseId': s.senseId,
'kanji': rk,
});
}
for (final rr in s.restrictedToReading) {
b.insert(
JMdictTableNames.senseRestrictedToReading,
{'entryId': e.entryId, 'senseId': s.senseId, 'reading': rr},
);
b.insert(JMdictTableNames.senseRestrictedToReading, {
'entryId': e.entryId,
'senseId': s.senseId,
'reading': rr,
});
}
for (final ls in s.languageSource) {
b.insert(
@@ -220,17 +214,14 @@ Future<void> seedJMDictData(List<Entry> entries, Database db) async {
xref,
);

b.insert(
JMdictTableNames.senseSeeAlso,
{
'senseId': s.senseId,
'xrefEntryId': resolvedEntry.entry.entryId,
'seeAlsoKanji': xref.kanjiRef,
'seeAlsoReading': xref.readingRef,
'seeAlsoSense': xref.senseOrderNum,
'ambiguous': resolvedEntry.ambiguous,
},
);
b.insert(JMdictTableNames.senseSeeAlso, {
'senseId': s.senseId,
'xrefEntryId': resolvedEntry.entry.entryId,
'seeAlsoKanji': xref.kanjiRef,
'seeAlsoReading': xref.readingRef,
'seeAlsoSense': xref.senseOrderNum,
'ambiguous': resolvedEntry.ambiguous,
});
}

for (final ant in s.antonyms) {
@@ -16,7 +16,8 @@ List<int?> getPriorityValues(XmlElement e, String prefix) {
spec = int.parse(txt.substring(4));
else if (txt.startsWith('gai'))
gai = int.parse(txt.substring(3));
else if (txt.startsWith('nf')) nf = int.parse(txt.substring(2));
else if (txt.startsWith('nf'))
nf = int.parse(txt.substring(2));
}
return [news, ichi, spec, gai, nf];
}
@@ -46,10 +47,7 @@ XRefParts parseXrefParts(String s) {
);
}
} else {
result = XRefParts(
kanjiRef: parts[0],
readingRef: parts[1],
);
result = XRefParts(kanjiRef: parts[0], readingRef: parts[1]);
}
break;

@@ -102,8 +100,9 @@ List<Entry> parseJMDictData(XmlElement root) {

for (final (orderNum, r_ele) in entry.findElements('r_ele').indexed) {
final re_pri = getPriorityValues(r_ele, 're');
final readingDoesNotMatchKanji =
r_ele.findElements('re_nokanji').isNotEmpty;
final readingDoesNotMatchKanji = r_ele
.findElements('re_nokanji')
.isNotEmpty;
readingEls.add(
ReadingElement(
orderNum: orderNum + 1,
@@ -112,8 +111,10 @@ List<Entry> parseJMDictData(XmlElement root) {
.findElements('re_inf')
.map((e) => e.innerText.substring(1, e.innerText.length - 1))
.toList(),
restrictions:
r_ele.findElements('re_restr').map((e) => e.innerText).toList(),
restrictions: r_ele
.findElements('re_restr')
.map((e) => e.innerText)
.toList(),
reading: r_ele.findElements('reb').first.innerText,
news: re_pri[0],
ichi: re_pri[1],
@@ -129,10 +130,14 @@ List<Entry> parseJMDictData(XmlElement root) {
final result = Sense(
senseId: senseId,
orderNum: orderNum + 1,
restrictedToKanji:
sense.findElements('stagk').map((e) => e.innerText).toList(),
restrictedToReading:
sense.findElements('stagr').map((e) => e.innerText).toList(),
restrictedToKanji: sense
.findElements('stagk')
.map((e) => e.innerText)
.toList(),
restrictedToReading: sense
.findElements('stagr')
.map((e) => e.innerText)
.toList(),
pos: sense
.findElements('pos')
.map((e) => e.innerText.substring(1, e.innerText.length - 1))
@@ -13,42 +13,33 @@ class CodePoint extends SQLWritable {

@override
Map<String, Object?> get sqlValue => {
'kanji': kanji,
'type': type,
'codepoint': codepoint,
};
'kanji': kanji,
'type': type,
'codepoint': codepoint,
};
}

class Radical extends SQLWritable {
final String kanji;
final int radicalId;

const Radical({
required this.kanji,
required this.radicalId,
});
const Radical({required this.kanji, required this.radicalId});

@override
Map<String, Object?> get sqlValue => {
'kanji': kanji,
'radicalId': radicalId,
};
Map<String, Object?> get sqlValue => {'kanji': kanji, 'radicalId': radicalId};
}

class StrokeMiscount extends SQLWritable {
final String kanji;
final int strokeCount;

const StrokeMiscount({
required this.kanji,
required this.strokeCount,
});
const StrokeMiscount({required this.kanji, required this.strokeCount});

@override
Map<String, Object?> get sqlValue => {
'kanji': kanji,
'strokeCount': strokeCount,
};
'kanji': kanji,
'strokeCount': strokeCount,
};
}

class Variant extends SQLWritable {
@@ -64,10 +55,10 @@ class Variant extends SQLWritable {

@override
Map<String, Object?> get sqlValue => {
'kanji': kanji,
'type': type,
'variant': variant,
};
'kanji': kanji,
'type': type,
'variant': variant,
};
}

class DictionaryReference extends SQLWritable {
@@ -83,10 +74,10 @@ class DictionaryReference extends SQLWritable {

@override
Map<String, Object?> get sqlValue => {
'kanji': kanji,
'type': type,
'ref': ref,
};
'kanji': kanji,
'type': type,
'ref': ref,
};
}

class DictionaryReferenceMoro extends SQLWritable {
@@ -104,11 +95,11 @@ class DictionaryReferenceMoro extends SQLWritable {

@override
Map<String, Object?> get sqlValue => {
'kanji': kanji,
'ref': ref,
'volume': volume,
'page': page,
};
'kanji': kanji,
'ref': ref,
'volume': volume,
'page': page,
};
}

class QueryCode extends SQLWritable {
@@ -126,11 +117,11 @@ class QueryCode extends SQLWritable {

@override
Map<String, Object?> get sqlValue => {
'kanji': kanji,
'code': code,
'type': type,
'skipMisclassification': skipMisclassification,
};
'kanji': kanji,
'code': code,
'type': type,
'skipMisclassification': skipMisclassification,
};
}

class Reading extends SQLWritable {
@@ -146,10 +137,10 @@ class Reading extends SQLWritable {

@override
Map<String, Object?> get sqlValue => {
'kanji': kanji,
'type': type,
'reading': reading,
};
'kanji': kanji,
'type': type,
'reading': reading,
};
}

class Kunyomi extends SQLWritable {
@@ -165,10 +156,10 @@ class Kunyomi extends SQLWritable {

@override
Map<String, Object?> get sqlValue => {
'kanji': kanji,
'yomi': yomi,
'isJouyou': isJouyou,
};
'kanji': kanji,
'yomi': yomi,
'isJouyou': isJouyou,
};
}

class Onyomi extends SQLWritable {
@@ -186,11 +177,11 @@ class Onyomi extends SQLWritable {

@override
Map<String, Object?> get sqlValue => {
'kanji': kanji,
'yomi': yomi,
'isJouyou': isJouyou,
'type': type,
};
'kanji': kanji,
'yomi': yomi,
'isJouyou': isJouyou,
'type': type,
};
}

class Meaning extends SQLWritable {
@@ -206,10 +197,10 @@ class Meaning extends SQLWritable {

@override
Map<String, Object?> get sqlValue => {
'kanji': kanji,
'language': language,
'meaning': meaning,
};
'kanji': kanji,
'language': language,
'meaning': meaning,
};
}

class Character extends SQLWritable {
@@ -255,10 +246,10 @@ class Character extends SQLWritable {
});

Map<String, Object?> get sqlValue => {
'literal': literal,
'grade': grade,
'strokeCount': strokeCount,
'frequency': frequency,
'jlpt': jlpt,
};
'literal': literal,
'grade': grade,
'strokeCount': strokeCount,
'frequency': frequency,
'jlpt': jlpt,
};
}
@@ -19,10 +19,7 @@ Future<void> seedKANJIDICData(List<Character> characters, Database db) async {
assert(c.radical != null, 'Radical name without radical');
b.insert(
KANJIDICTableNames.radicalName,
{
'radicalId': c.radical!.radicalId,
'name': n,
},
{'radicalId': c.radical!.radicalId, 'name': n},
conflictAlgorithm: ConflictAlgorithm.ignore,
);
}
@@ -34,13 +31,10 @@ Future<void> seedKANJIDICData(List<Character> characters, Database db) async {
b.insert(KANJIDICTableNames.radical, c.radical!.sqlValue);
}
for (final sm in c.strokeMiscounts) {
b.insert(
KANJIDICTableNames.strokeMiscount,
{
'kanji': c.literal,
'strokeCount': sm,
},
);
b.insert(KANJIDICTableNames.strokeMiscount, {
'kanji': c.literal,
'strokeCount': sm,
});
}
for (final v in c.variants) {
b.insert(KANJIDICTableNames.variant, v.sqlValue);
@@ -64,24 +58,24 @@ Future<void> seedKANJIDICData(List<Character> characters, Database db) async {
}
for (final (i, y) in c.kunyomi.indexed) {
b.insert(
KANJIDICTableNames.kunyomi, y.sqlValue..addAll({'orderNum': i + 1}));
KANJIDICTableNames.kunyomi,
y.sqlValue..addAll({'orderNum': i + 1}),
);
}
for (final (i, y) in c.onyomi.indexed) {
b.insert(
KANJIDICTableNames.onyomi, y.sqlValue..addAll({'orderNum': i + 1}));
KANJIDICTableNames.onyomi,
y.sqlValue..addAll({'orderNum': i + 1}),
);
}
for (final (i, m) in c.meanings.indexed) {
b.insert(
KANJIDICTableNames.meaning, m.sqlValue..addAll({'orderNum': i + 1}));
KANJIDICTableNames.meaning,
m.sqlValue..addAll({'orderNum': i + 1}),
);
}
for (final n in c.nanori) {
b.insert(
KANJIDICTableNames.nanori,
{
'kanji': c.literal,
'nanori': n,
},
);
b.insert(KANJIDICTableNames.nanori, {'kanji': c.literal, 'nanori': n});
}
}
await b.commit(noResult: true);
@@ -19,18 +19,24 @@ List<Character> parseKANJIDICData(XmlElement root) {
result.add(
Character(
literal: kanji,
strokeCount:
int.parse(misc.findElements('stroke_count').first.innerText),
strokeCount: int.parse(
misc.findElements('stroke_count').first.innerText,
),
grade: int.tryParse(
misc.findElements('grade').firstOrNull?.innerText ?? ''),
misc.findElements('grade').firstOrNull?.innerText ?? '',
),
frequency: int.tryParse(
misc.findElements('freq').firstOrNull?.innerText ?? ''),
misc.findElements('freq').firstOrNull?.innerText ?? '',
),
jlpt: int.tryParse(
misc.findElements('jlpt').firstOrNull?.innerText ?? '',
),
radicalName:
misc.findElements('rad_name').map((e) => e.innerText).toList(),
codepoints: codepoint
radicalName: misc
.findElements('rad_name')
.map((e) => e.innerText)
.toList(),
codepoints:
codepoint
?.findElements('cp_value')
.map(
(e) => CodePoint(
@@ -45,10 +51,7 @@ List<Character> parseKANJIDICData(XmlElement root) {
?.findElements('rad_value')
.where((e) => e.getAttribute('rad_type') == 'classical')
.map(
(e) => Radical(
kanji: kanji,
radicalId: int.parse(e.innerText),
),
(e) => Radical(kanji: kanji, radicalId: int.parse(e.innerText)),
)
.firstOrNull,
strokeMiscounts: misc
@@ -66,7 +69,8 @@ List<Character> parseKANJIDICData(XmlElement root) {
),
)
.toList(),
dictionaryReferences: dic_number
dictionaryReferences:
dic_number
?.findElements('dic_ref')
.where((e) => e.getAttribute('dr_type') != 'moro')
.map(
@@ -78,7 +82,8 @@ List<Character> parseKANJIDICData(XmlElement root) {
)
.toList() ??
[],
dictionaryReferencesMoro: dic_number
dictionaryReferencesMoro:
dic_number
?.findElements('dic_ref')
.where((e) => e.getAttribute('dr_type') == 'moro')
.map(
@@ -102,7 +107,8 @@ List<Character> parseKANJIDICData(XmlElement root) {
),
)
.toList(),
readings: reading_meaning
readings:
reading_meaning
?.findAllElements('reading')
.where(
(e) =>
@@ -117,7 +123,8 @@ List<Character> parseKANJIDICData(XmlElement root) {
)
.toList() ??
[],
kunyomi: reading_meaning
kunyomi:
reading_meaning
?.findAllElements('reading')
.where((e) => e.getAttribute('r_type') == 'ja_kun')
.map(
@@ -129,19 +136,22 @@ List<Character> parseKANJIDICData(XmlElement root) {
)
.toList() ??
[],
onyomi: reading_meaning
onyomi:
reading_meaning
?.findAllElements('reading')
.where((e) => e.getAttribute('r_type') == 'ja_on')
.map(
(e) => Onyomi(
kanji: kanji,
yomi: transliterateKatakanaToHiragana(e.innerText),
isJouyou: e.getAttribute('r_status') == 'jy',
type: e.getAttribute('on_type')),
kanji: kanji,
yomi: transliterateKatakanaToHiragana(e.innerText),
isJouyou: e.getAttribute('r_status') == 'jy',
type: e.getAttribute('on_type'),
),
)
.toList() ??
[],
meanings: reading_meaning
meanings:
reading_meaning
?.findAllElements('meaning')
.map(
(e) => Meaning(
@@ -152,7 +162,8 @@ List<Character> parseKANJIDICData(XmlElement root) {
)
.toList() ??
[],
nanori: reading_meaning
nanori:
reading_meaning
?.findElements('nanori')
.map((e) => e.innerText)
.toList() ??
@@ -33,21 +33,22 @@ Future<Database> openLocalDb({
throw Exception("JADB_PATH does not exist: $jadbPath");
}

final db = await createDatabaseFactoryFfi(
ffiInit: () =>
open.overrideForAll(() => DynamicLibrary.open(libsqlitePath!)),
).openDatabase(
jadbPath,
options: OpenDatabaseOptions(
onConfigure: (db) async {
if (walMode) {
await db.execute("PRAGMA journal_mode = WAL");
}
await db.execute("PRAGMA foreign_keys = ON");
},
readOnly: !readWrite,
),
);
final db =
await createDatabaseFactoryFfi(
ffiInit: () =>
open.overrideForAll(() => DynamicLibrary.open(libsqlitePath!)),
).openDatabase(
jadbPath,
options: OpenDatabaseOptions(
onConfigure: (db) async {
if (walMode) {
await db.execute("PRAGMA journal_mode = WAL");
}
await db.execute("PRAGMA foreign_keys = ON");
},
readOnly: !readWrite,
),
);

if (verifyTablesExist) {
await db.jadbVerifyTables();

@@ -3,8 +3,10 @@ import 'dart:io';
Iterable<String> parseRADKFILEBlocks(File radkfile) {
final String content = File('data/tmp/radkfile_utf8').readAsStringSync();

final Iterable<String> blocks =
content.replaceAll(RegExp(r'^#.*$'), '').split(r'$').skip(2);
final Iterable<String> blocks = content
.replaceAll(RegExp(r'^#.*$'), '')
.split(r'$')
.skip(2);

return blocks;
}
@@ -1,27 +1,20 @@
import 'package:jadb/table_names/radkfile.dart';
import 'package:sqflite_common/sqlite_api.dart';

Future<void> seedRADKFILEData(
Iterable<String> blocks,
Database db,
) async {
Future<void> seedRADKFILEData(Iterable<String> blocks, Database db) async {
final b = db.batch();

for (final block in blocks) {
final String radical = block[1];
final List<String> kanjiList = block
.replaceFirst(RegExp(r'.*\n'), '')
.split('')
..removeWhere((e) => e == '' || e == '\n');
final List<String> kanjiList =
block.replaceFirst(RegExp(r'.*\n'), '').split('')
..removeWhere((e) => e == '' || e == '\n');

for (final kanji in kanjiList.toSet()) {
b.insert(
RADKFILETableNames.radkfile,
{
'radical': radical,
'kanji': kanji,
},
);
b.insert(RADKFILETableNames.radkfile, {
'radical': radical,
'kanji': kanji,
});
}
}
@@ -31,23 +31,27 @@ Future<List<JLPTRankedWord>> parseJLPTRankedWords(
final kanji = (row[0] as String).isEmpty
? null
: (row[0] as String)
.replaceFirst(RegExp('^お・'), '')
.replaceAll(RegExp(r'(.*)'), '');
.replaceFirst(RegExp('^お・'), '')
.replaceAll(RegExp(r'(.*)'), '');

final readings = (row[1] as String)
.split(RegExp('[・/、(:?\s+)]'))
.map((e) => e.trim())
.toList();

final meanings =
(row[2] as String).split(',').expand(cleanMeaning).toList();
final meanings = (row[2] as String)
.split(',')
.expand(cleanMeaning)
.toList();

result.add(JLPTRankedWord(
readings: readings,
kanji: kanji,
jlptLevel: jlptLevel,
meanings: meanings,
));
result.add(
JLPTRankedWord(
readings: readings,
kanji: kanji,
jlptLevel: jlptLevel,
meanings: meanings,
),
);
}
}
@@ -3,47 +3,37 @@ import 'package:jadb/_data_ingestion/tanos-jlpt/objects.dart';
import 'package:jadb/_data_ingestion/tanos-jlpt/overrides.dart';
import 'package:sqflite_common/sqlite_api.dart';

Future<List<int>> _findReadingCandidates(
JLPTRankedWord word,
Database db,
) =>
db
.query(
JMdictTableNames.readingElement,
columns: ['entryId'],
where:
'"reading" IN (${List.filled(word.readings.length, '?').join(',')})',
whereArgs: [...word.readings],
)
.then((rows) => rows.map((row) => row['entryId'] as int).toList());
Future<List<int>> _findReadingCandidates(JLPTRankedWord word, Database db) => db
.query(
JMdictTableNames.readingElement,
columns: ['entryId'],
where:
'"reading" IN (${List.filled(word.readings.length, '?').join(',')})',
whereArgs: [...word.readings],
)
.then((rows) => rows.map((row) => row['entryId'] as int).toList());

Future<List<int>> _findKanjiCandidates(
JLPTRankedWord word,
Database db,
) =>
db
.query(
JMdictTableNames.kanjiElement,
columns: ['entryId'],
where: 'reading = ?',
whereArgs: [word.kanji],
)
.then((rows) => rows.map((row) => row['entryId'] as int).toList());
Future<List<int>> _findKanjiCandidates(JLPTRankedWord word, Database db) => db
.query(
JMdictTableNames.kanjiElement,
columns: ['entryId'],
where: 'reading = ?',
whereArgs: [word.kanji],
)
.then((rows) => rows.map((row) => row['entryId'] as int).toList());

Future<List<(int, String)>> _findSenseCandidates(
JLPTRankedWord word,
Database db,
) =>
db.rawQuery(
) => db
.rawQuery(
'SELECT entryId, phrase '
'FROM "${JMdictTableNames.senseGlossary}" '
'JOIN "${JMdictTableNames.sense}" USING (senseId)'
'WHERE phrase IN (${List.filled(
word.meanings.length,
'?',
).join(',')})',
'WHERE phrase IN (${List.filled(word.meanings.length, '?').join(',')})',
[...word.meanings],
).then(
)
.then(
(rows) => rows
.map((row) => (row['entryId'] as int, row['phrase'] as String))
.toList(),
@@ -55,8 +45,10 @@ Future<int?> findEntry(
bool useOverrides = true,
}) async {
final List<int> readingCandidates = await _findReadingCandidates(word, db);
final List<(int, String)> senseCandidates =
await _findSenseCandidates(word, db);
final List<(int, String)> senseCandidates = await _findSenseCandidates(
word,
db,
);

List<int> entryIds;

@@ -71,8 +63,10 @@ Future<int?> findEntry(
print('No entry found, trying to combine with senses');

entryIds = readingCandidates
.where((readingId) =>
senseCandidates.any((sense) => sense.$1 == readingId))
.where(
(readingId) =>
senseCandidates.any((sense) => sense.$1 == readingId),
)
.toList();
}
} else {
@@ -88,12 +82,15 @@ Future<int?> findEntry(

if (overrideEntries.length > 1) {
throw Exception(
'Multiple override entries found for ${word.toString()}: $entryIds');
'Multiple override entries found for ${word.toString()}: $entryIds',
);
} else if (overrideEntries.length == 0 &&
!word.readings.any((reading) =>
TANOS_JLPT_OVERRIDES.containsKey((word.kanji, reading)))) {
!word.readings.any(
(reading) => TANOS_JLPT_OVERRIDES.containsKey((word.kanji, reading)),
)) {
throw Exception(
'No override entry found for ${word.toString()}: $entryIds');
'No override entry found for ${word.toString()}: $entryIds',
);
}

print('Found override: ${overrideEntries.firstOrNull}');
@@ -103,7 +100,8 @@ Future<int?> findEntry(

if (entryIds.length > 1) {
throw Exception(
'Multiple override entries found for ${word.toString()}: $entryIds');
'Multiple override entries found for ${word.toString()}: $entryIds',
);
} else if (entryIds.isEmpty) {
throw Exception('No entry found for ${word.toString()}');
}
@@ -12,13 +12,10 @@ Future<void> seedTanosJLPTData(
final entryIds = jlptLevel.value;

for (final entryId in entryIds) {
b.insert(
TanosJLPTTableNames.jlptTag,
{
'entryId': entryId,
'jlptLevel': level,
},
);
b.insert(TanosJLPTTableNames.jlptTag, {
'entryId': entryId,
'jlptLevel': level,
});
}
}

@@ -36,14 +36,17 @@ class CreateDb extends Command {
);

bool failed = false;
await seedData(db).then((_) {
print("Database created successfully");
}).catchError((error) {
print("Error creating database: $error");
failed = true;
}).whenComplete(() {
db.close();
});
await seedData(db)
.then((_) {
print("Database created successfully");
})
.catchError((error) {
print("Error creating database: $error");
failed = true;
})
.whenComplete(() {
db.close();
});
if (failed) {
exit(1);
} else {

@@ -63,7 +63,8 @@ Future<void> resolveExisting(
for (final (i, word) in rankedWords.indexed) {
try {
print(
'[${(i + 1).toString().padLeft(4, '0')}/${rankedWords.length}] ${word.toString()}');
'[${(i + 1).toString().padLeft(4, '0')}/${rankedWords.length}] ${word.toString()}',
);
await findEntry(word, db, useOverrides: useOverrides);
} catch (e) {
print(e);
@@ -78,16 +79,19 @@ Future<void> resolveExisting(

print('Statistics:');
for (final jlptLevel in ['N5', 'N4', 'N3', 'N2', 'N1']) {
final missingWordCount =
missingWords.where((e) => e.jlptLevel == jlptLevel).length;
final totalWordCount =
rankedWords.where((e) => e.jlptLevel == jlptLevel).length;
final missingWordCount = missingWords
.where((e) => e.jlptLevel == jlptLevel)
.length;
final totalWordCount = rankedWords
.where((e) => e.jlptLevel == jlptLevel)
.length;

final failureRate =
((missingWordCount / totalWordCount) * 100).toStringAsFixed(2);
final failureRate = ((missingWordCount / totalWordCount) * 100)
.toStringAsFixed(2);

print(
'${jlptLevel} failures: [${missingWordCount}/${totalWordCount}] (${failureRate}%)');
'${jlptLevel} failures: [${missingWordCount}/${totalWordCount}] (${failureRate}%)',
);
}

print('Not able to determine the entry for ${missingWords.length} words');
@@ -17,24 +17,11 @@ class QueryWord extends Command {
addLibsqliteArg(argParser);
addJadbArg(argParser);

argParser.addFlag(
'json',
abbr: 'j',
help: 'Output results in JSON format',
);
argParser.addFlag('json', abbr: 'j', help: 'Output results in JSON format');

argParser.addOption(
'page',
abbr: 'p',
valueHelp: 'NUM',
defaultsTo: '1',
);
argParser.addOption('page', abbr: 'p', valueHelp: 'NUM', defaultsTo: '1');

argParser.addOption(
'pageSize',
valueHelp: 'NUM',
defaultsTo: '30',
);
argParser.addOption('pageSize', valueHelp: 'NUM', defaultsTo: '30');
}

Future<void> run() async {
@@ -62,11 +49,7 @@ class QueryWord extends Command {
final int? maybeId = int.tryParse(searchWord);

if (maybeId != null && maybeId >= 1000000) {
await _searchId(
db,
maybeId,
argResults!.flag('json'),
);
await _searchId(db, maybeId, argResults!.flag('json'));
} else {
await _searchWord(
db,
@@ -78,11 +61,7 @@ class QueryWord extends Command {
}
}

Future<void> _searchId(
DatabaseExecutor db,
int id,
bool jsonOutput,
) async {
Future<void> _searchId(DatabaseExecutor db, int id, bool jsonOutput) async {
final time = Stopwatch()..start();
final result = await JaDBConnection(db).jadbGetWordById(id);
time.stop();
@@ -112,11 +91,9 @@ class QueryWord extends Command {
time.stop();

final time2 = Stopwatch()..start();
final result = await JaDBConnection(db).jadbSearchWord(
searchWord,
page: page,
pageSize: pageSize,
);
final result = await JaDBConnection(
db,
).jadbSearchWord(searchWord, page: page, pageSize: pageSize);
time2.stop();

if (result == null) {
@@ -1,6 +1,6 @@
/// Jouyou kanji sorted primarily by grades and secondarily by strokes.
const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
{
const Map<int, Map<int, List<String>>>
JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT = {
1: {
1: ['一'],
2: ['力', '八', '入', '二', '人', '十', '七', '九'],
@@ -12,7 +12,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
8: ['林', '青', '空', '金', '学', '雨'],
9: ['草', '音'],
10: ['校'],
12: ['森']
12: ['森'],
},
2: {
2: ['刀'],
@@ -35,7 +35,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'戸',
'元',
'牛',
'引'
'引',
],
5: ['用', '北', '母', '半', '冬', '台', '矢', '市', '広', '古', '兄', '外'],
6: [
@@ -58,7 +58,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'交',
'会',
'回',
'羽'
'羽',
],
7: [
'里',
@@ -78,7 +78,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'近',
'汽',
'角',
'何'
'何',
],
8: [
'夜',
@@ -95,7 +95,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'国',
'京',
'岩',
'画'
'画',
],
9: [
'風',
@@ -115,7 +115,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'計',
'活',
'海',
'科'
'科',
],
10: ['馬', '通', '書', '弱', '時', '紙', '高', '原', '帰', '記', '家', '夏'],
11: ['理', '野', '鳥', '組', '船', '雪', '週', '細', '黒', '黄', '教', '強', '魚'],
@@ -124,7 +124,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
14: ['鳴', '聞', '読', '算', '語', '歌'],
15: ['線'],
16: ['頭', '親'],
18: ['曜', '顔']
18: ['曜', '顔'],
},
3: {
2: ['丁'],
@@ -146,7 +146,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'皿',
'号',
'去',
'央'
'央',
],
6: ['列', '両', '羊', '有', '全', '州', '守', '式', '次', '死', '向', '血', '曲', '安'],
7: ['役', '返', '坂', '豆', '投', '対', '身', '助', '住', '決', '君', '局', '究', '医'],
@@ -178,7 +178,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'岸',
'泳',
'育',
'委'
'委',
],
9: [
'洋',
@@ -210,7 +210,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'急',
'客',
'界',
'屋'
'屋',
],
10: [
'旅',
@@ -232,7 +232,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'起',
'荷',
'院',
'員'
'員',
],
11: [
'問',
@@ -253,7 +253,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'終',
'祭',
'球',
'悪'
'悪',
],
12: [
'落',
@@ -282,13 +282,13 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'開',
'温',
'運',
'飲'
'飲',
],
13: ['路', '福', '農', '鉄', '想', '詩', '業', '漢', '感', '意', '暗'],
14: ['練', '緑', '様', '鼻', '銀', '駅'],
15: ['箱', '調', '談', '横'],
16: ['薬', '整', '橋', '館'],
18: ['題']
18: ['題'],
},
4: {
4: ['夫', '不', '井', '氏', '欠'],
@@ -318,7 +318,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'岐',
'完',
'改',
'位'
'位',
],
8: [
'例',
@@ -346,7 +346,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'芽',
'果',
'岡',
'英'
'英',
],
9: [
'要',
@@ -367,7 +367,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'建',
'軍',
'栄',
'茨'
'茨',
],
10: [
'連',
@@ -389,7 +389,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'訓',
'挙',
'害',
'案'
'案',
],
11: [
'陸',
@@ -410,7 +410,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'康',
'健',
'械',
'貨'
'貨',
],
12: [
'量',
@@ -434,7 +434,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'覚',
'街',
'賀',
'媛'
'媛',
],
13: ['働', '置', '続', '戦', '節', '照', '辞', '試', '群', '塩', '愛'],
14: ['徳', '説', '静', '種', '察', '熊', '漁', '旗', '関', '管'],
@@ -442,7 +442,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
16: ['録', '積', '機'],
18: ['類', '験', '観'],
19: ['鏡', '願'],
20: ['競', '議']
20: ['競', '議'],
},
5: {
3: ['士', '久'],
@@ -464,7 +464,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'技',
'快',
'応',
'囲'
'囲',
],
8: [
'武',
@@ -484,7 +484,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'河',
'価',
'往',
'易'
'易',
],
9: ['迷', '保', '独', '則', '祖', '政', '査', '厚', '故', '限', '型', '逆', '紀'],
10: [
@@ -505,7 +505,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'個',
'格',
'桜',
'益'
'益',
],
11: [
'略',
@@ -537,7 +537,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'基',
'眼',
'液',
'移'
'移',
],
12: [
'貿',
@@ -561,7 +561,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'検',
'喜',
'過',
'営'
'営',
],
13: ['夢', '豊', '墓', '損', '勢', '準', '飼', '資', '罪', '鉱', '禁', '義', '幹', '解'],
14: [
@@ -583,14 +583,14 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'構',
'境',
'慣',
'演'
'演',
],
15: ['暴', '編', '導', '賞', '質', '賛', '潔', '確'],
16: ['輸', '燃', '築', '興', '衛'],
17: ['績', '謝', '講'],
18: ['職', '織', '額'],
19: ['識'],
20: ['護']
20: ['護'],
},
6: {
3: ['亡', '寸', '己', '干'],
@@ -618,7 +618,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'供',
'拡',
'沿',
'延'
'延',
],
9: [
'律',
@@ -641,7 +641,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'巻',
'革',
'映',
'胃'
'胃',
],
10: [
'朗',
@@ -667,7 +667,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'降',
'胸',
'株',
'恩'
'恩',
],
11: [
'翌',
@@ -689,7 +689,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'済',
'郷',
'域',
'異'
'異',
],
12: [
'棒',
@@ -710,7 +710,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'勤',
'貴',
'揮',
'割'
'割',
],
13: [
'裏',
@@ -727,14 +727,14 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'傷',
'署',
'源',
'絹'
'絹',
],
14: ['模', '暮', '認', '層', '銭', '障', '磁', '誌', '穀', '誤', '疑', '閣'],
15: ['論', '敵', '潮', '誕', '蔵', '諸', '熟', '権', '劇', '遺'],
16: ['奮', '糖', '操', '縦', '樹', '鋼', '憲', '激'],
17: ['覧', '優', '縮', '厳'],
18: ['臨', '難', '簡'],
19: ['臓', '警']
19: ['臓', '警'],
},
7: {
1: ['乙'],
@@ -760,7 +760,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'斤',
'凶',
'刈',
'介'
'介',
],
5: [
'矛',
@@ -790,7 +790,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'且',
'瓦',
'牙',
'凹'
'凹',
],
6: [
'劣',
@@ -831,7 +831,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'汗',
'汚',
'芋',
'扱'
'扱',
],
7: [
'弄',
@@ -896,7 +896,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'肝',
'戒',
'壱',
'亜'
'亜',
],
8: [
'枠',
@@ -989,7 +989,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'押',
'炎',
'依',
'宛'
'宛',
],
9: [
'郎',
@@ -1081,7 +1081,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'畏',
'為',
'威',
'哀'
'哀',
],
10: [
'脇',
@@ -1206,7 +1206,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'浦',
'畝',
'唄',
'挨'
'挨',
],
11: [
'累',
@@ -1323,7 +1323,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'淫',
'逸',
'萎',
'尉'
'尉',
],
12: [
'腕',
@@ -1435,7 +1435,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'椅',
'偉',
'嵐',
'握'
'握',
],
13: [
'賄',
@@ -1552,7 +1552,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'猿',
'煙',
'違',
'彙'
'彙',
],
14: [
'漏',
@@ -1617,7 +1617,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'箇',
'寡',
'隠',
'維'
'維',
],
15: [
'霊',
@@ -1706,7 +1706,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'謁',
'鋭',
'影',
'慰'
'慰',
],
16: [
'錬',
@@ -1764,7 +1764,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'壊',
'穏',
'憶',
'緯'
'緯',
],
17: [
'齢',
@@ -1801,7 +1801,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'轄',
'嚇',
'臆',
'曖'
'曖',
],
18: [
'糧',
@@ -1830,7 +1830,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'韓',
'鎌',
'顎',
'穫'
'穫',
],
19: [
'麓',
@@ -1851,13 +1851,13 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'鶏',
'繰',
'艶',
'韻'
'韻',
],
20: ['欄', '騰', '籍', '醸', '譲', '鐘', '懸', '響'],
21: ['露', '躍', '魔', '鶴', '顧', '艦'],
22: ['籠', '襲', '驚'],
23: ['鑑'],
29: ['鬱']
29: ['鬱'],
},
};
@@ -1866,7 +1866,8 @@ final Map<int, List<String>> JOUYOU_KANJI_BY_GRADES =
.expand((entry) => entry.value.entries)
.map((entry) => MapEntry(entry.key, entry.value))
.fold<Map<int, List<String>>>(
{},
(acc, entry) => acc
..putIfAbsent(entry.key, () => [])
..update(entry.key, (value) => value..addAll(entry.value)));
{},
(acc, entry) => acc
..putIfAbsent(entry.key, () => [])
..update(entry.key, (value) => value..addAll(entry.value)),
);
@@ -31,7 +31,7 @@ const Map<int, List<String>> RADICALS = {
'九',
'ユ',
'乃',
'𠂉'
'𠂉',
],
3: [
'⻌',
@@ -78,7 +78,7 @@ const Map<int, List<String>> RADICALS = {
'也',
'亡',
'及',
'久'
'久',
],
4: [
'⺹',
@@ -124,7 +124,7 @@ const Map<int, List<String>> RADICALS = {
'五',
'屯',
'巴',
'毋'
'毋',
],
5: [
'玄',
@@ -154,7 +154,7 @@ const Map<int, List<String>> RADICALS = {
'冊',
'母',
'⺲',
'牙'
'牙',
],
6: [
'瓜',
@@ -181,7 +181,7 @@ const Map<int, List<String>> RADICALS = {
'血',
'行',
'衣',
'西'
'西',
],
7: [
'臣',
@@ -204,7 +204,7 @@ const Map<int, List<String>> RADICALS = {
'釆',
'里',
'舛',
'麦'
'麦',
],
8: ['金', '長', '門', '隶', '隹', '雨', '青', '非', '奄', '岡', '免', '斉'],
9: ['面', '革', '韭', '音', '頁', '風', '飛', '食', '首', '香', '品'],
@@ -19,20 +19,14 @@ enum JMdictDialect {
final String id;
final String description;

const JMdictDialect({
required this.id,
required this.description,
});
const JMdictDialect({required this.id, required this.description});

static JMdictDialect fromId(String id) => JMdictDialect.values.firstWhere(
(e) => e.id == id,
orElse: () => throw Exception('Unknown id: $id'),
);
(e) => e.id == id,
orElse: () => throw Exception('Unknown id: $id'),
);

Map<String, Object?> toJson() => {
'id': id,
'description': description,
};
Map<String, Object?> toJson() => {'id': id, 'description': description};

static JMdictDialect fromJson(Map<String, Object?> json) =>
JMdictDialect.values.firstWhere(

@@ -102,20 +102,14 @@ enum JMdictField {
final String id;
final String description;

const JMdictField({
required this.id,
required this.description,
});
const JMdictField({required this.id, required this.description});

static JMdictField fromId(String id) => JMdictField.values.firstWhere(
(e) => e.id == id,
orElse: () => throw Exception('Unknown id: $id'),
);
(e) => e.id == id,
orElse: () => throw Exception('Unknown id: $id'),
);

Map<String, Object?> toJson() => {
'id': id,
'description': description,
};
Map<String, Object?> toJson() => {'id': id, 'description': description};

static JMdictField fromJson(Map<String, Object?> json) =>
JMdictField.values.firstWhere(

@@ -13,20 +13,14 @@ enum JMdictKanjiInfo {
final String id;
final String description;

const JMdictKanjiInfo({
required this.id,
required this.description,
});
const JMdictKanjiInfo({required this.id, required this.description});

static JMdictKanjiInfo fromId(String id) => JMdictKanjiInfo.values.firstWhere(
(e) => e.id == id,
orElse: () => throw Exception('Unknown id: $id'),
);
(e) => e.id == id,
orElse: () => throw Exception('Unknown id: $id'),
);

Map<String, Object?> toJson() => {
'id': id,
'description': description,
};
Map<String, Object?> toJson() => {'id': id, 'description': description};

static JMdictKanjiInfo fromJson(Map<String, Object?> json) =>
JMdictKanjiInfo.values.firstWhere(

@@ -74,20 +74,14 @@ enum JMdictMisc {
final String id;
final String description;

const JMdictMisc({
required this.id,
required this.description,
});
const JMdictMisc({required this.id, required this.description});

static JMdictMisc fromId(String id) => JMdictMisc.values.firstWhere(
(e) => e.id == id,
orElse: () => throw Exception('Unknown id: $id'),
);
(e) => e.id == id,
orElse: () => throw Exception('Unknown id: $id'),
);

Map<String, Object?> toJson() => {
'id': id,
'description': description,
};
Map<String, Object?> toJson() => {'id': id, 'description': description};

static JMdictMisc fromJson(Map<String, Object?> json) =>
JMdictMisc.values.firstWhere(
@@ -202,14 +202,11 @@ enum JMdictPOS {
String get shortDescription => _shortDescription ?? description;

static JMdictPOS fromId(String id) => JMdictPOS.values.firstWhere(
(e) => e.id == id,
orElse: () => throw Exception('Unknown id: $id'),
);
(e) => e.id == id,
orElse: () => throw Exception('Unknown id: $id'),
);

Map<String, Object?> toJson() => {
'id': id,
'description': description,
};
Map<String, Object?> toJson() => {'id': id, 'description': description};

static JMdictPOS fromJson(Map<String, Object?> json) =>
JMdictPOS.values.firstWhere(

@@ -15,10 +15,7 @@ enum JMdictReadingInfo {
final String id;
final String description;

const JMdictReadingInfo({
required this.id,
required this.description,
});
const JMdictReadingInfo({required this.id, required this.description});

static JMdictReadingInfo fromId(String id) =>
JMdictReadingInfo.values.firstWhere(
@@ -26,10 +23,7 @@ enum JMdictReadingInfo {
orElse: () => throw Exception('Unknown id: $id'),
);

Map<String, Object?> toJson() => {
'id': id,
'description': description,
};
Map<String, Object?> toJson() => {'id': id, 'description': description};

static JMdictReadingInfo fromJson(Map<String, Object?> json) =>
JMdictReadingInfo.values.firstWhere(
@@ -26,19 +26,14 @@ class KanjiSearchRadical extends Equatable {
});

@override
List<Object> get props => [
symbol,
this.names,
forms,
meanings,
];
List<Object> get props => [symbol, this.names, forms, meanings];

Map<String, dynamic> toJson() => {
'symbol': symbol,
'names': names,
'forms': forms,
'meanings': meanings,
};
'symbol': symbol,
'names': names,
'forms': forms,
'meanings': meanings,
};

factory KanjiSearchRadical.fromJson(Map<String, dynamic> json) {
return KanjiSearchRadical(

@@ -89,46 +89,46 @@ class KanjiSearchResult extends Equatable {
@override
// ignore: public_member_api_docs
List<Object?> get props => [
taughtIn,
jlptLevel,
newspaperFrequencyRank,
strokeCount,
meanings,
kunyomi,
onyomi,
// kunyomiExamples,
// onyomiExamples,
radical,
parts,
codepoints,
kanji,
nanori,
alternativeLanguageReadings,
strokeMiscounts,
queryCodes,
dictionaryReferences,
];
taughtIn,
jlptLevel,
newspaperFrequencyRank,
strokeCount,
meanings,
kunyomi,
onyomi,
// kunyomiExamples,
// onyomiExamples,
radical,
parts,
codepoints,
kanji,
nanori,
alternativeLanguageReadings,
strokeMiscounts,
queryCodes,
dictionaryReferences,
];

Map<String, dynamic> toJson() => {
'kanji': kanji,
'taughtIn': taughtIn,
'jlptLevel': jlptLevel,
'newspaperFrequencyRank': newspaperFrequencyRank,
'strokeCount': strokeCount,
'meanings': meanings,
'kunyomi': kunyomi,
'onyomi': onyomi,
// 'onyomiExamples': onyomiExamples,
// 'kunyomiExamples': kunyomiExamples,
'radical': radical?.toJson(),
'parts': parts,
'codepoints': codepoints,
'nanori': nanori,
'alternativeLanguageReadings': alternativeLanguageReadings,
'strokeMiscounts': strokeMiscounts,
'queryCodes': queryCodes,
'dictionaryReferences': dictionaryReferences,
};
'kanji': kanji,
'taughtIn': taughtIn,
'jlptLevel': jlptLevel,
'newspaperFrequencyRank': newspaperFrequencyRank,
'strokeCount': strokeCount,
'meanings': meanings,
'kunyomi': kunyomi,
'onyomi': onyomi,
// 'onyomiExamples': onyomiExamples,
// 'kunyomiExamples': kunyomiExamples,
'radical': radical?.toJson(),
'parts': parts,
'codepoints': codepoints,
'nanori': nanori,
'alternativeLanguageReadings': alternativeLanguageReadings,
'strokeMiscounts': strokeMiscounts,
'queryCodes': queryCodes,
'dictionaryReferences': dictionaryReferences,
};

factory KanjiSearchResult.fromJson(Map<String, dynamic> json) {
return KanjiSearchResult(
@@ -156,23 +156,20 @@ class KanjiSearchResult extends Equatable {
nanori: (json['nanori'] as List).map((e) => e as String).toList(),
alternativeLanguageReadings:
(json['alternativeLanguageReadings'] as Map<String, dynamic>).map(
(key, value) => MapEntry(
key,
(value as List).map((e) => e as String).toList(),
),
),
strokeMiscounts:
(json['strokeMiscounts'] as List).map((e) => e as int).toList(),
(key, value) =>
MapEntry(key, (value as List).map((e) => e as String).toList()),
),
strokeMiscounts: (json['strokeMiscounts'] as List)
.map((e) => e as int)
.toList(),
queryCodes: (json['queryCodes'] as Map<String, dynamic>).map(
(key, value) => MapEntry(
key,
(value as List).map((e) => e as String).toList(),
),
(key, value) =>
MapEntry(key, (value as List).map((e) => e as String).toList()),
),
dictionaryReferences:
(json['dictionaryReferences'] as Map<String, dynamic>).map(
(key, value) => MapEntry(key, value as String),
),
(key, value) => MapEntry(key, value as String),
),
);
}
}
@@ -7,14 +7,14 @@ import 'package:sqflite_common/sqlite_api.dart';
Future<void> verifyTablesWithDbConnection(DatabaseExecutor db) async {
final Set<String> tables = await db
.query(
'sqlite_master',
columns: ['name'],
where: 'type = ?',
whereArgs: ['table'],
)
'sqlite_master',
columns: ['name'],
where: 'type = ?',
whereArgs: ['table'],
)
.then((result) {
return result.map((row) => row['name'] as String).toSet();
});
return result.map((row) => row['name'] as String).toSet();
});

final Set<String> expectedTables = {
...JMdictTableNames.allTables,
@@ -26,14 +26,16 @@ Future<void> verifyTablesWithDbConnection(DatabaseExecutor db) async {
final missingTables = expectedTables.difference(tables);

if (missingTables.isNotEmpty) {
throw Exception([
'Missing tables:',
missingTables.map((table) => ' - $table').join('\n'),
'',
'Found tables:\n',
tables.map((table) => ' - $table').join('\n'),
'',
'Please ensure the database is correctly set up.',
].join('\n'));
throw Exception(
[
'Missing tables:',
missingTables.map((table) => ' - $table').join('\n'),
'',
'Found tables:\n',
tables.map((table) => ' - $table').join('\n'),
'',
'Please ensure the database is correctly set up.',
].join('\n'),
);
}
}

@@ -47,18 +47,18 @@ class WordSearchResult {
|
||||
});
|
||||
|
||||
Map<String, dynamic> toJson() => {
|
||||
'_score': score,
|
||||
'entryId': entryId,
|
||||
'isCommon': isCommon,
|
||||
'japanese': japanese.map((e) => e.toJson()).toList(),
|
||||
'kanjiInfo':
|
||||
kanjiInfo.map((key, value) => MapEntry(key, value.toJson())),
|
||||
'readingInfo':
|
||||
readingInfo.map((key, value) => MapEntry(key, value.toJson())),
|
||||
'senses': senses.map((e) => e.toJson()).toList(),
|
||||
'jlptLevel': jlptLevel.toJson(),
|
||||
'sources': sources.toJson(),
|
||||
};
|
||||
'_score': score,
|
||||
'entryId': entryId,
|
||||
'isCommon': isCommon,
|
||||
'japanese': japanese.map((e) => e.toJson()).toList(),
|
||||
'kanjiInfo': kanjiInfo.map((key, value) => MapEntry(key, value.toJson())),
|
||||
'readingInfo': readingInfo.map(
|
||||
(key, value) => MapEntry(key, value.toJson()),
|
||||
),
|
||||
'senses': senses.map((e) => e.toJson()).toList(),
|
||||
'jlptLevel': jlptLevel.toJson(),
|
||||
'sources': sources.toJson(),
|
||||
};
|
||||
|
||||
factory WordSearchResult.fromJson(Map<String, dynamic> json) =>
|
||||
WordSearchResult(
|
||||
|
||||
@@ -6,18 +6,12 @@ class WordSearchRuby {
|
||||
/// Furigana, if applicable.
|
||||
String? furigana;
|
||||
|
||||
WordSearchRuby({
|
||||
required this.base,
|
||||
this.furigana,
|
||||
});
|
||||
WordSearchRuby({required this.base, this.furigana});
|
||||
|
||||
Map<String, dynamic> toJson() => {
|
||||
'base': base,
|
||||
'furigana': furigana,
|
||||
};
|
||||
Map<String, dynamic> toJson() => {'base': base, 'furigana': furigana};
|
||||
|
||||
factory WordSearchRuby.fromJson(Map<String, dynamic> json) => WordSearchRuby(
|
||||
base: json['base'] as String,
|
||||
furigana: json['furigana'] as String?,
|
||||
);
|
||||
base: json['base'] as String,
|
||||
furigana: json['furigana'] as String?,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -71,18 +71,18 @@ class WordSearchSense {
|
||||
languageSource.isEmpty;
|
||||
|
||||
Map<String, dynamic> toJson() => {
|
||||
'englishDefinitions': englishDefinitions,
|
||||
'partsOfSpeech': partsOfSpeech.map((e) => e.toJson()).toList(),
|
||||
'seeAlso': seeAlso.map((e) => e.toJson()).toList(),
|
||||
'antonyms': antonyms.map((e) => e.toJson()).toList(),
|
||||
'restrictedToReading': restrictedToReading,
|
||||
'restrictedToKanji': restrictedToKanji,
|
||||
'fields': fields.map((e) => e.toJson()).toList(),
|
||||
'dialects': dialects.map((e) => e.toJson()).toList(),
|
||||
'misc': misc.map((e) => e.toJson()).toList(),
|
||||
'info': info,
|
||||
'languageSource': languageSource,
|
||||
};
|
||||
'englishDefinitions': englishDefinitions,
|
||||
'partsOfSpeech': partsOfSpeech.map((e) => e.toJson()).toList(),
|
||||
'seeAlso': seeAlso.map((e) => e.toJson()).toList(),
|
||||
'antonyms': antonyms.map((e) => e.toJson()).toList(),
|
||||
'restrictedToReading': restrictedToReading,
|
||||
'restrictedToKanji': restrictedToKanji,
|
||||
'fields': fields.map((e) => e.toJson()).toList(),
|
||||
'dialects': dialects.map((e) => e.toJson()).toList(),
|
||||
'misc': misc.map((e) => e.toJson()).toList(),
|
||||
'info': info,
|
||||
'languageSource': languageSource,
|
||||
};
|
||||
|
||||
factory WordSearchSense.fromJson(Map<String, dynamic> json) =>
|
||||
WordSearchSense(
|
||||
@@ -104,8 +104,9 @@ class WordSearchSense {
|
||||
dialects: (json['dialects'] as List)
|
||||
.map((e) => JMdictDialect.fromJson(e))
|
||||
.toList(),
|
||||
misc:
|
||||
(json['misc'] as List).map((e) => JMdictMisc.fromJson(e)).toList(),
|
||||
misc: (json['misc'] as List)
|
||||
.map((e) => JMdictMisc.fromJson(e))
|
||||
.toList(),
|
||||
info: List<String>.from(json['info']),
|
||||
languageSource: (json['languageSource'] as List)
|
||||
.map((e) => WordSearchSenseLanguageSource.fromJson(e))
|
||||
|
||||
@@ -13,11 +13,11 @@ class WordSearchSenseLanguageSource {
|
||||
});
|
||||
|
||||
Map<String, Object?> toJson() => {
|
||||
'language': language,
|
||||
'phrase': phrase,
|
||||
'fullyDescribesSense': fullyDescribesSense,
|
||||
'constructedFromSmallerWords': constructedFromSmallerWords,
|
||||
};
|
||||
'language': language,
|
||||
'phrase': phrase,
|
||||
'fullyDescribesSense': fullyDescribesSense,
|
||||
'constructedFromSmallerWords': constructedFromSmallerWords,
|
||||
};
|
||||
|
||||
factory WordSearchSenseLanguageSource.fromJson(Map<String, dynamic> json) =>
|
||||
WordSearchSenseLanguageSource(
|
||||
|
||||
@@ -7,20 +7,11 @@ class WordSearchSources {
|
||||
/// Whether JMnedict was used.
|
||||
final bool jmnedict;
|
||||
|
||||
const WordSearchSources({
|
||||
this.jmdict = true,
|
||||
this.jmnedict = false,
|
||||
});
|
||||
const WordSearchSources({this.jmdict = true, this.jmnedict = false});
|
||||
|
||||
Map<String, Object?> get sqlValue => {
|
||||
'jmdict': jmdict,
|
||||
'jmnedict': jmnedict,
|
||||
};
|
||||
Map<String, Object?> get sqlValue => {'jmdict': jmdict, 'jmnedict': jmnedict};
|
||||
|
||||
Map<String, dynamic> toJson() => {
|
||||
'jmdict': jmdict,
|
||||
'jmnedict': jmnedict,
|
||||
};
|
||||
Map<String, dynamic> toJson() => {'jmdict': jmdict, 'jmnedict': jmnedict};
|
||||
|
||||
factory WordSearchSources.fromJson(Map<String, dynamic> json) =>
|
||||
WordSearchSources(
|
||||
|
||||
@@ -21,11 +21,11 @@ class WordSearchXrefEntry {
|
||||
});
|
||||
|
||||
Map<String, dynamic> toJson() => {
|
||||
'entryId': entryId,
|
||||
'ambiguous': ambiguous,
|
||||
'baseWord': baseWord,
|
||||
'furigana': furigana,
|
||||
};
|
||||
'entryId': entryId,
|
||||
'ambiguous': ambiguous,
|
||||
'baseWord': baseWord,
|
||||
'furigana': furigana,
|
||||
};
|
||||
|
||||
factory WordSearchXrefEntry.fromJson(Map<String, dynamic> json) =>
|
||||
WordSearchXrefEntry(
|
||||
|
||||
@@ -27,8 +27,7 @@ extension JaDBConnection on DatabaseExecutor {
|
||||
Future<List<String>> filterKanji(
|
||||
List<String> kanji, {
|
||||
bool deduplicate = false,
|
||||
}) =>
|
||||
filterKanjiWithDbConnection(this, kanji, deduplicate);
|
||||
}) => filterKanjiWithDbConnection(this, kanji, deduplicate);
|
||||
|
||||
/// Search for a word in the database.
|
||||
Future<List<WordSearchResult>?> jadbSearchWord(
|
||||
@@ -36,14 +35,13 @@ extension JaDBConnection on DatabaseExecutor {
|
||||
SearchMode searchMode = SearchMode.Auto,
|
||||
int page = 0,
|
||||
int? pageSize,
|
||||
}) =>
|
||||
searchWordWithDbConnection(
|
||||
this,
|
||||
word,
|
||||
searchMode: searchMode,
|
||||
page: page,
|
||||
pageSize: pageSize,
|
||||
);
|
||||
}) => searchWordWithDbConnection(
|
||||
this,
|
||||
word,
|
||||
searchMode: searchMode,
|
||||
page: page,
|
||||
pageSize: pageSize,
|
||||
);
|
||||
|
||||
///
|
||||
Future<WordSearchResult?> jadbGetWordById(int id) =>
|
||||
@@ -59,12 +57,7 @@ extension JaDBConnection on DatabaseExecutor {
|
||||
Future<int?> jadbSearchWordCount(
|
||||
String word, {
|
||||
SearchMode searchMode = SearchMode.Auto,
|
||||
}) =>
|
||||
searchWordCountWithDbConnection(
|
||||
this,
|
||||
word,
|
||||
searchMode: searchMode,
|
||||
);
|
||||
}) => searchWordCountWithDbConnection(this, word, searchMode: searchMode);
|
||||
|
||||
/// Given a list of radicals, search which kanji contains all
|
||||
/// of the radicals, find their other radicals, and return those.
|
||||
|
||||
@@ -6,14 +6,13 @@ Future<List<String>> filterKanjiWithDbConnection(
|
||||
List<String> kanji,
|
||||
bool deduplicate,
|
||||
) async {
|
||||
final Set<String> filteredKanji = await connection.rawQuery(
|
||||
'''
|
||||
final Set<String> filteredKanji = await connection
|
||||
.rawQuery('''
|
||||
SELECT "literal"
|
||||
FROM "${KANJIDICTableNames.character}"
|
||||
WHERE "literal" IN (${kanji.map((_) => '?').join(',')})
|
||||
''',
|
||||
kanji,
|
||||
).then((value) => value.map((e) => e['literal'] as String).toSet());
|
||||
''', kanji)
|
||||
.then((value) => value.map((e) => e['literal'] as String).toSet());
|
||||
|
||||
if (deduplicate) {
|
||||
return filteredKanji.toList();
|
||||
|
||||
@@ -116,10 +116,10 @@ Future<KanjiSearchResult?> searchKanjiWithDbConnection(
|
||||
// whereArgs: [kanji],
|
||||
// );
|
||||
|
||||
// TODO: Search for kunyomi and onyomi usage of the characters
|
||||
// from JMDict. We'll need to fuzzy aquery JMDict_KanjiElement for mathces,
|
||||
// filter JMdict_ReadingElement for kunyomi/onyomi, and then sort the main entry
|
||||
// by JLPT, news frequency, etc.
|
||||
// TODO: Search for kunyomi and onyomi usage of the characters
|
||||
// from JMDict. We'll need to fuzzy aquery JMDict_KanjiElement for mathces,
|
||||
// filter JMdict_ReadingElement for kunyomi/onyomi, and then sort the main entry
|
||||
// by JLPT, news frequency, etc.
|
||||
|
||||
await characters_query.then((value) => characters = value);
|
||||
|
||||
@@ -157,9 +157,7 @@ Future<KanjiSearchResult?> searchKanjiWithDbConnection(
|
||||
: null;
|
||||
|
||||
final alternativeLanguageReadings = readings
|
||||
.groupListsBy(
|
||||
(item) => item['type'] as String,
|
||||
)
|
||||
.groupListsBy((item) => item['type'] as String)
|
||||
.map(
|
||||
(key, value) => MapEntry(
|
||||
key,
|
||||
@@ -169,14 +167,10 @@ Future<KanjiSearchResult?> searchKanjiWithDbConnection(
|
||||
|
||||
// TODO: Add `SKIPMisclassification` to the entries
|
||||
final queryCodes = query_codes
|
||||
.groupListsBy(
|
||||
(item) => item['type'] as String,
|
||||
)
|
||||
.groupListsBy((item) => item['type'] as String)
|
||||
.map(
|
||||
(key, value) => MapEntry(
|
||||
key,
|
||||
value.map((item) => item['code'] as String).toList(),
|
||||
),
|
||||
(key, value) =>
|
||||
MapEntry(key, value.map((item) => item['code'] as String).toList()),
|
||||
);
|
||||
|
||||
// TODO: Add `volume` and `page` to the entries
|
||||
@@ -213,8 +207,9 @@ Future<KanjiSearchResult?> searchKanjiWithDbConnection(
|
||||
},
|
||||
nanori: nanoris.map((item) => item['nanori'] as String).toList(),
|
||||
alternativeLanguageReadings: alternativeLanguageReadings,
|
||||
strokeMiscounts:
|
||||
stroke_miscounts.map((item) => item['strokeCount'] as int).toList(),
|
||||
strokeMiscounts: stroke_miscounts
|
||||
.map((item) => item['strokeCount'] as int)
|
||||
.toList(),
|
||||
queryCodes: queryCodes,
|
||||
dictionaryReferences: dictionaryReferences,
|
||||
);
|
||||
|
||||
@@ -19,14 +19,12 @@ Future<List<String>> searchRemainingRadicalsWithDbConnection(
|
||||
HAVING COUNT(DISTINCT "radical") = ?
|
||||
)
|
||||
''',
|
||||
[
|
||||
...radicals,
|
||||
radicals.length,
|
||||
],
|
||||
[...radicals, radicals.length],
|
||||
);
|
||||
|
||||
final remainingRadicals =
|
||||
queryResult.map((row) => row['radical'] as String).toList();
|
||||
final remainingRadicals = queryResult
|
||||
.map((row) => row['radical'] as String)
|
||||
.toList();
|
||||
|
||||
return remainingRadicals;
|
||||
}
|
||||
@@ -43,10 +41,7 @@ Future<List<String>> searchKanjiByRadicalsWithDbConnection(
|
||||
GROUP BY "kanji"
|
||||
HAVING COUNT(DISTINCT "radical") = ?
|
||||
''',
|
||||
[
|
||||
...radicals,
|
||||
radicals.length,
|
||||
],
|
||||
[...radicals, radicals.length],
|
||||
);
|
||||
|
||||
final kanji = queryResult.map((row) => row['kanji'] as String).toList();
|
||||
|
||||
@@ -61,22 +61,22 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> readingElements;
|
||||
final Future<List<Map<String, Object?>>> readingElements_query =
|
||||
connection.query(
|
||||
JMdictTableNames.readingElement,
|
||||
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
|
||||
whereArgs: entryIds,
|
||||
orderBy: 'orderNum',
|
||||
);
|
||||
final Future<List<Map<String, Object?>>> readingElements_query = connection
|
||||
.query(
|
||||
JMdictTableNames.readingElement,
|
||||
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
|
||||
whereArgs: entryIds,
|
||||
orderBy: 'orderNum',
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> kanjiElements;
|
||||
final Future<List<Map<String, Object?>>> kanjiElements_query =
|
||||
connection.query(
|
||||
JMdictTableNames.kanjiElement,
|
||||
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
|
||||
whereArgs: entryIds,
|
||||
orderBy: 'orderNum',
|
||||
);
|
||||
final Future<List<Map<String, Object?>>> kanjiElements_query = connection
|
||||
.query(
|
||||
JMdictTableNames.kanjiElement,
|
||||
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
|
||||
whereArgs: entryIds,
|
||||
orderBy: 'orderNum',
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> jlptTags;
|
||||
final Future<List<Map<String, Object?>>> jlptTags_query = connection.query(
|
||||
@@ -86,12 +86,12 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> commonEntries;
|
||||
final Future<List<Map<String, Object?>>> commonEntries_query =
|
||||
connection.query(
|
||||
'JMdict_EntryCommon',
|
||||
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
|
||||
whereArgs: entryIds,
|
||||
);
|
||||
final Future<List<Map<String, Object?>>> commonEntries_query = connection
|
||||
.query(
|
||||
'JMdict_EntryCommon',
|
||||
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
|
||||
whereArgs: entryIds,
|
||||
);
|
||||
|
||||
await Future.wait([
|
||||
senses_query.then((value) => senses = value),
|
||||
@@ -106,9 +106,9 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
|
||||
final senseIds = senses.map((sense) => sense['senseId'] as int).toList();
|
||||
|
||||
late final List<Map<String, Object?>> senseAntonyms;
|
||||
final Future<List<Map<String, Object?>>> senseAntonyms_query =
|
||||
connection.rawQuery(
|
||||
"""
|
||||
final Future<List<Map<String, Object?>>> senseAntonyms_query = connection
|
||||
.rawQuery(
|
||||
"""
|
||||
SELECT
|
||||
"${JMdictTableNames.senseAntonyms}".senseId,
|
||||
"${JMdictTableNames.senseAntonyms}".ambiguous,
|
||||
@@ -125,16 +125,16 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
|
||||
"${JMdictTableNames.senseAntonyms}"."senseId",
|
||||
"${JMdictTableNames.senseAntonyms}"."xrefEntryId"
|
||||
""",
|
||||
[...senseIds],
|
||||
);
|
||||
[...senseIds],
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> senseDialects;
|
||||
final Future<List<Map<String, Object?>>> senseDialects_query =
|
||||
connection.query(
|
||||
JMdictTableNames.senseDialect,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
final Future<List<Map<String, Object?>>> senseDialects_query = connection
|
||||
.query(
|
||||
JMdictTableNames.senseDialect,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> senseFields;
|
||||
final Future<List<Map<String, Object?>>> senseFields_query = connection.query(
|
||||
@@ -144,12 +144,12 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> senseGlossaries;
|
||||
final Future<List<Map<String, Object?>>> senseGlossaries_query =
|
||||
connection.query(
|
||||
JMdictTableNames.senseGlossary,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
final Future<List<Map<String, Object?>>> senseGlossaries_query = connection
|
||||
.query(
|
||||
JMdictTableNames.senseGlossary,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> senseInfos;
|
||||
final Future<List<Map<String, Object?>>> senseInfos_query = connection.query(
|
||||
@@ -161,10 +161,10 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
|
||||
late final List<Map<String, Object?>> senseLanguageSources;
|
||||
final Future<List<Map<String, Object?>>> senseLanguageSources_query =
|
||||
connection.query(
|
||||
JMdictTableNames.senseLanguageSource,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
JMdictTableNames.senseLanguageSource,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> senseMiscs;
|
||||
final Future<List<Map<String, Object?>>> senseMiscs_query = connection.query(
|
||||
@@ -183,23 +183,23 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
|
||||
late final List<Map<String, Object?>> senseRestrictedToKanjis;
|
||||
final Future<List<Map<String, Object?>>> senseRestrictedToKanjis_query =
|
||||
connection.query(
|
||||
JMdictTableNames.senseRestrictedToKanji,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
JMdictTableNames.senseRestrictedToKanji,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> senseRestrictedToReadings;
|
||||
final Future<List<Map<String, Object?>>> senseRestrictedToReadings_query =
|
||||
connection.query(
|
||||
JMdictTableNames.senseRestrictedToReading,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
JMdictTableNames.senseRestrictedToReading,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> senseSeeAlsos;
|
||||
final Future<List<Map<String, Object?>>> senseSeeAlsos_query =
|
||||
connection.rawQuery(
|
||||
"""
|
||||
final Future<List<Map<String, Object?>>> senseSeeAlsos_query = connection
|
||||
.rawQuery(
|
||||
"""
|
||||
SELECT
|
||||
"${JMdictTableNames.senseSeeAlso}"."senseId",
|
||||
"${JMdictTableNames.senseSeeAlso}"."ambiguous",
|
||||
@@ -216,16 +216,16 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
|
||||
"${JMdictTableNames.senseSeeAlso}"."senseId",
|
||||
"${JMdictTableNames.senseSeeAlso}"."xrefEntryId"
|
||||
""",
|
||||
[...senseIds],
|
||||
);
|
||||
[...senseIds],
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> exampleSentences;
|
||||
final Future<List<Map<String, Object?>>> exampleSentences_query =
|
||||
connection.query(
|
||||
'JMdict_ExampleSentence',
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
final Future<List<Map<String, Object?>>> exampleSentences_query = connection
|
||||
.query(
|
||||
'JMdict_ExampleSentence',
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
// Reading queries
|
||||
|
||||
@@ -236,18 +236,20 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
|
||||
late final List<Map<String, Object?>> readingElementInfos;
|
||||
final Future<List<Map<String, Object?>>> readingElementInfos_query =
|
||||
connection.query(
|
||||
JMdictTableNames.readingInfo,
|
||||
where: '(elementId) IN (${List.filled(readingIds.length, '?').join(',')})',
|
||||
whereArgs: readingIds,
|
||||
);
|
||||
JMdictTableNames.readingInfo,
|
||||
where:
|
||||
'(elementId) IN (${List.filled(readingIds.length, '?').join(',')})',
|
||||
whereArgs: readingIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> readingElementRestrictions;
|
||||
final Future<List<Map<String, Object?>>> readingElementRestrictions_query =
|
||||
connection.query(
|
||||
JMdictTableNames.readingRestriction,
|
||||
where: '(elementId) IN (${List.filled(readingIds.length, '?').join(',')})',
|
||||
whereArgs: readingIds,
|
||||
);
|
||||
JMdictTableNames.readingRestriction,
|
||||
where:
|
||||
'(elementId) IN (${List.filled(readingIds.length, '?').join(',')})',
|
||||
whereArgs: readingIds,
|
||||
);
|
||||
|
||||
// Kanji queries
|
||||
|
||||
@@ -256,12 +258,13 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
|
||||
.toList();
|
||||
|
||||
late final List<Map<String, Object?>> kanjiElementInfos;
|
||||
final Future<List<Map<String, Object?>>> kanjiElementInfos_query =
|
||||
connection.query(
|
||||
JMdictTableNames.kanjiInfo,
|
||||
where: '(elementId) IN (${List.filled(kanjiIds.length, '?').join(',')})',
|
||||
whereArgs: kanjiIds,
|
||||
);
|
||||
final Future<List<Map<String, Object?>>> kanjiElementInfos_query = connection
|
||||
.query(
|
||||
JMdictTableNames.kanjiInfo,
|
||||
where:
|
||||
'(elementId) IN (${List.filled(kanjiIds.length, '?').join(',')})',
|
||||
whereArgs: kanjiIds,
|
||||
);
|
||||
|
||||
await Future.wait([
|
||||
senseAntonyms_query.then((value) => senseAntonyms = value),
|
||||
@@ -272,15 +275,18 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
|
||||
senseLanguageSources_query.then((value) => senseLanguageSources = value),
|
||||
senseMiscs_query.then((value) => senseMiscs = value),
|
||||
sensePOSs_query.then((value) => sensePOSs = value),
|
||||
senseRestrictedToKanjis_query
|
||||
.then((value) => senseRestrictedToKanjis = value),
|
||||
senseRestrictedToReadings_query
|
||||
.then((value) => senseRestrictedToReadings = value),
|
||||
senseRestrictedToKanjis_query.then(
|
||||
(value) => senseRestrictedToKanjis = value,
|
||||
),
|
||||
senseRestrictedToReadings_query.then(
|
||||
(value) => senseRestrictedToReadings = value,
|
||||
),
|
||||
senseSeeAlsos_query.then((value) => senseSeeAlsos = value),
|
||||
exampleSentences_query.then((value) => exampleSentences = value),
|
||||
readingElementInfos_query.then((value) => readingElementInfos = value),
|
||||
readingElementRestrictions_query
|
||||
.then((value) => readingElementRestrictions = value),
|
||||
readingElementRestrictions_query.then(
|
||||
(value) => readingElementRestrictions = value,
|
||||
),
|
||||
kanjiElementInfos_query.then((value) => kanjiElementInfos = value),
|
||||
]);
|
||||
|
||||
|
||||
@@ -47,8 +47,10 @@ String _filterFTSSensitiveCharacters(String word) {
|
||||
int? offset,
|
||||
bool countOnly = false,
|
||||
}) {
|
||||
assert(tableName == JMdictTableNames.kanjiElement ||
|
||||
tableName == JMdictTableNames.readingElement);
|
||||
assert(
|
||||
tableName == JMdictTableNames.kanjiElement ||
|
||||
tableName == JMdictTableNames.readingElement,
|
||||
);
|
||||
assert(!countOnly || pageSize == null);
|
||||
assert(!countOnly || offset == null);
|
||||
assert(pageSize == null || pageSize > 0);
|
||||
@@ -105,7 +107,7 @@ String _filterFTSSensitiveCharacters(String word) {
|
||||
_filterFTSSensitiveCharacters(word),
|
||||
if (pageSize != null) pageSize,
|
||||
if (offset != null) offset,
|
||||
]
|
||||
],
|
||||
);
|
||||
}
|
||||
|
||||
@@ -121,18 +123,19 @@ Future<List<ScoredEntryId>> _queryKanji(
|
||||
pageSize: pageSize,
|
||||
offset: offset,
|
||||
);
|
||||
return connection.rawQuery(query, args).then((result) => result
|
||||
.map((row) => ScoredEntryId(
|
||||
row['entryId'] as int,
|
||||
row['score'] as int,
|
||||
))
|
||||
.toList());
|
||||
return connection
|
||||
.rawQuery(query, args)
|
||||
.then(
|
||||
(result) => result
|
||||
.map(
|
||||
(row) =>
|
||||
ScoredEntryId(row['entryId'] as int, row['score'] as int),
|
||||
)
|
||||
.toList(),
|
||||
);
|
||||
}
|
||||
|
||||
Future<int> _queryKanjiCount(
|
||||
DatabaseExecutor connection,
|
||||
String word,
|
||||
) {
|
||||
Future<int> _queryKanjiCount(DatabaseExecutor connection, String word) {
|
||||
final (query, args) = _kanjiReadingTemplate(
|
||||
JMdictTableNames.kanjiElement,
|
||||
word,
|
||||
@@ -155,18 +158,19 @@ Future<List<ScoredEntryId>> _queryKana(
|
||||
pageSize: pageSize,
|
||||
offset: offset,
|
||||
);
|
||||
return connection.rawQuery(query, args).then((result) => result
|
||||
.map((row) => ScoredEntryId(
|
||||
row['entryId'] as int,
|
||||
row['score'] as int,
|
||||
))
|
||||
.toList());
|
||||
return connection
|
||||
.rawQuery(query, args)
|
||||
.then(
|
||||
(result) => result
|
||||
.map(
|
||||
(row) =>
|
||||
ScoredEntryId(row['entryId'] as int, row['score'] as int),
|
||||
)
|
||||
.toList(),
|
||||
);
|
||||
}
|
||||
|
||||
Future<int> _queryKanaCount(
|
||||
DatabaseExecutor connection,
|
||||
String word,
|
||||
) {
|
||||
Future<int> _queryKanaCount(DatabaseExecutor connection, String word) {
|
||||
final (query, args) = _kanjiReadingTemplate(
|
||||
JMdictTableNames.readingElement,
|
||||
word,
|
||||
@@ -211,28 +215,15 @@ Future<List<ScoredEntryId>> _queryEnglish(
|
||||
OFFSET ?
|
||||
'''
|
||||
.trim(),
|
||||
[
|
||||
word,
|
||||
word,
|
||||
word,
|
||||
'%${word.replaceAll('%', '')}%',
|
||||
pageSize,
|
||||
offset,
|
||||
],
|
||||
[word, word, word, '%${word.replaceAll('%', '')}%', pageSize, offset],
|
||||
);
|
||||
|
||||
return result
|
||||
.map((row) => ScoredEntryId(
|
||||
row['entryId'] as int,
|
||||
row['score'] as int,
|
||||
))
|
||||
.map((row) => ScoredEntryId(row['entryId'] as int, row['score'] as int))
|
||||
.toList();
|
||||
}
|
||||
|
||||
Future<int> _queryEnglishCount(
|
||||
DatabaseExecutor connection,
|
||||
String word,
|
||||
) async {
|
||||
Future<int> _queryEnglishCount(DatabaseExecutor connection, String word) async {
|
||||
final result = await connection.rawQuery(
|
||||
'''
|
||||
SELECT
|
||||
@@ -242,9 +233,7 @@ Future<int> _queryEnglishCount(
|
||||
WHERE "${JMdictTableNames.senseGlossary}"."phrase" LIKE ?
|
||||
'''
|
||||
.trim(),
|
||||
[
|
||||
'%$word%',
|
||||
],
|
||||
['%$word%'],
|
||||
);
|
||||
|
||||
return result.first['count'] as int;
|
||||
@@ -261,46 +250,26 @@ Future<List<ScoredEntryId>> fetchEntryIds(
|
||||
searchMode = _determineSearchMode(word);
|
||||
}
|
||||
|
||||
assert(
|
||||
word.isNotEmpty,
|
||||
'Word should not be empty when fetching entry IDs',
|
||||
);
|
||||
assert(word.isNotEmpty, 'Word should not be empty when fetching entry IDs');
|
||||
|
||||
late final List<ScoredEntryId> entryIds;
|
||||
switch (searchMode) {
|
||||
case SearchMode.Kanji:
|
||||
entryIds = await _queryKanji(
|
||||
connection,
|
||||
word,
|
||||
pageSize,
|
||||
offset,
|
||||
);
|
||||
entryIds = await _queryKanji(connection, word, pageSize, offset);
|
||||
break;
|
||||
|
||||
case SearchMode.Kana:
|
||||
entryIds = await _queryKana(
|
||||
connection,
|
||||
word,
|
||||
pageSize,
|
||||
offset,
|
||||
);
|
||||
entryIds = await _queryKana(connection, word, pageSize, offset);
|
||||
break;
|
||||
|
||||
case SearchMode.English:
|
||||
entryIds = await _queryEnglish(
|
||||
connection,
|
||||
word,
|
||||
pageSize,
|
||||
offset,
|
||||
);
|
||||
entryIds = await _queryEnglish(connection, word, pageSize, offset);
|
||||
break;
|
||||
|
||||
case SearchMode.MixedKana:
|
||||
case SearchMode.MixedKanji:
|
||||
default:
|
||||
throw UnimplementedError(
|
||||
'Search mode $searchMode is not implemented',
|
||||
);
|
||||
throw UnimplementedError('Search mode $searchMode is not implemented');
|
||||
}
|
||||
;
|
||||
|
||||
@@ -316,41 +285,27 @@ Future<int?> fetchEntryIdCount(
|
||||
searchMode = _determineSearchMode(word);
|
||||
}
|
||||
|
||||
assert(
|
||||
word.isNotEmpty,
|
||||
'Word should not be empty when fetching entry IDs',
|
||||
);
|
||||
assert(word.isNotEmpty, 'Word should not be empty when fetching entry IDs');
|
||||
|
||||
late final int? entryIdCount;
|
||||
|
||||
switch (searchMode) {
|
||||
case SearchMode.Kanji:
|
||||
entryIdCount = await _queryKanjiCount(
|
||||
connection,
|
||||
word,
|
||||
);
|
||||
entryIdCount = await _queryKanjiCount(connection, word);
|
||||
break;
|
||||
|
||||
case SearchMode.Kana:
|
||||
entryIdCount = await _queryKanaCount(
|
||||
connection,
|
||||
word,
|
||||
);
|
||||
entryIdCount = await _queryKanaCount(connection, word);
|
||||
break;
|
||||
|
||||
case SearchMode.English:
|
||||
entryIdCount = await _queryEnglishCount(
|
||||
connection,
|
||||
word,
|
||||
);
|
||||
entryIdCount = await _queryEnglishCount(connection, word);
|
||||
break;
|
||||
|
||||
case SearchMode.MixedKana:
|
||||
case SearchMode.MixedKanji:
|
||||
default:
|
||||
throw UnimplementedError(
|
||||
'Search mode $searchMode is not implemented',
|
||||
);
|
||||
throw UnimplementedError('Search mode $searchMode is not implemented');
|
||||
}
|
||||
|
||||
return entryIdCount;
|
||||
|
||||
@@ -39,8 +39,9 @@ List<WordSearchResult> regroupWordSearchResults({
|
||||
}) {
|
||||
final List<WordSearchResult> results = [];
|
||||
|
||||
final commonEntryIds =
|
||||
commonEntries.map((entry) => entry['entryId'] as int).toSet();
|
||||
final commonEntryIds = commonEntries
|
||||
.map((entry) => entry['entryId'] as int)
|
||||
.toSet();
|
||||
|
||||
for (final scoredEntryId in entryIds) {
|
||||
final List<Map<String, Object?>> entryReadingElements = readingElements
|
||||
@@ -55,7 +56,8 @@ List<WordSearchResult> regroupWordSearchResults({
|
||||
.where((element) => element['entryId'] == scoredEntryId.entryId)
|
||||
.toList();
|
||||
|
||||
final jlptLevel = entryJlptTags
|
||||
final jlptLevel =
|
||||
entryJlptTags
|
||||
.map((e) => JlptLevel.fromString(e['jlptLevel'] as String?))
|
||||
.sorted((a, b) => b.compareTo(a))
|
||||
.firstOrNull ??
|
||||
@@ -102,10 +104,7 @@ List<WordSearchResult> regroupWordSearchResults({
|
||||
readingInfo: entryReadingElementsGrouped.readingInfos,
|
||||
senses: entrySensesGrouped,
|
||||
jlptLevel: jlptLevel,
|
||||
sources: const WordSearchSources(
|
||||
jmdict: true,
|
||||
jmnedict: false,
|
||||
),
|
||||
sources: const WordSearchSources(jmdict: true, jmnedict: false),
|
||||
),
|
||||
);
|
||||
}
|
||||
@@ -135,8 +134,9 @@ GroupedWordResult _regroup_words({
|
||||
}) {
|
||||
final List<WordSearchRuby> rubys = [];
|
||||
|
||||
final kanjiElements_ =
|
||||
kanjiElements.where((element) => element['entryId'] == entryId).toList();
|
||||
final kanjiElements_ = kanjiElements
|
||||
.where((element) => element['entryId'] == entryId)
|
||||
.toList();
|
||||
|
||||
final readingElements_ = readingElements
|
||||
.where((element) => element['entryId'] == entryId)
|
||||
@@ -148,9 +148,7 @@ GroupedWordResult _regroup_words({
|
||||
|
||||
for (final readingElement in readingElements_) {
|
||||
if (readingElement['doesNotMatchKanji'] == 1 || kanjiElements_.isEmpty) {
|
||||
final ruby = WordSearchRuby(
|
||||
base: readingElement['reading'] as String,
|
||||
);
|
||||
final ruby = WordSearchRuby(base: readingElement['reading'] as String);
|
||||
rubys.add(ruby);
|
||||
|
||||
continue;
|
||||
@@ -169,18 +167,12 @@ GroupedWordResult _regroup_words({
|
||||
continue;
|
||||
}
|
||||
|
||||
final ruby = WordSearchRuby(
|
||||
base: kanji,
|
||||
furigana: reading,
|
||||
);
|
||||
final ruby = WordSearchRuby(base: kanji, furigana: reading);
|
||||
rubys.add(ruby);
|
||||
}
|
||||
}
|
||||
|
||||
assert(
|
||||
rubys.isNotEmpty,
|
||||
'No readings found for entryId: $entryId',
|
||||
);
|
||||
assert(rubys.isNotEmpty, 'No readings found for entryId: $entryId');
|
||||
|
||||
final Map<int, String> readingElementIdsToReading = {
|
||||
for (final element in readingElements_)
|
||||
@@ -210,7 +202,7 @@ GroupedWordResult _regroup_words({
|
||||
kanjiInfos: {
|
||||
for (final kei in kanjiElementInfos_)
|
||||
kanjiElementIdsToReading[kei['elementId'] as int]!:
|
||||
JMdictKanjiInfo.fromId(kei['info'] as String),
|
||||
JMdictKanjiInfo.fromId(kei['info'] as String),
|
||||
},
|
||||
);
|
||||
}
|
||||
@@ -230,28 +222,38 @@ List<WordSearchSense> _regroup_senses({
|
||||
required List<Map<String, Object?>> senseSeeAlsos,
|
||||
required List<Map<String, Object?>> exampleSentences,
|
||||
}) {
|
||||
final groupedSenseAntonyms =
|
||||
senseAntonyms.groupListsBy((element) => element['senseId'] as int);
|
||||
final groupedSenseDialects =
|
||||
senseDialects.groupListsBy((element) => element['senseId'] as int);
|
||||
final groupedSenseFields =
|
||||
senseFields.groupListsBy((element) => element['senseId'] as int);
|
||||
final groupedSenseGlossaries =
|
||||
senseGlossaries.groupListsBy((element) => element['senseId'] as int);
|
||||
final groupedSenseInfos =
|
||||
senseInfos.groupListsBy((element) => element['senseId'] as int);
|
||||
final groupedSenseLanguageSources =
|
||||
senseLanguageSources.groupListsBy((element) => element['senseId'] as int);
|
||||
final groupedSenseMiscs =
|
||||
senseMiscs.groupListsBy((element) => element['senseId'] as int);
|
||||
final groupedSensePOSs =
|
||||
sensePOSs.groupListsBy((element) => element['senseId'] as int);
|
||||
final groupedSenseRestrictedToKanjis = senseRestrictedToKanjis
|
||||
.groupListsBy((element) => element['senseId'] as int);
|
||||
final groupedSenseAntonyms = senseAntonyms.groupListsBy(
|
||||
(element) => element['senseId'] as int,
|
||||
);
|
||||
final groupedSenseDialects = senseDialects.groupListsBy(
|
||||
(element) => element['senseId'] as int,
|
||||
);
|
||||
final groupedSenseFields = senseFields.groupListsBy(
|
||||
(element) => element['senseId'] as int,
|
||||
);
|
||||
final groupedSenseGlossaries = senseGlossaries.groupListsBy(
|
||||
(element) => element['senseId'] as int,
|
||||
);
|
||||
final groupedSenseInfos = senseInfos.groupListsBy(
|
||||
(element) => element['senseId'] as int,
|
||||
);
|
||||
final groupedSenseLanguageSources = senseLanguageSources.groupListsBy(
|
||||
(element) => element['senseId'] as int,
|
||||
);
|
||||
final groupedSenseMiscs = senseMiscs.groupListsBy(
|
||||
(element) => element['senseId'] as int,
|
||||
);
|
||||
final groupedSensePOSs = sensePOSs.groupListsBy(
|
||||
(element) => element['senseId'] as int,
|
||||
);
|
||||
final groupedSenseRestrictedToKanjis = senseRestrictedToKanjis.groupListsBy(
|
||||
(element) => element['senseId'] as int,
|
||||
);
|
||||
final groupedSenseRestrictedToReadings = senseRestrictedToReadings
|
||||
.groupListsBy((element) => element['senseId'] as int);
|
||||
final groupedSenseSeeAlsos =
|
||||
senseSeeAlsos.groupListsBy((element) => element['senseId'] as int);
|
||||
final groupedSenseSeeAlsos = senseSeeAlsos.groupListsBy(
|
||||
(element) => element['senseId'] as int,
|
||||
);
|
||||
|
||||
final List<WordSearchSense> result = [];
|
||||
for (final sense in senses) {
|
||||
@@ -272,43 +274,53 @@ List<WordSearchSense> _regroup_senses({
|
||||
|
||||
final resultSense = WordSearchSense(
|
||||
englishDefinitions: glossaries.map((e) => e['phrase'] as String).toList(),
|
||||
partsOfSpeech:
|
||||
pos.map((e) => JMdictPOS.fromId(e['pos'] as String)).toList(),
|
||||
partsOfSpeech: pos
|
||||
.map((e) => JMdictPOS.fromId(e['pos'] as String))
|
||||
.toList(),
|
||||
seeAlso: seeAlsos
|
||||
.map((e) => WordSearchXrefEntry(
|
||||
entryId: e['xrefEntryId'] as int,
|
||||
baseWord: e['base'] as String,
|
||||
furigana: e['furigana'] as String?,
|
||||
ambiguous: e['ambiguous'] == 1,
|
||||
))
|
||||
.map(
|
||||
(e) => WordSearchXrefEntry(
|
||||
entryId: e['xrefEntryId'] as int,
|
||||
baseWord: e['base'] as String,
|
||||
furigana: e['furigana'] as String?,
|
||||
ambiguous: e['ambiguous'] == 1,
|
||||
),
|
||||
)
|
||||
.toList(),
|
||||
antonyms: antonyms
|
||||
.map((e) => WordSearchXrefEntry(
|
||||
entryId: e['xrefEntryId'] as int,
|
||||
baseWord: e['base'] as String,
|
||||
furigana: e['furigana'] as String?,
|
||||
ambiguous: e['ambiguous'] == 1,
|
||||
))
|
||||
.map(
|
||||
(e) => WordSearchXrefEntry(
|
||||
entryId: e['xrefEntryId'] as int,
|
||||
baseWord: e['base'] as String,
|
||||
furigana: e['furigana'] as String?,
|
||||
ambiguous: e['ambiguous'] == 1,
|
||||
),
|
||||
)
|
||||
.toList(),
|
||||
restrictedToReading: restrictedToReadings
|
||||
.map((e) => e['reading'] as String)
|
||||
.toList(),
|
||||
restrictedToKanji: restrictedToKanjis
|
||||
.map((e) => e['kanji'] as String)
|
||||
.toList(),
|
||||
fields: fields
|
||||
.map((e) => JMdictField.fromId(e['field'] as String))
|
||||
.toList(),
|
||||
restrictedToReading:
|
||||
restrictedToReadings.map((e) => e['reading'] as String).toList(),
|
||||
restrictedToKanji:
|
||||
restrictedToKanjis.map((e) => e['kanji'] as String).toList(),
|
||||
fields:
|
||||
fields.map((e) => JMdictField.fromId(e['field'] as String)).toList(),
|
||||
dialects: dialects
|
||||
.map((e) => JMdictDialect.fromId(e['dialect'] as String))
|
||||
.toList(),
|
||||
misc: miscs.map((e) => JMdictMisc.fromId(e['misc'] as String)).toList(),
|
||||
info: infos.map((e) => e['info'] as String).toList(),
|
||||
languageSource: languageSources
|
||||
.map((e) => WordSearchSenseLanguageSource(
|
||||
language: e['language'] as String,
|
||||
phrase: e['phrase'] as String?,
|
||||
fullyDescribesSense: e['fullyDescribesSense'] == 1,
|
||||
constructedFromSmallerWords:
|
||||
e['constructedFromSmallerWords'] == 1,
|
||||
))
|
||||
.map(
|
||||
(e) => WordSearchSenseLanguageSource(
|
||||
language: e['language'] as String,
|
||||
phrase: e['phrase'] as String?,
|
||||
fullyDescribesSense: e['fullyDescribesSense'] == 1,
|
||||
constructedFromSmallerWords:
|
||||
e['constructedFromSmallerWords'] == 1,
|
||||
),
|
||||
)
|
||||
.toList(),
|
||||
);
|
||||
|
||||
|
||||
@@ -13,14 +13,7 @@ import 'package:jadb/search/word_search/regrouping.dart';
|
||||
import 'package:jadb/table_names/jmdict.dart';
|
||||
import 'package:sqflite_common/sqlite_api.dart';
|
||||
|
||||
enum SearchMode {
Auto,
English,
Kanji,
MixedKanji,
Kana,
MixedKana,
}
enum SearchMode { Auto, English, Kanji, MixedKanji, Kana, MixedKana }
|
||||
|
||||
Future<List<WordSearchResult>?> searchWordWithDbConnection(
|
||||
DatabaseExecutor connection,
|
||||
@@ -49,9 +42,9 @@ Future<List<WordSearchResult>?> searchWordWithDbConnection(
|
||||
|
||||
final LinearWordQueryData linearWordQueryData =
|
||||
await fetchLinearWordQueryData(
|
||||
connection,
|
||||
entryIds.map((e) => e.entryId).toList(),
|
||||
);
|
||||
connection,
|
||||
entryIds.map((e) => e.entryId).toList(),
|
||||
);
|
||||
|
||||
final result = regroupWordSearchResults(
|
||||
entryIds: entryIds,
|
||||
@@ -106,20 +99,19 @@ Future<WordSearchResult?> getWordByIdWithDbConnection(
|
||||
return null;
|
||||
}
|
||||
|
||||
final exists = await connection.rawQuery(
|
||||
'SELECT EXISTS(SELECT 1 FROM "${JMdictTableNames.entry}" WHERE "entryId" = ?)',
|
||||
[id],
|
||||
).then((value) => value.isNotEmpty && value.first.values.first == 1);
|
||||
final exists = await connection
|
||||
.rawQuery(
|
||||
'SELECT EXISTS(SELECT 1 FROM "${JMdictTableNames.entry}" WHERE "entryId" = ?)',
|
||||
[id],
|
||||
)
|
||||
.then((value) => value.isNotEmpty && value.first.values.first == 1);
|
||||
|
||||
if (!exists) {
|
||||
return null;
|
||||
}
|
||||
|
||||
final LinearWordQueryData linearWordQueryData =
|
||||
await fetchLinearWordQueryData(
|
||||
connection,
|
||||
[id],
|
||||
);
|
||||
await fetchLinearWordQueryData(connection, [id]);
|
||||
|
||||
final result = regroupWordSearchResults(
|
||||
entryIds: [ScoredEntryId(id, 0)],
|
||||
|
||||
@@ -20,23 +20,23 @@ abstract class JMdictTableNames {
static const String senseSeeAlso = 'JMdict_SenseSeeAlso';

static Set<String> get allTables => {
entry,
kanjiElement,
kanjiInfo,
readingElement,
readingInfo,
readingRestriction,
sense,
senseAntonyms,
senseDialect,
senseField,
senseGlossary,
senseInfo,
senseMisc,
sensePOS,
senseLanguageSource,
senseRestrictedToKanji,
senseRestrictedToReading,
senseSeeAlso
};
entry,
kanjiElement,
kanjiInfo,
readingElement,
readingInfo,
readingRestriction,
sense,
senseAntonyms,
senseDialect,
senseField,
senseGlossary,
senseInfo,
senseMisc,
sensePOS,
senseLanguageSource,
senseRestrictedToKanji,
senseRestrictedToReading,
senseSeeAlso,
};
}
|
||||
|
||||
@@ -17,19 +17,19 @@ abstract class KANJIDICTableNames {
static const String nanori = 'KANJIDIC_Nanori';

static Set<String> get allTables => {
character,
radicalName,
codepoint,
radical,
strokeMiscount,
variant,
dictionaryReference,
dictionaryReferenceMoro,
queryCode,
reading,
kunyomi,
onyomi,
meaning,
nanori
};
character,
radicalName,
codepoint,
radical,
strokeMiscount,
variant,
dictionaryReference,
dictionaryReferenceMoro,
queryCode,
reading,
kunyomi,
onyomi,
meaning,
nanori,
};
}
|
||||
|
||||
@@ -1,7 +1,5 @@
abstract class RADKFILETableNames {
static const String radkfile = 'RADKFILE';

static Set<String> get allTables => {
radkfile,
};
static Set<String> get allTables => {radkfile};
}

@@ -288,15 +288,8 @@ extension on DateTime {
}
}

String get japaneseWeekdayPrefix => [
'月',
'火',
'水',
'木',
'金',
'土',
'日',
][weekday - 1];
String get japaneseWeekdayPrefix =>
['月', '火', '水', '木', '金', '土', '日'][weekday - 1];

/// Returns the date in Japanese format.
String japaneseDate({bool showWeekday = false}) =>
|
||||
|
||||
@@ -12,10 +12,7 @@ enum WordClass {
input,
}

enum LemmatizationRuleType {
prefix,
suffix,
}
enum LemmatizationRuleType { prefix, suffix }
|
||||
|
||||
class LemmatizationRule {
|
||||
final String name;
|
||||
@@ -46,18 +43,18 @@ class LemmatizationRule {
|
||||
lookAheadBehind = const [''],
|
||||
LemmatizationRuleType type = LemmatizationRuleType.suffix,
|
||||
}) : this(
|
||||
name: name,
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
pattern: replacement != null ? [replacement] : null
|
||||
},
|
||||
type: type,
|
||||
lookAheadBehind: lookAheadBehind,
|
||||
),
|
||||
validChildClasses: validChildClasses,
|
||||
terminal: terminal,
|
||||
wordClass: wordClass,
|
||||
);
|
||||
name: name,
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
pattern: replacement != null ? [replacement] : null,
|
||||
},
|
||||
type: type,
|
||||
lookAheadBehind: lookAheadBehind,
|
||||
),
|
||||
validChildClasses: validChildClasses,
|
||||
terminal: terminal,
|
||||
wordClass: wordClass,
|
||||
);
|
||||
}
|
||||
|
||||
/// Represents a set of patterns for matching allomorphs in a word.
|
||||
@@ -132,8 +129,8 @@ class AllomorphPattern {
|
||||
if (word.startsWith(p as String)) {
|
||||
return patterns[affix] != null
|
||||
? patterns[affix]!
|
||||
.map((s) => s + word.substring(affix.length))
|
||||
.toList()
|
||||
.map((s) => s + word.substring(affix.length))
|
||||
.toList()
|
||||
: [word.substring(affix.length)];
|
||||
}
|
||||
break;
|
||||
@@ -239,9 +236,6 @@ Lemmatized lemmatize(String word) {
|
||||
return Lemmatized(
|
||||
original: word,
|
||||
rule: inputRule,
|
||||
children: _lemmatize(
|
||||
inputRule,
|
||||
word,
|
||||
),
|
||||
children: _lemmatize(inputRule, word),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -534,16 +534,14 @@ bool _hasDoubleConsonant(String for_conversion, int length) =>
|
||||
RegExp(r'^([kgsztdnbpmyrlwchf])\1$').hasMatch(for_conversion));
|
||||
|
||||
String transliterateLatinToHiragana(String latin) {
|
||||
String romaji =
|
||||
latin.toLowerCase().replaceAll('mb', 'nb').replaceAll('mp', 'np');
|
||||
String romaji = latin
|
||||
.toLowerCase()
|
||||
.replaceAll('mb', 'nb')
|
||||
.replaceAll('mp', 'np');
|
||||
String kana = '';
|
||||
|
||||
while (romaji.isNotEmpty) {
|
||||
final lengths = [
|
||||
if (romaji.length > 2) 3,
|
||||
if (romaji.length > 1) 2,
|
||||
1,
|
||||
];
|
||||
final lengths = [if (romaji.length > 2) 3, if (romaji.length > 1) 2, 1];
|
||||
|
||||
for (final length in lengths) {
|
||||
String? mora;
|
||||
@@ -579,11 +577,11 @@ String _transposeCodepointsInRange(
|
||||
int distance,
|
||||
int rangeStart,
|
||||
int rangeEnd,
|
||||
) =>
|
||||
String.fromCharCodes(
|
||||
text.codeUnits
|
||||
.map((c) => c + ((rangeStart <= c && c <= rangeEnd) ? distance : 0)),
|
||||
);
|
||||
) => String.fromCharCodes(
|
||||
text.codeUnits.map(
|
||||
(c) => c + ((rangeStart <= c && c <= rangeEnd) ? distance : 0),
|
||||
),
|
||||
);
|
||||
|
||||
String transliterateKanaToLatin(String kana) =>
|
||||
transliterateHiraganaToLatin(transliterateKatakanaToHiragana(kana));
|
||||
@@ -599,12 +597,7 @@ String transliterateHiraganaToKatakana(String hiragana) =>
|
||||
|
||||
String transliterateFullwidthRomajiToHalfwidth(String halfwidth) =>
|
||||
_transposeCodepointsInRange(
|
||||
_transposeCodepointsInRange(
|
||||
halfwidth,
|
||||
-65248,
|
||||
65281,
|
||||
65374,
|
||||
),
|
||||
_transposeCodepointsInRange(halfwidth, -65248, 65281, 65374),
|
||||
-12256,
|
||||
12288,
|
||||
12288,
|
||||
@@ -612,12 +605,7 @@ String transliterateFullwidthRomajiToHalfwidth(String halfwidth) =>
|
||||
|
||||
String transliterateHalfwidthRomajiToFullwidth(String halfwidth) =>
|
||||
_transposeCodepointsInRange(
|
||||
_transposeCodepointsInRange(
|
||||
halfwidth,
|
||||
65248,
|
||||
33,
|
||||
126,
|
||||
),
|
||||
_transposeCodepointsInRange(halfwidth, 65248, 33, 126),
|
||||
12256,
|
||||
32,
|
||||
32,
|
||||
|
||||
@@ -7,25 +7,22 @@ void main() {
test("Filter kanji", () async {
final connection = await setup_database_connection();

final result = await connection.filterKanji(
[
"a",
"b",
"c",
"漢",
"字",
"地",
"字",
"か",
"な",
".",
"!",
"@",
";",
"々",
],
deduplicate: false,
);
final result = await connection.filterKanji([
"a",
"b",
"c",
"漢",
"字",
"地",
"字",
"か",
"な",
".",
"!",
"@",
";",
"々",
], deduplicate: false);

expect(result.join(), "漢字地字");
});