treewide: dart format

2025-07-16 15:23:04 +02:00
parent 3a2adf0367
commit 29a3a6aafb
47 changed files with 873 additions and 1045 deletions
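In each hunk below, the lines removed by the commit appear directly above their reformatted replacements. The new layout matches the "tall" style used by recent dart format releases (presumably the Dart 3.7+ SDK formatter), and a treewide pass like this is typically produced by running `dart format .` at the package root. As a hedged illustration only, the sketch below uses a hypothetical class, not taken from this repository, to show the kind of change repeated throughout the diff: short argument lists collapse onto one line, while split collection literals gain a trailing comma and are indented one level from the member instead of hanging under the `=>`.

// Hypothetical example, not from this repository: roughly how the updated
// formatter lays out the constructs seen in the hunks below.
class Example {
  final String reading;
  final int? news;

  // Fits on one line, so the new style keeps it on one line.
  const Example({required this.reading, this.news});

  // Old formatter output, for comparison:
  //   Map<String, Object?> get sqlValue => {
  //         'reading': reading,
  //         'news': news,
  //       };
  Map<String, Object?> get sqlValue => {
    'reading': reading,
    'news': news,
  };
}

void main() {
  print(const Example(reading: 'かな', news: 1).sqlValue);
}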

View File

@@ -17,13 +17,13 @@ abstract class Element extends SQLWritable {
});
Map<String, Object?> get sqlValue => {
'reading': reading,
'news': news,
'ichi': ichi,
'spec': spec,
'gai': gai,
'nf': nf,
};
'reading': reading,
'news': news,
'ichi': ichi,
'spec': spec,
'gai': gai,
'nf': nf,
};
}
class KanjiElement extends Element {
@@ -40,19 +40,19 @@ class KanjiElement extends Element {
int? gai,
int? nf,
}) : super(
reading: reading,
news: news,
ichi: ichi,
spec: spec,
gai: gai,
nf: nf,
);
reading: reading,
news: news,
ichi: ichi,
spec: spec,
gai: gai,
nf: nf,
);
@override
Map<String, Object?> get sqlValue => {
...super.sqlValue,
'orderNum': orderNum,
};
...super.sqlValue,
'orderNum': orderNum,
};
}
class ReadingElement extends Element {
@@ -73,20 +73,20 @@ class ReadingElement extends Element {
int? gai,
int? nf,
}) : super(
reading: reading,
news: news,
ichi: ichi,
spec: spec,
gai: gai,
nf: nf,
);
reading: reading,
news: news,
ichi: ichi,
spec: spec,
gai: gai,
nf: nf,
);
@override
Map<String, Object?> get sqlValue => {
...super.sqlValue,
'orderNum': orderNum,
'readingDoesNotMatchKanji': readingDoesNotMatchKanji,
};
...super.sqlValue,
'orderNum': orderNum,
'readingDoesNotMatchKanji': readingDoesNotMatchKanji,
};
}
class LanguageSource extends SQLWritable {
@@ -104,11 +104,11 @@ class LanguageSource extends SQLWritable {
@override
Map<String, Object?> get sqlValue => {
'language': language,
'phrase': phrase,
'fullyDescribesSense': fullyDescribesSense,
'constructedFromSmallerWords': constructedFromSmallerWords,
};
'language': language,
'phrase': phrase,
'fullyDescribesSense': fullyDescribesSense,
'constructedFromSmallerWords': constructedFromSmallerWords,
};
}
class Glossary extends SQLWritable {
@@ -116,48 +116,40 @@ class Glossary extends SQLWritable {
final String phrase;
final String? type;
const Glossary({
required this.language,
required this.phrase,
this.type,
});
const Glossary({required this.language, required this.phrase, this.type});
Map<String, Object?> get sqlValue => {
'language': language,
'phrase': phrase,
'type': type,
};
'language': language,
'phrase': phrase,
'type': type,
};
}
final kanaRegex =
RegExp(r'^[\p{Script=Katakana}\p{Script=Hiragana}ー]+$', unicode: true);
final kanaRegex = RegExp(
r'^[\p{Script=Katakana}\p{Script=Hiragana}ー]+$',
unicode: true,
);
class XRefParts {
final String? kanjiRef;
final String? readingRef;
final int? senseOrderNum;
const XRefParts({
this.kanjiRef,
this.readingRef,
this.senseOrderNum,
}) : assert(kanjiRef != null || readingRef != null);
const XRefParts({this.kanjiRef, this.readingRef, this.senseOrderNum})
: assert(kanjiRef != null || readingRef != null);
Map<String, Object?> toJson() => {
'kanjiRef': kanjiRef,
'readingRef': readingRef,
'senseOrderNum': senseOrderNum,
};
'kanjiRef': kanjiRef,
'readingRef': readingRef,
'senseOrderNum': senseOrderNum,
};
}
class XRef {
final String entryId;
final String reading;
const XRef({
required this.entryId,
required this.reading,
});
const XRef({required this.entryId, required this.reading});
}
class Sense extends SQLWritable {
@@ -193,9 +185,9 @@ class Sense extends SQLWritable {
@override
Map<String, Object?> get sqlValue => {
'senseId': senseId,
'orderNum': orderNum,
};
'senseId': senseId,
'orderNum': orderNum,
};
bool get isEmpty =>
antonyms.isEmpty &&

View File

@@ -18,8 +18,9 @@ ResolvedXref resolveXref(
XRefParts xref,
) {
List<Entry> candidateEntries = switch ((xref.kanjiRef, xref.readingRef)) {
(null, null) =>
throw Exception('Xref $xref has no kanji or reading reference'),
(null, null) => throw Exception(
'Xref $xref has no kanji or reading reference',
),
(String k, null) => entriesByKanji[k]!.toList(),
(null, String r) => entriesByReading[r]!.toList(),
(String k, String r) =>
@@ -28,8 +29,9 @@ ResolvedXref resolveXref(
// Filter out entries that don't have the number of senses specified in the xref
if (xref.senseOrderNum != null) {
candidateEntries
.retainWhere((entry) => entry.senses.length >= xref.senseOrderNum!);
candidateEntries.retainWhere(
(entry) => entry.senses.length >= xref.senseOrderNum!,
);
}
// If the xref has a reading ref but no kanji ref, and there are multiple
@@ -38,8 +40,9 @@ ResolvedXref resolveXref(
if (xref.kanjiRef == null &&
xref.readingRef != null &&
candidateEntries.length > 1) {
final candidatesWithEmptyKanji =
candidateEntries.where((entry) => entry.kanji.length == 0).toList();
final candidatesWithEmptyKanji = candidateEntries
.where((entry) => entry.kanji.length == 0)
.toList();
if (candidatesWithEmptyKanji.isNotEmpty) {
candidateEntries = candidatesWithEmptyKanji;
@@ -80,20 +83,14 @@ Future<void> seedJMDictData(List<Entry> entries, Database db) async {
elementId++;
b.insert(
JMdictTableNames.kanjiElement,
k.sqlValue..addAll({
'entryId': e.entryId,
'elementId': elementId,
}),
k.sqlValue..addAll({'entryId': e.entryId, 'elementId': elementId}),
);
for (final i in k.info) {
b.insert(
JMdictTableNames.kanjiInfo,
{
'elementId': elementId,
'info': i,
},
);
b.insert(JMdictTableNames.kanjiInfo, {
'elementId': elementId,
'info': i,
});
}
}
@@ -101,29 +98,20 @@ Future<void> seedJMDictData(List<Entry> entries, Database db) async {
elementId++;
b.insert(
JMdictTableNames.readingElement,
r.sqlValue..addAll({
'entryId': e.entryId,
'elementId': elementId,
}),
r.sqlValue..addAll({'entryId': e.entryId, 'elementId': elementId}),
);
for (final i in r.info) {
b.insert(
JMdictTableNames.readingInfo,
{
'elementId': elementId,
'info': i,
},
);
b.insert(JMdictTableNames.readingInfo, {
'elementId': elementId,
'info': i,
});
}
for (final res in r.restrictions) {
b.insert(
JMdictTableNames.readingRestriction,
{
'elementId': elementId,
'restriction': res,
},
);
b.insert(JMdictTableNames.readingRestriction, {
'elementId': elementId,
'restriction': res,
});
}
}
}
@@ -136,16 +124,20 @@ Future<void> seedJMDictData(List<Entry> entries, Database db) async {
for (final e in entries) {
for (final s in e.senses) {
b.insert(
JMdictTableNames.sense, s.sqlValue..addAll({'entryId': e.entryId}));
JMdictTableNames.sense,
s.sqlValue..addAll({'entryId': e.entryId}),
);
for (final d in s.dialects) {
b.insert(
JMdictTableNames.senseDialect,
{'senseId': s.senseId, 'dialect': d},
);
b.insert(JMdictTableNames.senseDialect, {
'senseId': s.senseId,
'dialect': d,
});
}
for (final f in s.fields) {
b.insert(
JMdictTableNames.senseField, {'senseId': s.senseId, 'field': f});
b.insert(JMdictTableNames.senseField, {
'senseId': s.senseId,
'field': f,
});
}
for (final i in s.info) {
b.insert(JMdictTableNames.senseInfo, {'senseId': s.senseId, 'info': i});
@@ -157,16 +149,18 @@ Future<void> seedJMDictData(List<Entry> entries, Database db) async {
b.insert(JMdictTableNames.sensePOS, {'senseId': s.senseId, 'pos': p});
}
for (final rk in s.restrictedToKanji) {
b.insert(
JMdictTableNames.senseRestrictedToKanji,
{'entryId': e.entryId, 'senseId': s.senseId, 'kanji': rk},
);
b.insert(JMdictTableNames.senseRestrictedToKanji, {
'entryId': e.entryId,
'senseId': s.senseId,
'kanji': rk,
});
}
for (final rr in s.restrictedToReading) {
b.insert(
JMdictTableNames.senseRestrictedToReading,
{'entryId': e.entryId, 'senseId': s.senseId, 'reading': rr},
);
b.insert(JMdictTableNames.senseRestrictedToReading, {
'entryId': e.entryId,
'senseId': s.senseId,
'reading': rr,
});
}
for (final ls in s.languageSource) {
b.insert(
@@ -220,17 +214,14 @@ Future<void> seedJMDictData(List<Entry> entries, Database db) async {
xref,
);
b.insert(
JMdictTableNames.senseSeeAlso,
{
'senseId': s.senseId,
'xrefEntryId': resolvedEntry.entry.entryId,
'seeAlsoKanji': xref.kanjiRef,
'seeAlsoReading': xref.readingRef,
'seeAlsoSense': xref.senseOrderNum,
'ambiguous': resolvedEntry.ambiguous,
},
);
b.insert(JMdictTableNames.senseSeeAlso, {
'senseId': s.senseId,
'xrefEntryId': resolvedEntry.entry.entryId,
'seeAlsoKanji': xref.kanjiRef,
'seeAlsoReading': xref.readingRef,
'seeAlsoSense': xref.senseOrderNum,
'ambiguous': resolvedEntry.ambiguous,
});
}
for (final ant in s.antonyms) {

View File

@@ -16,7 +16,8 @@ List<int?> getPriorityValues(XmlElement e, String prefix) {
spec = int.parse(txt.substring(4));
else if (txt.startsWith('gai'))
gai = int.parse(txt.substring(3));
else if (txt.startsWith('nf')) nf = int.parse(txt.substring(2));
else if (txt.startsWith('nf'))
nf = int.parse(txt.substring(2));
}
return [news, ichi, spec, gai, nf];
}
@@ -46,10 +47,7 @@ XRefParts parseXrefParts(String s) {
);
}
} else {
result = XRefParts(
kanjiRef: parts[0],
readingRef: parts[1],
);
result = XRefParts(kanjiRef: parts[0], readingRef: parts[1]);
}
break;
@@ -102,8 +100,9 @@ List<Entry> parseJMDictData(XmlElement root) {
for (final (orderNum, r_ele) in entry.findElements('r_ele').indexed) {
final re_pri = getPriorityValues(r_ele, 're');
final readingDoesNotMatchKanji =
r_ele.findElements('re_nokanji').isNotEmpty;
final readingDoesNotMatchKanji = r_ele
.findElements('re_nokanji')
.isNotEmpty;
readingEls.add(
ReadingElement(
orderNum: orderNum + 1,
@@ -112,8 +111,10 @@ List<Entry> parseJMDictData(XmlElement root) {
.findElements('re_inf')
.map((e) => e.innerText.substring(1, e.innerText.length - 1))
.toList(),
restrictions:
r_ele.findElements('re_restr').map((e) => e.innerText).toList(),
restrictions: r_ele
.findElements('re_restr')
.map((e) => e.innerText)
.toList(),
reading: r_ele.findElements('reb').first.innerText,
news: re_pri[0],
ichi: re_pri[1],
@@ -129,10 +130,14 @@ List<Entry> parseJMDictData(XmlElement root) {
final result = Sense(
senseId: senseId,
orderNum: orderNum + 1,
restrictedToKanji:
sense.findElements('stagk').map((e) => e.innerText).toList(),
restrictedToReading:
sense.findElements('stagr').map((e) => e.innerText).toList(),
restrictedToKanji: sense
.findElements('stagk')
.map((e) => e.innerText)
.toList(),
restrictedToReading: sense
.findElements('stagr')
.map((e) => e.innerText)
.toList(),
pos: sense
.findElements('pos')
.map((e) => e.innerText.substring(1, e.innerText.length - 1))

View File

@@ -13,42 +13,33 @@ class CodePoint extends SQLWritable {
@override
Map<String, Object?> get sqlValue => {
'kanji': kanji,
'type': type,
'codepoint': codepoint,
};
'kanji': kanji,
'type': type,
'codepoint': codepoint,
};
}
class Radical extends SQLWritable {
final String kanji;
final int radicalId;
const Radical({
required this.kanji,
required this.radicalId,
});
const Radical({required this.kanji, required this.radicalId});
@override
Map<String, Object?> get sqlValue => {
'kanji': kanji,
'radicalId': radicalId,
};
Map<String, Object?> get sqlValue => {'kanji': kanji, 'radicalId': radicalId};
}
class StrokeMiscount extends SQLWritable {
final String kanji;
final int strokeCount;
const StrokeMiscount({
required this.kanji,
required this.strokeCount,
});
const StrokeMiscount({required this.kanji, required this.strokeCount});
@override
Map<String, Object?> get sqlValue => {
'kanji': kanji,
'strokeCount': strokeCount,
};
'kanji': kanji,
'strokeCount': strokeCount,
};
}
class Variant extends SQLWritable {
@@ -64,10 +55,10 @@ class Variant extends SQLWritable {
@override
Map<String, Object?> get sqlValue => {
'kanji': kanji,
'type': type,
'variant': variant,
};
'kanji': kanji,
'type': type,
'variant': variant,
};
}
class DictionaryReference extends SQLWritable {
@@ -83,10 +74,10 @@ class DictionaryReference extends SQLWritable {
@override
Map<String, Object?> get sqlValue => {
'kanji': kanji,
'type': type,
'ref': ref,
};
'kanji': kanji,
'type': type,
'ref': ref,
};
}
class DictionaryReferenceMoro extends SQLWritable {
@@ -104,11 +95,11 @@ class DictionaryReferenceMoro extends SQLWritable {
@override
Map<String, Object?> get sqlValue => {
'kanji': kanji,
'ref': ref,
'volume': volume,
'page': page,
};
'kanji': kanji,
'ref': ref,
'volume': volume,
'page': page,
};
}
class QueryCode extends SQLWritable {
@@ -126,11 +117,11 @@ class QueryCode extends SQLWritable {
@override
Map<String, Object?> get sqlValue => {
'kanji': kanji,
'code': code,
'type': type,
'skipMisclassification': skipMisclassification,
};
'kanji': kanji,
'code': code,
'type': type,
'skipMisclassification': skipMisclassification,
};
}
class Reading extends SQLWritable {
@@ -146,10 +137,10 @@ class Reading extends SQLWritable {
@override
Map<String, Object?> get sqlValue => {
'kanji': kanji,
'type': type,
'reading': reading,
};
'kanji': kanji,
'type': type,
'reading': reading,
};
}
class Kunyomi extends SQLWritable {
@@ -165,10 +156,10 @@ class Kunyomi extends SQLWritable {
@override
Map<String, Object?> get sqlValue => {
'kanji': kanji,
'yomi': yomi,
'isJouyou': isJouyou,
};
'kanji': kanji,
'yomi': yomi,
'isJouyou': isJouyou,
};
}
class Onyomi extends SQLWritable {
@@ -186,11 +177,11 @@ class Onyomi extends SQLWritable {
@override
Map<String, Object?> get sqlValue => {
'kanji': kanji,
'yomi': yomi,
'isJouyou': isJouyou,
'type': type,
};
'kanji': kanji,
'yomi': yomi,
'isJouyou': isJouyou,
'type': type,
};
}
class Meaning extends SQLWritable {
@@ -206,10 +197,10 @@ class Meaning extends SQLWritable {
@override
Map<String, Object?> get sqlValue => {
'kanji': kanji,
'language': language,
'meaning': meaning,
};
'kanji': kanji,
'language': language,
'meaning': meaning,
};
}
class Character extends SQLWritable {
@@ -255,10 +246,10 @@ class Character extends SQLWritable {
});
Map<String, Object?> get sqlValue => {
'literal': literal,
'grade': grade,
'strokeCount': strokeCount,
'frequency': frequency,
'jlpt': jlpt,
};
'literal': literal,
'grade': grade,
'strokeCount': strokeCount,
'frequency': frequency,
'jlpt': jlpt,
};
}

View File

@@ -19,10 +19,7 @@ Future<void> seedKANJIDICData(List<Character> characters, Database db) async {
assert(c.radical != null, 'Radical name without radical');
b.insert(
KANJIDICTableNames.radicalName,
{
'radicalId': c.radical!.radicalId,
'name': n,
},
{'radicalId': c.radical!.radicalId, 'name': n},
conflictAlgorithm: ConflictAlgorithm.ignore,
);
}
@@ -34,13 +31,10 @@ Future<void> seedKANJIDICData(List<Character> characters, Database db) async {
b.insert(KANJIDICTableNames.radical, c.radical!.sqlValue);
}
for (final sm in c.strokeMiscounts) {
b.insert(
KANJIDICTableNames.strokeMiscount,
{
'kanji': c.literal,
'strokeCount': sm,
},
);
b.insert(KANJIDICTableNames.strokeMiscount, {
'kanji': c.literal,
'strokeCount': sm,
});
}
for (final v in c.variants) {
b.insert(KANJIDICTableNames.variant, v.sqlValue);
@@ -64,24 +58,24 @@ Future<void> seedKANJIDICData(List<Character> characters, Database db) async {
}
for (final (i, y) in c.kunyomi.indexed) {
b.insert(
KANJIDICTableNames.kunyomi, y.sqlValue..addAll({'orderNum': i + 1}));
KANJIDICTableNames.kunyomi,
y.sqlValue..addAll({'orderNum': i + 1}),
);
}
for (final (i, y) in c.onyomi.indexed) {
b.insert(
KANJIDICTableNames.onyomi, y.sqlValue..addAll({'orderNum': i + 1}));
KANJIDICTableNames.onyomi,
y.sqlValue..addAll({'orderNum': i + 1}),
);
}
for (final (i, m) in c.meanings.indexed) {
b.insert(
KANJIDICTableNames.meaning, m.sqlValue..addAll({'orderNum': i + 1}));
KANJIDICTableNames.meaning,
m.sqlValue..addAll({'orderNum': i + 1}),
);
}
for (final n in c.nanori) {
b.insert(
KANJIDICTableNames.nanori,
{
'kanji': c.literal,
'nanori': n,
},
);
b.insert(KANJIDICTableNames.nanori, {'kanji': c.literal, 'nanori': n});
}
}
await b.commit(noResult: true);

View File

@@ -19,18 +19,24 @@ List<Character> parseKANJIDICData(XmlElement root) {
result.add(
Character(
literal: kanji,
strokeCount:
int.parse(misc.findElements('stroke_count').first.innerText),
strokeCount: int.parse(
misc.findElements('stroke_count').first.innerText,
),
grade: int.tryParse(
misc.findElements('grade').firstOrNull?.innerText ?? ''),
misc.findElements('grade').firstOrNull?.innerText ?? '',
),
frequency: int.tryParse(
misc.findElements('freq').firstOrNull?.innerText ?? ''),
misc.findElements('freq').firstOrNull?.innerText ?? '',
),
jlpt: int.tryParse(
misc.findElements('jlpt').firstOrNull?.innerText ?? '',
),
radicalName:
misc.findElements('rad_name').map((e) => e.innerText).toList(),
codepoints: codepoint
radicalName: misc
.findElements('rad_name')
.map((e) => e.innerText)
.toList(),
codepoints:
codepoint
?.findElements('cp_value')
.map(
(e) => CodePoint(
@@ -45,10 +51,7 @@ List<Character> parseKANJIDICData(XmlElement root) {
?.findElements('rad_value')
.where((e) => e.getAttribute('rad_type') == 'classical')
.map(
(e) => Radical(
kanji: kanji,
radicalId: int.parse(e.innerText),
),
(e) => Radical(kanji: kanji, radicalId: int.parse(e.innerText)),
)
.firstOrNull,
strokeMiscounts: misc
@@ -66,7 +69,8 @@ List<Character> parseKANJIDICData(XmlElement root) {
),
)
.toList(),
dictionaryReferences: dic_number
dictionaryReferences:
dic_number
?.findElements('dic_ref')
.where((e) => e.getAttribute('dr_type') != 'moro')
.map(
@@ -78,7 +82,8 @@ List<Character> parseKANJIDICData(XmlElement root) {
)
.toList() ??
[],
dictionaryReferencesMoro: dic_number
dictionaryReferencesMoro:
dic_number
?.findElements('dic_ref')
.where((e) => e.getAttribute('dr_type') == 'moro')
.map(
@@ -102,7 +107,8 @@ List<Character> parseKANJIDICData(XmlElement root) {
),
)
.toList(),
readings: reading_meaning
readings:
reading_meaning
?.findAllElements('reading')
.where(
(e) =>
@@ -117,7 +123,8 @@ List<Character> parseKANJIDICData(XmlElement root) {
)
.toList() ??
[],
kunyomi: reading_meaning
kunyomi:
reading_meaning
?.findAllElements('reading')
.where((e) => e.getAttribute('r_type') == 'ja_kun')
.map(
@@ -129,19 +136,22 @@ List<Character> parseKANJIDICData(XmlElement root) {
)
.toList() ??
[],
onyomi: reading_meaning
onyomi:
reading_meaning
?.findAllElements('reading')
.where((e) => e.getAttribute('r_type') == 'ja_on')
.map(
(e) => Onyomi(
kanji: kanji,
yomi: transliterateKatakanaToHiragana(e.innerText),
isJouyou: e.getAttribute('r_status') == 'jy',
type: e.getAttribute('on_type')),
kanji: kanji,
yomi: transliterateKatakanaToHiragana(e.innerText),
isJouyou: e.getAttribute('r_status') == 'jy',
type: e.getAttribute('on_type'),
),
)
.toList() ??
[],
meanings: reading_meaning
meanings:
reading_meaning
?.findAllElements('meaning')
.map(
(e) => Meaning(
@@ -152,7 +162,8 @@ List<Character> parseKANJIDICData(XmlElement root) {
)
.toList() ??
[],
nanori: reading_meaning
nanori:
reading_meaning
?.findElements('nanori')
.map((e) => e.innerText)
.toList() ??

View File

@@ -33,21 +33,22 @@ Future<Database> openLocalDb({
throw Exception("JADB_PATH does not exist: $jadbPath");
}
final db = await createDatabaseFactoryFfi(
ffiInit: () =>
open.overrideForAll(() => DynamicLibrary.open(libsqlitePath!)),
).openDatabase(
jadbPath,
options: OpenDatabaseOptions(
onConfigure: (db) async {
if (walMode) {
await db.execute("PRAGMA journal_mode = WAL");
}
await db.execute("PRAGMA foreign_keys = ON");
},
readOnly: !readWrite,
),
);
final db =
await createDatabaseFactoryFfi(
ffiInit: () =>
open.overrideForAll(() => DynamicLibrary.open(libsqlitePath!)),
).openDatabase(
jadbPath,
options: OpenDatabaseOptions(
onConfigure: (db) async {
if (walMode) {
await db.execute("PRAGMA journal_mode = WAL");
}
await db.execute("PRAGMA foreign_keys = ON");
},
readOnly: !readWrite,
),
);
if (verifyTablesExist) {
await db.jadbVerifyTables();

View File

@@ -3,8 +3,10 @@ import 'dart:io';
Iterable<String> parseRADKFILEBlocks(File radkfile) {
final String content = File('data/tmp/radkfile_utf8').readAsStringSync();
final Iterable<String> blocks =
content.replaceAll(RegExp(r'^#.*$'), '').split(r'$').skip(2);
final Iterable<String> blocks = content
.replaceAll(RegExp(r'^#.*$'), '')
.split(r'$')
.skip(2);
return blocks;
}

View File

@@ -1,27 +1,20 @@
import 'package:jadb/table_names/radkfile.dart';
import 'package:sqflite_common/sqlite_api.dart';
Future<void> seedRADKFILEData(
Iterable<String> blocks,
Database db,
) async {
Future<void> seedRADKFILEData(Iterable<String> blocks, Database db) async {
final b = db.batch();
for (final block in blocks) {
final String radical = block[1];
final List<String> kanjiList = block
.replaceFirst(RegExp(r'.*\n'), '')
.split('')
..removeWhere((e) => e == '' || e == '\n');
final List<String> kanjiList =
block.replaceFirst(RegExp(r'.*\n'), '').split('')
..removeWhere((e) => e == '' || e == '\n');
for (final kanji in kanjiList.toSet()) {
b.insert(
RADKFILETableNames.radkfile,
{
'radical': radical,
'kanji': kanji,
},
);
b.insert(RADKFILETableNames.radkfile, {
'radical': radical,
'kanji': kanji,
});
}
}

View File

@@ -31,23 +31,27 @@ Future<List<JLPTRankedWord>> parseJLPTRankedWords(
final kanji = (row[0] as String).isEmpty
? null
: (row[0] as String)
.replaceFirst(RegExp('^お・'), '')
.replaceAll(RegExp(r'.*'), '');
.replaceFirst(RegExp('^お・'), '')
.replaceAll(RegExp(r'.*'), '');
final readings = (row[1] as String)
.split(RegExp('[・/、(:?\s+)]'))
.map((e) => e.trim())
.toList();
final meanings =
(row[2] as String).split(',').expand(cleanMeaning).toList();
final meanings = (row[2] as String)
.split(',')
.expand(cleanMeaning)
.toList();
result.add(JLPTRankedWord(
readings: readings,
kanji: kanji,
jlptLevel: jlptLevel,
meanings: meanings,
));
result.add(
JLPTRankedWord(
readings: readings,
kanji: kanji,
jlptLevel: jlptLevel,
meanings: meanings,
),
);
}
}

View File

@@ -3,47 +3,37 @@ import 'package:jadb/_data_ingestion/tanos-jlpt/objects.dart';
import 'package:jadb/_data_ingestion/tanos-jlpt/overrides.dart';
import 'package:sqflite_common/sqlite_api.dart';
Future<List<int>> _findReadingCandidates(
JLPTRankedWord word,
Database db,
) =>
db
.query(
JMdictTableNames.readingElement,
columns: ['entryId'],
where:
'"reading" IN (${List.filled(word.readings.length, '?').join(',')})',
whereArgs: [...word.readings],
)
.then((rows) => rows.map((row) => row['entryId'] as int).toList());
Future<List<int>> _findReadingCandidates(JLPTRankedWord word, Database db) => db
.query(
JMdictTableNames.readingElement,
columns: ['entryId'],
where:
'"reading" IN (${List.filled(word.readings.length, '?').join(',')})',
whereArgs: [...word.readings],
)
.then((rows) => rows.map((row) => row['entryId'] as int).toList());
Future<List<int>> _findKanjiCandidates(
JLPTRankedWord word,
Database db,
) =>
db
.query(
JMdictTableNames.kanjiElement,
columns: ['entryId'],
where: 'reading = ?',
whereArgs: [word.kanji],
)
.then((rows) => rows.map((row) => row['entryId'] as int).toList());
Future<List<int>> _findKanjiCandidates(JLPTRankedWord word, Database db) => db
.query(
JMdictTableNames.kanjiElement,
columns: ['entryId'],
where: 'reading = ?',
whereArgs: [word.kanji],
)
.then((rows) => rows.map((row) => row['entryId'] as int).toList());
Future<List<(int, String)>> _findSenseCandidates(
JLPTRankedWord word,
Database db,
) =>
db.rawQuery(
) => db
.rawQuery(
'SELECT entryId, phrase '
'FROM "${JMdictTableNames.senseGlossary}" '
'JOIN "${JMdictTableNames.sense}" USING (senseId)'
'WHERE phrase IN (${List.filled(
word.meanings.length,
'?',
).join(',')})',
'WHERE phrase IN (${List.filled(word.meanings.length, '?').join(',')})',
[...word.meanings],
).then(
)
.then(
(rows) => rows
.map((row) => (row['entryId'] as int, row['phrase'] as String))
.toList(),
@@ -55,8 +45,10 @@ Future<int?> findEntry(
bool useOverrides = true,
}) async {
final List<int> readingCandidates = await _findReadingCandidates(word, db);
final List<(int, String)> senseCandidates =
await _findSenseCandidates(word, db);
final List<(int, String)> senseCandidates = await _findSenseCandidates(
word,
db,
);
List<int> entryIds;
@@ -71,8 +63,10 @@ Future<int?> findEntry(
print('No entry found, trying to combine with senses');
entryIds = readingCandidates
.where((readingId) =>
senseCandidates.any((sense) => sense.$1 == readingId))
.where(
(readingId) =>
senseCandidates.any((sense) => sense.$1 == readingId),
)
.toList();
}
} else {
@@ -88,12 +82,15 @@ Future<int?> findEntry(
if (overrideEntries.length > 1) {
throw Exception(
'Multiple override entries found for ${word.toString()}: $entryIds');
'Multiple override entries found for ${word.toString()}: $entryIds',
);
} else if (overrideEntries.length == 0 &&
!word.readings.any((reading) =>
TANOS_JLPT_OVERRIDES.containsKey((word.kanji, reading)))) {
!word.readings.any(
(reading) => TANOS_JLPT_OVERRIDES.containsKey((word.kanji, reading)),
)) {
throw Exception(
'No override entry found for ${word.toString()}: $entryIds');
'No override entry found for ${word.toString()}: $entryIds',
);
}
print('Found override: ${overrideEntries.firstOrNull}');
@@ -103,7 +100,8 @@ Future<int?> findEntry(
if (entryIds.length > 1) {
throw Exception(
'Multiple override entries found for ${word.toString()}: $entryIds');
'Multiple override entries found for ${word.toString()}: $entryIds',
);
} else if (entryIds.isEmpty) {
throw Exception('No entry found for ${word.toString()}');
}

View File

@@ -12,13 +12,10 @@ Future<void> seedTanosJLPTData(
final entryIds = jlptLevel.value;
for (final entryId in entryIds) {
b.insert(
TanosJLPTTableNames.jlptTag,
{
'entryId': entryId,
'jlptLevel': level,
},
);
b.insert(TanosJLPTTableNames.jlptTag, {
'entryId': entryId,
'jlptLevel': level,
});
}
}

View File

@@ -36,14 +36,17 @@ class CreateDb extends Command {
);
bool failed = false;
await seedData(db).then((_) {
print("Database created successfully");
}).catchError((error) {
print("Error creating database: $error");
failed = true;
}).whenComplete(() {
db.close();
});
await seedData(db)
.then((_) {
print("Database created successfully");
})
.catchError((error) {
print("Error creating database: $error");
failed = true;
})
.whenComplete(() {
db.close();
});
if (failed) {
exit(1);
} else {

View File

@@ -63,7 +63,8 @@ Future<void> resolveExisting(
for (final (i, word) in rankedWords.indexed) {
try {
print(
'[${(i + 1).toString().padLeft(4, '0')}/${rankedWords.length}] ${word.toString()}');
'[${(i + 1).toString().padLeft(4, '0')}/${rankedWords.length}] ${word.toString()}',
);
await findEntry(word, db, useOverrides: useOverrides);
} catch (e) {
print(e);
@@ -78,16 +79,19 @@ Future<void> resolveExisting(
print('Statistics:');
for (final jlptLevel in ['N5', 'N4', 'N3', 'N2', 'N1']) {
final missingWordCount =
missingWords.where((e) => e.jlptLevel == jlptLevel).length;
final totalWordCount =
rankedWords.where((e) => e.jlptLevel == jlptLevel).length;
final missingWordCount = missingWords
.where((e) => e.jlptLevel == jlptLevel)
.length;
final totalWordCount = rankedWords
.where((e) => e.jlptLevel == jlptLevel)
.length;
final failureRate =
((missingWordCount / totalWordCount) * 100).toStringAsFixed(2);
final failureRate = ((missingWordCount / totalWordCount) * 100)
.toStringAsFixed(2);
print(
'${jlptLevel} failures: [${missingWordCount}/${totalWordCount}] (${failureRate}%)');
'${jlptLevel} failures: [${missingWordCount}/${totalWordCount}] (${failureRate}%)',
);
}
print('Not able to determine the entry for ${missingWords.length} words');

View File

@@ -17,24 +17,11 @@ class QueryWord extends Command {
addLibsqliteArg(argParser);
addJadbArg(argParser);
argParser.addFlag(
'json',
abbr: 'j',
help: 'Output results in JSON format',
);
argParser.addFlag('json', abbr: 'j', help: 'Output results in JSON format');
argParser.addOption(
'page',
abbr: 'p',
valueHelp: 'NUM',
defaultsTo: '1',
);
argParser.addOption('page', abbr: 'p', valueHelp: 'NUM', defaultsTo: '1');
argParser.addOption(
'pageSize',
valueHelp: 'NUM',
defaultsTo: '30',
);
argParser.addOption('pageSize', valueHelp: 'NUM', defaultsTo: '30');
}
Future<void> run() async {
@@ -62,11 +49,7 @@ class QueryWord extends Command {
final int? maybeId = int.tryParse(searchWord);
if (maybeId != null && maybeId >= 1000000) {
await _searchId(
db,
maybeId,
argResults!.flag('json'),
);
await _searchId(db, maybeId, argResults!.flag('json'));
} else {
await _searchWord(
db,
@@ -78,11 +61,7 @@ class QueryWord extends Command {
}
}
Future<void> _searchId(
DatabaseExecutor db,
int id,
bool jsonOutput,
) async {
Future<void> _searchId(DatabaseExecutor db, int id, bool jsonOutput) async {
final time = Stopwatch()..start();
final result = await JaDBConnection(db).jadbGetWordById(id);
time.stop();
@@ -112,11 +91,9 @@ class QueryWord extends Command {
time.stop();
final time2 = Stopwatch()..start();
final result = await JaDBConnection(db).jadbSearchWord(
searchWord,
page: page,
pageSize: pageSize,
);
final result = await JaDBConnection(
db,
).jadbSearchWord(searchWord, page: page, pageSize: pageSize);
time2.stop();
if (result == null) {

View File

@@ -1,6 +1,6 @@
/// Jouyou kanji sorted primarily by grades and secondarily by strokes.
const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
{
const Map<int, Map<int, List<String>>>
JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT = {
1: {
1: [''],
2: ['', '', '', '', '', '', '', ''],
@@ -12,7 +12,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
8: ['', '', '', '', '', ''],
9: ['', ''],
10: [''],
12: ['']
12: [''],
},
2: {
2: [''],
@@ -35,7 +35,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
5: ['', '', '', '', '', '', '', '', '', '', '', ''],
6: [
@@ -58,7 +58,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
7: [
'',
@@ -78,7 +78,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
8: [
'',
@@ -95,7 +95,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
9: [
'',
@@ -115,7 +115,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
10: ['', '', '', '', '', '', '', '', '', '', '', ''],
11: ['', '', '', '', '', '', '', '', '', '', '', '', ''],
@@ -124,7 +124,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
14: ['', '', '', '', '', ''],
15: [''],
16: ['', ''],
18: ['', '']
18: ['', ''],
},
3: {
2: [''],
@@ -146,7 +146,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
6: ['', '', '', '', '', '', '', '', '', '', '', '', '', ''],
7: ['', '', '', '', '', '', '', '', '', '', '', '', '', ''],
@@ -178,7 +178,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
9: [
'',
@@ -210,7 +210,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
10: [
'',
@@ -232,7 +232,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
11: [
'',
@@ -253,7 +253,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
12: [
'',
@@ -282,13 +282,13 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
13: ['', '', '', '', '', '', '', '', '', '', ''],
14: ['', '', '', '', '', ''],
15: ['', '調', '', ''],
16: ['', '', '', ''],
18: ['']
18: [''],
},
4: {
4: ['', '', '', '', ''],
@@ -318,7 +318,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
8: [
'',
@@ -346,7 +346,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
9: [
'',
@@ -367,7 +367,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
10: [
'',
@@ -389,7 +389,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
11: [
'',
@@ -410,7 +410,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
12: [
'',
@@ -434,7 +434,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
13: ['', '', '', '', '', '', '', '', '', '', ''],
14: ['', '', '', '', '', '', '', '', '', ''],
@@ -442,7 +442,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
16: ['', '', ''],
18: ['', '', ''],
19: ['', ''],
20: ['', '']
20: ['', ''],
},
5: {
3: ['', ''],
@@ -464,7 +464,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
8: [
'',
@@ -484,7 +484,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
9: ['', '', '', '', '', '', '', '', '', '', '', '', ''],
10: [
@@ -505,7 +505,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
11: [
'',
@@ -537,7 +537,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
12: [
'貿',
@@ -561,7 +561,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
13: ['', '', '', '', '', '', '', '', '', '', '', '', '', ''],
14: [
@@ -583,14 +583,14 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
15: ['', '', '', '', '', '', '', ''],
16: ['', '', '', '', ''],
17: ['', '', ''],
18: ['', '', ''],
19: [''],
20: ['']
20: [''],
},
6: {
3: ['', '', '', ''],
@@ -618,7 +618,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'沿',
''
'',
],
9: [
'',
@@ -641,7 +641,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
10: [
'',
@@ -667,7 +667,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
11: [
'',
@@ -689,7 +689,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
12: [
'',
@@ -710,7 +710,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
13: [
'',
@@ -727,14 +727,14 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
14: ['', '', '', '', '', '', '', '', '', '', '', ''],
15: ['', '', '', '', '', '', '', '', '', ''],
16: ['', '', '', '', '', '', '', ''],
17: ['', '', '', ''],
18: ['', '', ''],
19: ['', '']
19: ['', ''],
},
7: {
1: [''],
@@ -760,7 +760,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
5: [
'',
@@ -790,7 +790,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
6: [
'',
@@ -831,7 +831,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
7: [
'',
@@ -896,7 +896,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
8: [
'',
@@ -989,7 +989,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
9: [
'',
@@ -1081,7 +1081,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
10: [
'',
@@ -1206,7 +1206,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
11: [
'',
@@ -1323,7 +1323,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
12: [
'',
@@ -1435,7 +1435,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
13: [
'',
@@ -1552,7 +1552,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
14: [
'',
@@ -1617,7 +1617,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
15: [
'',
@@ -1706,7 +1706,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
16: [
'',
@@ -1764,7 +1764,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
17: [
'',
@@ -1801,7 +1801,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
18: [
'',
@@ -1830,7 +1830,7 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
19: [
'',
@@ -1851,13 +1851,13 @@ const Map<int, Map<int, List<String>>> JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT =
'',
'',
'',
''
'',
],
20: ['', '', '', '', '', '', '', ''],
21: ['', '', '', '', '', ''],
22: ['', '', ''],
23: [''],
29: ['']
29: [''],
},
};
@@ -1866,7 +1866,8 @@ final Map<int, List<String>> JOUYOU_KANJI_BY_GRADES =
.expand((entry) => entry.value.entries)
.map((entry) => MapEntry(entry.key, entry.value))
.fold<Map<int, List<String>>>(
{},
(acc, entry) => acc
..putIfAbsent(entry.key, () => [])
..update(entry.key, (value) => value..addAll(entry.value)));
{},
(acc, entry) => acc
..putIfAbsent(entry.key, () => [])
..update(entry.key, (value) => value..addAll(entry.value)),
);

View File

@@ -31,7 +31,7 @@ const Map<int, List<String>> RADICALS = {
'',
'',
'',
'𠂉'
'𠂉',
],
3: [
'',
@@ -78,7 +78,7 @@ const Map<int, List<String>> RADICALS = {
'',
'',
'',
''
'',
],
4: [
'',
@@ -124,7 +124,7 @@ const Map<int, List<String>> RADICALS = {
'',
'',
'',
''
'',
],
5: [
'',
@@ -154,7 +154,7 @@ const Map<int, List<String>> RADICALS = {
'',
'',
'',
''
'',
],
6: [
'',
@@ -181,7 +181,7 @@ const Map<int, List<String>> RADICALS = {
'',
'',
'',
'西'
'西',
],
7: [
'',
@@ -204,7 +204,7 @@ const Map<int, List<String>> RADICALS = {
'',
'',
'',
''
'',
],
8: ['', '', '', '', '', '', '', '', '', '', '', ''],
9: ['', '', '', '', '', '', '', '', '', '', ''],

View File

@@ -19,20 +19,14 @@ enum JMdictDialect {
final String id;
final String description;
const JMdictDialect({
required this.id,
required this.description,
});
const JMdictDialect({required this.id, required this.description});
static JMdictDialect fromId(String id) => JMdictDialect.values.firstWhere(
(e) => e.id == id,
orElse: () => throw Exception('Unknown id: $id'),
);
(e) => e.id == id,
orElse: () => throw Exception('Unknown id: $id'),
);
Map<String, Object?> toJson() => {
'id': id,
'description': description,
};
Map<String, Object?> toJson() => {'id': id, 'description': description};
static JMdictDialect fromJson(Map<String, Object?> json) =>
JMdictDialect.values.firstWhere(

View File

@@ -102,20 +102,14 @@ enum JMdictField {
final String id;
final String description;
const JMdictField({
required this.id,
required this.description,
});
const JMdictField({required this.id, required this.description});
static JMdictField fromId(String id) => JMdictField.values.firstWhere(
(e) => e.id == id,
orElse: () => throw Exception('Unknown id: $id'),
);
(e) => e.id == id,
orElse: () => throw Exception('Unknown id: $id'),
);
Map<String, Object?> toJson() => {
'id': id,
'description': description,
};
Map<String, Object?> toJson() => {'id': id, 'description': description};
static JMdictField fromJson(Map<String, Object?> json) =>
JMdictField.values.firstWhere(

View File

@@ -13,20 +13,14 @@ enum JMdictKanjiInfo {
final String id;
final String description;
const JMdictKanjiInfo({
required this.id,
required this.description,
});
const JMdictKanjiInfo({required this.id, required this.description});
static JMdictKanjiInfo fromId(String id) => JMdictKanjiInfo.values.firstWhere(
(e) => e.id == id,
orElse: () => throw Exception('Unknown id: $id'),
);
(e) => e.id == id,
orElse: () => throw Exception('Unknown id: $id'),
);
Map<String, Object?> toJson() => {
'id': id,
'description': description,
};
Map<String, Object?> toJson() => {'id': id, 'description': description};
static JMdictKanjiInfo fromJson(Map<String, Object?> json) =>
JMdictKanjiInfo.values.firstWhere(

View File

@@ -74,20 +74,14 @@ enum JMdictMisc {
final String id;
final String description;
const JMdictMisc({
required this.id,
required this.description,
});
const JMdictMisc({required this.id, required this.description});
static JMdictMisc fromId(String id) => JMdictMisc.values.firstWhere(
(e) => e.id == id,
orElse: () => throw Exception('Unknown id: $id'),
);
(e) => e.id == id,
orElse: () => throw Exception('Unknown id: $id'),
);
Map<String, Object?> toJson() => {
'id': id,
'description': description,
};
Map<String, Object?> toJson() => {'id': id, 'description': description};
static JMdictMisc fromJson(Map<String, Object?> json) =>
JMdictMisc.values.firstWhere(

View File

@@ -202,14 +202,11 @@ enum JMdictPOS {
String get shortDescription => _shortDescription ?? description;
static JMdictPOS fromId(String id) => JMdictPOS.values.firstWhere(
(e) => e.id == id,
orElse: () => throw Exception('Unknown id: $id'),
);
(e) => e.id == id,
orElse: () => throw Exception('Unknown id: $id'),
);
Map<String, Object?> toJson() => {
'id': id,
'description': description,
};
Map<String, Object?> toJson() => {'id': id, 'description': description};
static JMdictPOS fromJson(Map<String, Object?> json) =>
JMdictPOS.values.firstWhere(

View File

@@ -15,10 +15,7 @@ enum JMdictReadingInfo {
final String id;
final String description;
const JMdictReadingInfo({
required this.id,
required this.description,
});
const JMdictReadingInfo({required this.id, required this.description});
static JMdictReadingInfo fromId(String id) =>
JMdictReadingInfo.values.firstWhere(
@@ -26,10 +23,7 @@ enum JMdictReadingInfo {
orElse: () => throw Exception('Unknown id: $id'),
);
Map<String, Object?> toJson() => {
'id': id,
'description': description,
};
Map<String, Object?> toJson() => {'id': id, 'description': description};
static JMdictReadingInfo fromJson(Map<String, Object?> json) =>
JMdictReadingInfo.values.firstWhere(

View File

@@ -26,19 +26,14 @@ class KanjiSearchRadical extends Equatable {
});
@override
List<Object> get props => [
symbol,
this.names,
forms,
meanings,
];
List<Object> get props => [symbol, this.names, forms, meanings];
Map<String, dynamic> toJson() => {
'symbol': symbol,
'names': names,
'forms': forms,
'meanings': meanings,
};
'symbol': symbol,
'names': names,
'forms': forms,
'meanings': meanings,
};
factory KanjiSearchRadical.fromJson(Map<String, dynamic> json) {
return KanjiSearchRadical(

View File

@@ -89,46 +89,46 @@ class KanjiSearchResult extends Equatable {
@override
// ignore: public_member_api_docs
List<Object?> get props => [
taughtIn,
jlptLevel,
newspaperFrequencyRank,
strokeCount,
meanings,
kunyomi,
onyomi,
// kunyomiExamples,
// onyomiExamples,
radical,
parts,
codepoints,
kanji,
nanori,
alternativeLanguageReadings,
strokeMiscounts,
queryCodes,
dictionaryReferences,
];
taughtIn,
jlptLevel,
newspaperFrequencyRank,
strokeCount,
meanings,
kunyomi,
onyomi,
// kunyomiExamples,
// onyomiExamples,
radical,
parts,
codepoints,
kanji,
nanori,
alternativeLanguageReadings,
strokeMiscounts,
queryCodes,
dictionaryReferences,
];
Map<String, dynamic> toJson() => {
'kanji': kanji,
'taughtIn': taughtIn,
'jlptLevel': jlptLevel,
'newspaperFrequencyRank': newspaperFrequencyRank,
'strokeCount': strokeCount,
'meanings': meanings,
'kunyomi': kunyomi,
'onyomi': onyomi,
// 'onyomiExamples': onyomiExamples,
// 'kunyomiExamples': kunyomiExamples,
'radical': radical?.toJson(),
'parts': parts,
'codepoints': codepoints,
'nanori': nanori,
'alternativeLanguageReadings': alternativeLanguageReadings,
'strokeMiscounts': strokeMiscounts,
'queryCodes': queryCodes,
'dictionaryReferences': dictionaryReferences,
};
'kanji': kanji,
'taughtIn': taughtIn,
'jlptLevel': jlptLevel,
'newspaperFrequencyRank': newspaperFrequencyRank,
'strokeCount': strokeCount,
'meanings': meanings,
'kunyomi': kunyomi,
'onyomi': onyomi,
// 'onyomiExamples': onyomiExamples,
// 'kunyomiExamples': kunyomiExamples,
'radical': radical?.toJson(),
'parts': parts,
'codepoints': codepoints,
'nanori': nanori,
'alternativeLanguageReadings': alternativeLanguageReadings,
'strokeMiscounts': strokeMiscounts,
'queryCodes': queryCodes,
'dictionaryReferences': dictionaryReferences,
};
factory KanjiSearchResult.fromJson(Map<String, dynamic> json) {
return KanjiSearchResult(
@@ -156,23 +156,20 @@ class KanjiSearchResult extends Equatable {
nanori: (json['nanori'] as List).map((e) => e as String).toList(),
alternativeLanguageReadings:
(json['alternativeLanguageReadings'] as Map<String, dynamic>).map(
(key, value) => MapEntry(
key,
(value as List).map((e) => e as String).toList(),
),
),
strokeMiscounts:
(json['strokeMiscounts'] as List).map((e) => e as int).toList(),
(key, value) =>
MapEntry(key, (value as List).map((e) => e as String).toList()),
),
strokeMiscounts: (json['strokeMiscounts'] as List)
.map((e) => e as int)
.toList(),
queryCodes: (json['queryCodes'] as Map<String, dynamic>).map(
(key, value) => MapEntry(
key,
(value as List).map((e) => e as String).toList(),
),
(key, value) =>
MapEntry(key, (value as List).map((e) => e as String).toList()),
),
dictionaryReferences:
(json['dictionaryReferences'] as Map<String, dynamic>).map(
(key, value) => MapEntry(key, value as String),
),
(key, value) => MapEntry(key, value as String),
),
);
}
}

View File

@@ -7,14 +7,14 @@ import 'package:sqflite_common/sqlite_api.dart';
Future<void> verifyTablesWithDbConnection(DatabaseExecutor db) async {
final Set<String> tables = await db
.query(
'sqlite_master',
columns: ['name'],
where: 'type = ?',
whereArgs: ['table'],
)
'sqlite_master',
columns: ['name'],
where: 'type = ?',
whereArgs: ['table'],
)
.then((result) {
return result.map((row) => row['name'] as String).toSet();
});
return result.map((row) => row['name'] as String).toSet();
});
final Set<String> expectedTables = {
...JMdictTableNames.allTables,
@@ -26,14 +26,16 @@ Future<void> verifyTablesWithDbConnection(DatabaseExecutor db) async {
final missingTables = expectedTables.difference(tables);
if (missingTables.isNotEmpty) {
throw Exception([
'Missing tables:',
missingTables.map((table) => ' - $table').join('\n'),
'',
'Found tables:\n',
tables.map((table) => ' - $table').join('\n'),
'',
'Please ensure the database is correctly set up.',
].join('\n'));
throw Exception(
[
'Missing tables:',
missingTables.map((table) => ' - $table').join('\n'),
'',
'Found tables:\n',
tables.map((table) => ' - $table').join('\n'),
'',
'Please ensure the database is correctly set up.',
].join('\n'),
);
}
}

View File

@@ -47,18 +47,18 @@ class WordSearchResult {
});
Map<String, dynamic> toJson() => {
'_score': score,
'entryId': entryId,
'isCommon': isCommon,
'japanese': japanese.map((e) => e.toJson()).toList(),
'kanjiInfo':
kanjiInfo.map((key, value) => MapEntry(key, value.toJson())),
'readingInfo':
readingInfo.map((key, value) => MapEntry(key, value.toJson())),
'senses': senses.map((e) => e.toJson()).toList(),
'jlptLevel': jlptLevel.toJson(),
'sources': sources.toJson(),
};
'_score': score,
'entryId': entryId,
'isCommon': isCommon,
'japanese': japanese.map((e) => e.toJson()).toList(),
'kanjiInfo': kanjiInfo.map((key, value) => MapEntry(key, value.toJson())),
'readingInfo': readingInfo.map(
(key, value) => MapEntry(key, value.toJson()),
),
'senses': senses.map((e) => e.toJson()).toList(),
'jlptLevel': jlptLevel.toJson(),
'sources': sources.toJson(),
};
factory WordSearchResult.fromJson(Map<String, dynamic> json) =>
WordSearchResult(

View File

@@ -6,18 +6,12 @@ class WordSearchRuby {
/// Furigana, if applicable.
String? furigana;
WordSearchRuby({
required this.base,
this.furigana,
});
WordSearchRuby({required this.base, this.furigana});
Map<String, dynamic> toJson() => {
'base': base,
'furigana': furigana,
};
Map<String, dynamic> toJson() => {'base': base, 'furigana': furigana};
factory WordSearchRuby.fromJson(Map<String, dynamic> json) => WordSearchRuby(
base: json['base'] as String,
furigana: json['furigana'] as String?,
);
base: json['base'] as String,
furigana: json['furigana'] as String?,
);
}

View File

@@ -71,18 +71,18 @@ class WordSearchSense {
languageSource.isEmpty;
Map<String, dynamic> toJson() => {
'englishDefinitions': englishDefinitions,
'partsOfSpeech': partsOfSpeech.map((e) => e.toJson()).toList(),
'seeAlso': seeAlso.map((e) => e.toJson()).toList(),
'antonyms': antonyms.map((e) => e.toJson()).toList(),
'restrictedToReading': restrictedToReading,
'restrictedToKanji': restrictedToKanji,
'fields': fields.map((e) => e.toJson()).toList(),
'dialects': dialects.map((e) => e.toJson()).toList(),
'misc': misc.map((e) => e.toJson()).toList(),
'info': info,
'languageSource': languageSource,
};
'englishDefinitions': englishDefinitions,
'partsOfSpeech': partsOfSpeech.map((e) => e.toJson()).toList(),
'seeAlso': seeAlso.map((e) => e.toJson()).toList(),
'antonyms': antonyms.map((e) => e.toJson()).toList(),
'restrictedToReading': restrictedToReading,
'restrictedToKanji': restrictedToKanji,
'fields': fields.map((e) => e.toJson()).toList(),
'dialects': dialects.map((e) => e.toJson()).toList(),
'misc': misc.map((e) => e.toJson()).toList(),
'info': info,
'languageSource': languageSource,
};
factory WordSearchSense.fromJson(Map<String, dynamic> json) =>
WordSearchSense(
@@ -104,8 +104,9 @@ class WordSearchSense {
dialects: (json['dialects'] as List)
.map((e) => JMdictDialect.fromJson(e))
.toList(),
misc:
(json['misc'] as List).map((e) => JMdictMisc.fromJson(e)).toList(),
misc: (json['misc'] as List)
.map((e) => JMdictMisc.fromJson(e))
.toList(),
info: List<String>.from(json['info']),
languageSource: (json['languageSource'] as List)
.map((e) => WordSearchSenseLanguageSource.fromJson(e))

View File

@@ -13,11 +13,11 @@ class WordSearchSenseLanguageSource {
});
Map<String, Object?> toJson() => {
'language': language,
'phrase': phrase,
'fullyDescribesSense': fullyDescribesSense,
'constructedFromSmallerWords': constructedFromSmallerWords,
};
'language': language,
'phrase': phrase,
'fullyDescribesSense': fullyDescribesSense,
'constructedFromSmallerWords': constructedFromSmallerWords,
};
factory WordSearchSenseLanguageSource.fromJson(Map<String, dynamic> json) =>
WordSearchSenseLanguageSource(

View File

@@ -7,20 +7,11 @@ class WordSearchSources {
/// Whether JMnedict was used.
final bool jmnedict;
const WordSearchSources({
this.jmdict = true,
this.jmnedict = false,
});
const WordSearchSources({this.jmdict = true, this.jmnedict = false});
Map<String, Object?> get sqlValue => {
'jmdict': jmdict,
'jmnedict': jmnedict,
};
Map<String, Object?> get sqlValue => {'jmdict': jmdict, 'jmnedict': jmnedict};
Map<String, dynamic> toJson() => {
'jmdict': jmdict,
'jmnedict': jmnedict,
};
Map<String, dynamic> toJson() => {'jmdict': jmdict, 'jmnedict': jmnedict};
factory WordSearchSources.fromJson(Map<String, dynamic> json) =>
WordSearchSources(

View File

@@ -21,11 +21,11 @@ class WordSearchXrefEntry {
});
Map<String, dynamic> toJson() => {
'entryId': entryId,
'ambiguous': ambiguous,
'baseWord': baseWord,
'furigana': furigana,
};
'entryId': entryId,
'ambiguous': ambiguous,
'baseWord': baseWord,
'furigana': furigana,
};
factory WordSearchXrefEntry.fromJson(Map<String, dynamic> json) =>
WordSearchXrefEntry(

View File

@@ -27,8 +27,7 @@ extension JaDBConnection on DatabaseExecutor {
Future<List<String>> filterKanji(
List<String> kanji, {
bool deduplicate = false,
}) =>
filterKanjiWithDbConnection(this, kanji, deduplicate);
}) => filterKanjiWithDbConnection(this, kanji, deduplicate);
/// Search for a word in the database.
Future<List<WordSearchResult>?> jadbSearchWord(
@@ -36,14 +35,13 @@ extension JaDBConnection on DatabaseExecutor {
SearchMode searchMode = SearchMode.Auto,
int page = 0,
int? pageSize,
}) =>
searchWordWithDbConnection(
this,
word,
searchMode: searchMode,
page: page,
pageSize: pageSize,
);
}) => searchWordWithDbConnection(
this,
word,
searchMode: searchMode,
page: page,
pageSize: pageSize,
);
///
Future<WordSearchResult?> jadbGetWordById(int id) =>
@@ -59,12 +57,7 @@ extension JaDBConnection on DatabaseExecutor {
Future<int?> jadbSearchWordCount(
String word, {
SearchMode searchMode = SearchMode.Auto,
}) =>
searchWordCountWithDbConnection(
this,
word,
searchMode: searchMode,
);
}) => searchWordCountWithDbConnection(this, word, searchMode: searchMode);
/// Given a list of radicals, search which kanji contains all
/// of the radicals, find their other radicals, and return those.

View File

@@ -6,14 +6,13 @@ Future<List<String>> filterKanjiWithDbConnection(
List<String> kanji,
bool deduplicate,
) async {
final Set<String> filteredKanji = await connection.rawQuery(
'''
final Set<String> filteredKanji = await connection
.rawQuery('''
SELECT "literal"
FROM "${KANJIDICTableNames.character}"
WHERE "literal" IN (${kanji.map((_) => '?').join(',')})
''',
kanji,
).then((value) => value.map((e) => e['literal'] as String).toSet());
''', kanji)
.then((value) => value.map((e) => e['literal'] as String).toSet());
if (deduplicate) {
return filteredKanji.toList();

View File

@@ -116,10 +116,10 @@ Future<KanjiSearchResult?> searchKanjiWithDbConnection(
// whereArgs: [kanji],
// );
// TODO: Search for kunyomi and onyomi usage of the characters
// from JMDict. We'll need to fuzzy query JMDict_KanjiElement for matches,
// filter JMdict_ReadingElement for kunyomi/onyomi, and then sort the main entry
// by JLPT, news frequency, etc.
// TODO: Search for kunyomi and onyomi usage of the characters
// from JMDict. We'll need to fuzzy query JMDict_KanjiElement for matches,
// filter JMdict_ReadingElement for kunyomi/onyomi, and then sort the main entry
// by JLPT, news frequency, etc.
await characters_query.then((value) => characters = value);
@@ -157,9 +157,7 @@ Future<KanjiSearchResult?> searchKanjiWithDbConnection(
: null;
final alternativeLanguageReadings = readings
.groupListsBy(
(item) => item['type'] as String,
)
.groupListsBy((item) => item['type'] as String)
.map(
(key, value) => MapEntry(
key,
@@ -169,14 +167,10 @@ Future<KanjiSearchResult?> searchKanjiWithDbConnection(
// TODO: Add `SKIPMisclassification` to the entries
final queryCodes = query_codes
.groupListsBy(
(item) => item['type'] as String,
)
.groupListsBy((item) => item['type'] as String)
.map(
(key, value) => MapEntry(
key,
value.map((item) => item['code'] as String).toList(),
),
(key, value) =>
MapEntry(key, value.map((item) => item['code'] as String).toList()),
);
// TODO: Add `volume` and `page` to the entries
@@ -213,8 +207,9 @@ Future<KanjiSearchResult?> searchKanjiWithDbConnection(
},
nanori: nanoris.map((item) => item['nanori'] as String).toList(),
alternativeLanguageReadings: alternativeLanguageReadings,
strokeMiscounts:
stroke_miscounts.map((item) => item['strokeCount'] as int).toList(),
strokeMiscounts: stroke_miscounts
.map((item) => item['strokeCount'] as int)
.toList(),
queryCodes: queryCodes,
dictionaryReferences: dictionaryReferences,
);

View File

@@ -19,14 +19,12 @@ Future<List<String>> searchRemainingRadicalsWithDbConnection(
HAVING COUNT(DISTINCT "radical") = ?
)
''',
[
...radicals,
radicals.length,
],
[...radicals, radicals.length],
);
final remainingRadicals =
queryResult.map((row) => row['radical'] as String).toList();
final remainingRadicals = queryResult
.map((row) => row['radical'] as String)
.toList();
return remainingRadicals;
}
@@ -43,10 +41,7 @@ Future<List<String>> searchKanjiByRadicalsWithDbConnection(
GROUP BY "kanji"
HAVING COUNT(DISTINCT "radical") = ?
''',
[
...radicals,
radicals.length,
],
[...radicals, radicals.length],
);
final kanji = queryResult.map((row) => row['kanji'] as String).toList();

View File

@@ -61,22 +61,22 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
);
late final List<Map<String, Object?>> readingElements;
final Future<List<Map<String, Object?>>> readingElements_query =
connection.query(
JMdictTableNames.readingElement,
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
whereArgs: entryIds,
orderBy: 'orderNum',
);
final Future<List<Map<String, Object?>>> readingElements_query = connection
.query(
JMdictTableNames.readingElement,
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
whereArgs: entryIds,
orderBy: 'orderNum',
);
late final List<Map<String, Object?>> kanjiElements;
final Future<List<Map<String, Object?>>> kanjiElements_query =
connection.query(
JMdictTableNames.kanjiElement,
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
whereArgs: entryIds,
orderBy: 'orderNum',
);
final Future<List<Map<String, Object?>>> kanjiElements_query = connection
.query(
JMdictTableNames.kanjiElement,
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
whereArgs: entryIds,
orderBy: 'orderNum',
);
late final List<Map<String, Object?>> jlptTags;
final Future<List<Map<String, Object?>>> jlptTags_query = connection.query(
@@ -86,12 +86,12 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
);
late final List<Map<String, Object?>> commonEntries;
final Future<List<Map<String, Object?>>> commonEntries_query =
connection.query(
'JMdict_EntryCommon',
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
whereArgs: entryIds,
);
final Future<List<Map<String, Object?>>> commonEntries_query = connection
.query(
'JMdict_EntryCommon',
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
whereArgs: entryIds,
);
await Future.wait([
senses_query.then((value) => senses = value),
@@ -106,9 +106,9 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
final senseIds = senses.map((sense) => sense['senseId'] as int).toList();
late final List<Map<String, Object?>> senseAntonyms;
final Future<List<Map<String, Object?>>> senseAntonyms_query =
connection.rawQuery(
"""
final Future<List<Map<String, Object?>>> senseAntonyms_query = connection
.rawQuery(
"""
SELECT
"${JMdictTableNames.senseAntonyms}".senseId,
"${JMdictTableNames.senseAntonyms}".ambiguous,
@@ -125,16 +125,16 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
"${JMdictTableNames.senseAntonyms}"."senseId",
"${JMdictTableNames.senseAntonyms}"."xrefEntryId"
""",
[...senseIds],
);
[...senseIds],
);
late final List<Map<String, Object?>> senseDialects;
final Future<List<Map<String, Object?>>> senseDialects_query =
connection.query(
JMdictTableNames.senseDialect,
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
whereArgs: senseIds,
);
final Future<List<Map<String, Object?>>> senseDialects_query = connection
.query(
JMdictTableNames.senseDialect,
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
whereArgs: senseIds,
);
late final List<Map<String, Object?>> senseFields;
final Future<List<Map<String, Object?>>> senseFields_query = connection.query(
@@ -144,12 +144,12 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
);
late final List<Map<String, Object?>> senseGlossaries;
final Future<List<Map<String, Object?>>> senseGlossaries_query =
connection.query(
JMdictTableNames.senseGlossary,
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
whereArgs: senseIds,
);
final Future<List<Map<String, Object?>>> senseGlossaries_query = connection
.query(
JMdictTableNames.senseGlossary,
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
whereArgs: senseIds,
);
late final List<Map<String, Object?>> senseInfos;
final Future<List<Map<String, Object?>>> senseInfos_query = connection.query(
@@ -161,10 +161,10 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
late final List<Map<String, Object?>> senseLanguageSources;
final Future<List<Map<String, Object?>>> senseLanguageSources_query =
connection.query(
JMdictTableNames.senseLanguageSource,
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
whereArgs: senseIds,
);
JMdictTableNames.senseLanguageSource,
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
whereArgs: senseIds,
);
late final List<Map<String, Object?>> senseMiscs;
final Future<List<Map<String, Object?>>> senseMiscs_query = connection.query(
@@ -183,23 +183,23 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
late final List<Map<String, Object?>> senseRestrictedToKanjis;
final Future<List<Map<String, Object?>>> senseRestrictedToKanjis_query =
connection.query(
JMdictTableNames.senseRestrictedToKanji,
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
whereArgs: senseIds,
);
JMdictTableNames.senseRestrictedToKanji,
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
whereArgs: senseIds,
);
late final List<Map<String, Object?>> senseRestrictedToReadings;
final Future<List<Map<String, Object?>>> senseRestrictedToReadings_query =
connection.query(
JMdictTableNames.senseRestrictedToReading,
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
whereArgs: senseIds,
);
JMdictTableNames.senseRestrictedToReading,
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
whereArgs: senseIds,
);
late final List<Map<String, Object?>> senseSeeAlsos;
final Future<List<Map<String, Object?>>> senseSeeAlsos_query =
connection.rawQuery(
"""
final Future<List<Map<String, Object?>>> senseSeeAlsos_query = connection
.rawQuery(
"""
SELECT
"${JMdictTableNames.senseSeeAlso}"."senseId",
"${JMdictTableNames.senseSeeAlso}"."ambiguous",
@@ -216,16 +216,16 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
"${JMdictTableNames.senseSeeAlso}"."senseId",
"${JMdictTableNames.senseSeeAlso}"."xrefEntryId"
""",
[...senseIds],
);
[...senseIds],
);
late final List<Map<String, Object?>> exampleSentences;
final Future<List<Map<String, Object?>>> exampleSentences_query =
connection.query(
'JMdict_ExampleSentence',
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
whereArgs: senseIds,
);
final Future<List<Map<String, Object?>>> exampleSentences_query = connection
.query(
'JMdict_ExampleSentence',
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
whereArgs: senseIds,
);
// Reading queries
@@ -236,18 +236,20 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
late final List<Map<String, Object?>> readingElementInfos;
final Future<List<Map<String, Object?>>> readingElementInfos_query =
connection.query(
JMdictTableNames.readingInfo,
where: '(elementId) IN (${List.filled(readingIds.length, '?').join(',')})',
whereArgs: readingIds,
);
JMdictTableNames.readingInfo,
where:
'(elementId) IN (${List.filled(readingIds.length, '?').join(',')})',
whereArgs: readingIds,
);
late final List<Map<String, Object?>> readingElementRestrictions;
final Future<List<Map<String, Object?>>> readingElementRestrictions_query =
connection.query(
JMdictTableNames.readingRestriction,
where: '(elementId) IN (${List.filled(readingIds.length, '?').join(',')})',
whereArgs: readingIds,
);
JMdictTableNames.readingRestriction,
where:
'(elementId) IN (${List.filled(readingIds.length, '?').join(',')})',
whereArgs: readingIds,
);
// Kanji queries
@@ -256,12 +258,13 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
.toList();
late final List<Map<String, Object?>> kanjiElementInfos;
final Future<List<Map<String, Object?>>> kanjiElementInfos_query =
connection.query(
JMdictTableNames.kanjiInfo,
where: '(elementId) IN (${List.filled(kanjiIds.length, '?').join(',')})',
whereArgs: kanjiIds,
);
final Future<List<Map<String, Object?>>> kanjiElementInfos_query = connection
.query(
JMdictTableNames.kanjiInfo,
where:
'(elementId) IN (${List.filled(kanjiIds.length, '?').join(',')})',
whereArgs: kanjiIds,
);
await Future.wait([
senseAntonyms_query.then((value) => senseAntonyms = value),
@@ -272,15 +275,18 @@ Future<LinearWordQueryData> fetchLinearWordQueryData(
senseLanguageSources_query.then((value) => senseLanguageSources = value),
senseMiscs_query.then((value) => senseMiscs = value),
sensePOSs_query.then((value) => sensePOSs = value),
senseRestrictedToKanjis_query
.then((value) => senseRestrictedToKanjis = value),
senseRestrictedToReadings_query
.then((value) => senseRestrictedToReadings = value),
senseRestrictedToKanjis_query.then(
(value) => senseRestrictedToKanjis = value,
),
senseRestrictedToReadings_query.then(
(value) => senseRestrictedToReadings = value,
),
senseSeeAlsos_query.then((value) => senseSeeAlsos = value),
exampleSentences_query.then((value) => exampleSentences = value),
readingElementInfos_query.then((value) => readingElementInfos = value),
readingElementRestrictions_query
.then((value) => readingElementRestrictions = value),
readingElementRestrictions_query.then(
(value) => readingElementRestrictions = value,
),
kanjiElementInfos_query.then((value) => kanjiElementInfos = value),
]);

View File

@@ -47,8 +47,10 @@ String _filterFTSSensitiveCharacters(String word) {
int? offset,
bool countOnly = false,
}) {
assert(tableName == JMdictTableNames.kanjiElement ||
tableName == JMdictTableNames.readingElement);
assert(
tableName == JMdictTableNames.kanjiElement ||
tableName == JMdictTableNames.readingElement,
);
assert(!countOnly || pageSize == null);
assert(!countOnly || offset == null);
assert(pageSize == null || pageSize > 0);
@@ -105,7 +107,7 @@ String _filterFTSSensitiveCharacters(String word) {
_filterFTSSensitiveCharacters(word),
if (pageSize != null) pageSize,
if (offset != null) offset,
]
],
);
}
@@ -121,18 +123,19 @@ Future<List<ScoredEntryId>> _queryKanji(
pageSize: pageSize,
offset: offset,
);
return connection.rawQuery(query, args).then((result) => result
.map((row) => ScoredEntryId(
row['entryId'] as int,
row['score'] as int,
))
.toList());
return connection
.rawQuery(query, args)
.then(
(result) => result
.map(
(row) =>
ScoredEntryId(row['entryId'] as int, row['score'] as int),
)
.toList(),
);
}
Future<int> _queryKanjiCount(
DatabaseExecutor connection,
String word,
) {
Future<int> _queryKanjiCount(DatabaseExecutor connection, String word) {
final (query, args) = _kanjiReadingTemplate(
JMdictTableNames.kanjiElement,
word,
@@ -155,18 +158,19 @@ Future<List<ScoredEntryId>> _queryKana(
pageSize: pageSize,
offset: offset,
);
return connection.rawQuery(query, args).then((result) => result
.map((row) => ScoredEntryId(
row['entryId'] as int,
row['score'] as int,
))
.toList());
return connection
.rawQuery(query, args)
.then(
(result) => result
.map(
(row) =>
ScoredEntryId(row['entryId'] as int, row['score'] as int),
)
.toList(),
);
}
Future<int> _queryKanaCount(
DatabaseExecutor connection,
String word,
) {
Future<int> _queryKanaCount(DatabaseExecutor connection, String word) {
final (query, args) = _kanjiReadingTemplate(
JMdictTableNames.readingElement,
word,
@@ -211,28 +215,15 @@ Future<List<ScoredEntryId>> _queryEnglish(
OFFSET ?
'''
.trim(),
[
word,
word,
word,
'%${word.replaceAll('%', '')}%',
pageSize,
offset,
],
[word, word, word, '%${word.replaceAll('%', '')}%', pageSize, offset],
);
return result
.map((row) => ScoredEntryId(
row['entryId'] as int,
row['score'] as int,
))
.map((row) => ScoredEntryId(row['entryId'] as int, row['score'] as int))
.toList();
}
Future<int> _queryEnglishCount(
DatabaseExecutor connection,
String word,
) async {
Future<int> _queryEnglishCount(DatabaseExecutor connection, String word) async {
final result = await connection.rawQuery(
'''
SELECT
@@ -242,9 +233,7 @@ Future<int> _queryEnglishCount(
WHERE "${JMdictTableNames.senseGlossary}"."phrase" LIKE ?
'''
.trim(),
[
'%$word%',
],
['%$word%'],
);
return result.first['count'] as int;
@@ -261,46 +250,26 @@ Future<List<ScoredEntryId>> fetchEntryIds(
searchMode = _determineSearchMode(word);
}
assert(
word.isNotEmpty,
'Word should not be empty when fetching entry IDs',
);
assert(word.isNotEmpty, 'Word should not be empty when fetching entry IDs');
late final List<ScoredEntryId> entryIds;
switch (searchMode) {
case SearchMode.Kanji:
entryIds = await _queryKanji(
connection,
word,
pageSize,
offset,
);
entryIds = await _queryKanji(connection, word, pageSize, offset);
break;
case SearchMode.Kana:
entryIds = await _queryKana(
connection,
word,
pageSize,
offset,
);
entryIds = await _queryKana(connection, word, pageSize, offset);
break;
case SearchMode.English:
entryIds = await _queryEnglish(
connection,
word,
pageSize,
offset,
);
entryIds = await _queryEnglish(connection, word, pageSize, offset);
break;
case SearchMode.MixedKana:
case SearchMode.MixedKanji:
default:
throw UnimplementedError(
'Search mode $searchMode is not implemented',
);
throw UnimplementedError('Search mode $searchMode is not implemented');
}
;
@@ -316,41 +285,27 @@ Future<int?> fetchEntryIdCount(
searchMode = _determineSearchMode(word);
}
assert(
word.isNotEmpty,
'Word should not be empty when fetching entry IDs',
);
assert(word.isNotEmpty, 'Word should not be empty when fetching entry IDs');
late final int? entryIdCount;
switch (searchMode) {
case SearchMode.Kanji:
entryIdCount = await _queryKanjiCount(
connection,
word,
);
entryIdCount = await _queryKanjiCount(connection, word);
break;
case SearchMode.Kana:
entryIdCount = await _queryKanaCount(
connection,
word,
);
entryIdCount = await _queryKanaCount(connection, word);
break;
case SearchMode.English:
entryIdCount = await _queryEnglishCount(
connection,
word,
);
entryIdCount = await _queryEnglishCount(connection, word);
break;
case SearchMode.MixedKana:
case SearchMode.MixedKanji:
default:
throw UnimplementedError(
'Search mode $searchMode is not implemented',
);
throw UnimplementedError('Search mode $searchMode is not implemented');
}
return entryIdCount;

View File

@@ -39,8 +39,9 @@ List<WordSearchResult> regroupWordSearchResults({
}) {
final List<WordSearchResult> results = [];
final commonEntryIds =
commonEntries.map((entry) => entry['entryId'] as int).toSet();
final commonEntryIds = commonEntries
.map((entry) => entry['entryId'] as int)
.toSet();
for (final scoredEntryId in entryIds) {
final List<Map<String, Object?>> entryReadingElements = readingElements
@@ -55,7 +56,8 @@ List<WordSearchResult> regroupWordSearchResults({
.where((element) => element['entryId'] == scoredEntryId.entryId)
.toList();
final jlptLevel = entryJlptTags
final jlptLevel =
entryJlptTags
.map((e) => JlptLevel.fromString(e['jlptLevel'] as String?))
.sorted((a, b) => b.compareTo(a))
.firstOrNull ??
@@ -102,10 +104,7 @@ List<WordSearchResult> regroupWordSearchResults({
readingInfo: entryReadingElementsGrouped.readingInfos,
senses: entrySensesGrouped,
jlptLevel: jlptLevel,
sources: const WordSearchSources(
jmdict: true,
jmnedict: false,
),
sources: const WordSearchSources(jmdict: true, jmnedict: false),
),
);
}
@@ -135,8 +134,9 @@ GroupedWordResult _regroup_words({
}) {
final List<WordSearchRuby> rubys = [];
final kanjiElements_ =
kanjiElements.where((element) => element['entryId'] == entryId).toList();
final kanjiElements_ = kanjiElements
.where((element) => element['entryId'] == entryId)
.toList();
final readingElements_ = readingElements
.where((element) => element['entryId'] == entryId)
@@ -148,9 +148,7 @@ GroupedWordResult _regroup_words({
for (final readingElement in readingElements_) {
if (readingElement['doesNotMatchKanji'] == 1 || kanjiElements_.isEmpty) {
final ruby = WordSearchRuby(
base: readingElement['reading'] as String,
);
final ruby = WordSearchRuby(base: readingElement['reading'] as String);
rubys.add(ruby);
continue;
@@ -169,18 +167,12 @@ GroupedWordResult _regroup_words({
continue;
}
final ruby = WordSearchRuby(
base: kanji,
furigana: reading,
);
final ruby = WordSearchRuby(base: kanji, furigana: reading);
rubys.add(ruby);
}
}
assert(
rubys.isNotEmpty,
'No readings found for entryId: $entryId',
);
assert(rubys.isNotEmpty, 'No readings found for entryId: $entryId');
final Map<int, String> readingElementIdsToReading = {
for (final element in readingElements_)
@@ -210,7 +202,7 @@ GroupedWordResult _regroup_words({
kanjiInfos: {
for (final kei in kanjiElementInfos_)
kanjiElementIdsToReading[kei['elementId'] as int]!:
JMdictKanjiInfo.fromId(kei['info'] as String),
JMdictKanjiInfo.fromId(kei['info'] as String),
},
);
}
@@ -230,28 +222,38 @@ List<WordSearchSense> _regroup_senses({
required List<Map<String, Object?>> senseSeeAlsos,
required List<Map<String, Object?>> exampleSentences,
}) {
final groupedSenseAntonyms =
senseAntonyms.groupListsBy((element) => element['senseId'] as int);
final groupedSenseDialects =
senseDialects.groupListsBy((element) => element['senseId'] as int);
final groupedSenseFields =
senseFields.groupListsBy((element) => element['senseId'] as int);
final groupedSenseGlossaries =
senseGlossaries.groupListsBy((element) => element['senseId'] as int);
final groupedSenseInfos =
senseInfos.groupListsBy((element) => element['senseId'] as int);
final groupedSenseLanguageSources =
senseLanguageSources.groupListsBy((element) => element['senseId'] as int);
final groupedSenseMiscs =
senseMiscs.groupListsBy((element) => element['senseId'] as int);
final groupedSensePOSs =
sensePOSs.groupListsBy((element) => element['senseId'] as int);
final groupedSenseRestrictedToKanjis = senseRestrictedToKanjis
.groupListsBy((element) => element['senseId'] as int);
final groupedSenseAntonyms = senseAntonyms.groupListsBy(
(element) => element['senseId'] as int,
);
final groupedSenseDialects = senseDialects.groupListsBy(
(element) => element['senseId'] as int,
);
final groupedSenseFields = senseFields.groupListsBy(
(element) => element['senseId'] as int,
);
final groupedSenseGlossaries = senseGlossaries.groupListsBy(
(element) => element['senseId'] as int,
);
final groupedSenseInfos = senseInfos.groupListsBy(
(element) => element['senseId'] as int,
);
final groupedSenseLanguageSources = senseLanguageSources.groupListsBy(
(element) => element['senseId'] as int,
);
final groupedSenseMiscs = senseMiscs.groupListsBy(
(element) => element['senseId'] as int,
);
final groupedSensePOSs = sensePOSs.groupListsBy(
(element) => element['senseId'] as int,
);
final groupedSenseRestrictedToKanjis = senseRestrictedToKanjis.groupListsBy(
(element) => element['senseId'] as int,
);
final groupedSenseRestrictedToReadings = senseRestrictedToReadings
.groupListsBy((element) => element['senseId'] as int);
final groupedSenseSeeAlsos =
senseSeeAlsos.groupListsBy((element) => element['senseId'] as int);
final groupedSenseSeeAlsos = senseSeeAlsos.groupListsBy(
(element) => element['senseId'] as int,
);
final List<WordSearchSense> result = [];
for (final sense in senses) {
@@ -272,43 +274,53 @@ List<WordSearchSense> _regroup_senses({
final resultSense = WordSearchSense(
englishDefinitions: glossaries.map((e) => e['phrase'] as String).toList(),
partsOfSpeech:
pos.map((e) => JMdictPOS.fromId(e['pos'] as String)).toList(),
partsOfSpeech: pos
.map((e) => JMdictPOS.fromId(e['pos'] as String))
.toList(),
seeAlso: seeAlsos
.map((e) => WordSearchXrefEntry(
entryId: e['xrefEntryId'] as int,
baseWord: e['base'] as String,
furigana: e['furigana'] as String?,
ambiguous: e['ambiguous'] == 1,
))
.map(
(e) => WordSearchXrefEntry(
entryId: e['xrefEntryId'] as int,
baseWord: e['base'] as String,
furigana: e['furigana'] as String?,
ambiguous: e['ambiguous'] == 1,
),
)
.toList(),
antonyms: antonyms
.map((e) => WordSearchXrefEntry(
entryId: e['xrefEntryId'] as int,
baseWord: e['base'] as String,
furigana: e['furigana'] as String?,
ambiguous: e['ambiguous'] == 1,
))
.map(
(e) => WordSearchXrefEntry(
entryId: e['xrefEntryId'] as int,
baseWord: e['base'] as String,
furigana: e['furigana'] as String?,
ambiguous: e['ambiguous'] == 1,
),
)
.toList(),
restrictedToReading: restrictedToReadings
.map((e) => e['reading'] as String)
.toList(),
restrictedToKanji: restrictedToKanjis
.map((e) => e['kanji'] as String)
.toList(),
fields: fields
.map((e) => JMdictField.fromId(e['field'] as String))
.toList(),
restrictedToReading:
restrictedToReadings.map((e) => e['reading'] as String).toList(),
restrictedToKanji:
restrictedToKanjis.map((e) => e['kanji'] as String).toList(),
fields:
fields.map((e) => JMdictField.fromId(e['field'] as String)).toList(),
dialects: dialects
.map((e) => JMdictDialect.fromId(e['dialect'] as String))
.toList(),
misc: miscs.map((e) => JMdictMisc.fromId(e['misc'] as String)).toList(),
info: infos.map((e) => e['info'] as String).toList(),
languageSource: languageSources
.map((e) => WordSearchSenseLanguageSource(
language: e['language'] as String,
phrase: e['phrase'] as String?,
fullyDescribesSense: e['fullyDescribesSense'] == 1,
constructedFromSmallerWords:
e['constructedFromSmallerWords'] == 1,
))
.map(
(e) => WordSearchSenseLanguageSource(
language: e['language'] as String,
phrase: e['phrase'] as String?,
fullyDescribesSense: e['fullyDescribesSense'] == 1,
constructedFromSmallerWords:
e['constructedFromSmallerWords'] == 1,
),
)
.toList(),
);

View File

@@ -13,14 +13,7 @@ import 'package:jadb/search/word_search/regrouping.dart';
import 'package:jadb/table_names/jmdict.dart';
import 'package:sqflite_common/sqlite_api.dart';
enum SearchMode {
Auto,
English,
Kanji,
MixedKanji,
Kana,
MixedKana,
}
enum SearchMode { Auto, English, Kanji, MixedKanji, Kana, MixedKana }
Future<List<WordSearchResult>?> searchWordWithDbConnection(
DatabaseExecutor connection,
@@ -49,9 +42,9 @@ Future<List<WordSearchResult>?> searchWordWithDbConnection(
final LinearWordQueryData linearWordQueryData =
await fetchLinearWordQueryData(
connection,
entryIds.map((e) => e.entryId).toList(),
);
connection,
entryIds.map((e) => e.entryId).toList(),
);
final result = regroupWordSearchResults(
entryIds: entryIds,
@@ -106,20 +99,19 @@ Future<WordSearchResult?> getWordByIdWithDbConnection(
return null;
}
final exists = await connection.rawQuery(
'SELECT EXISTS(SELECT 1 FROM "${JMdictTableNames.entry}" WHERE "entryId" = ?)',
[id],
).then((value) => value.isNotEmpty && value.first.values.first == 1);
final exists = await connection
.rawQuery(
'SELECT EXISTS(SELECT 1 FROM "${JMdictTableNames.entry}" WHERE "entryId" = ?)',
[id],
)
.then((value) => value.isNotEmpty && value.first.values.first == 1);
if (!exists) {
return null;
}
final LinearWordQueryData linearWordQueryData =
await fetchLinearWordQueryData(
connection,
[id],
);
await fetchLinearWordQueryData(connection, [id]);
final result = regroupWordSearchResults(
entryIds: [ScoredEntryId(id, 0)],

View File

@@ -20,23 +20,23 @@ abstract class JMdictTableNames {
static const String senseSeeAlso = 'JMdict_SenseSeeAlso';
static Set<String> get allTables => {
entry,
kanjiElement,
kanjiInfo,
readingElement,
readingInfo,
readingRestriction,
sense,
senseAntonyms,
senseDialect,
senseField,
senseGlossary,
senseInfo,
senseMisc,
sensePOS,
senseLanguageSource,
senseRestrictedToKanji,
senseRestrictedToReading,
senseSeeAlso
};
entry,
kanjiElement,
kanjiInfo,
readingElement,
readingInfo,
readingRestriction,
sense,
senseAntonyms,
senseDialect,
senseField,
senseGlossary,
senseInfo,
senseMisc,
sensePOS,
senseLanguageSource,
senseRestrictedToKanji,
senseRestrictedToReading,
senseSeeAlso,
};
}

View File

@@ -17,19 +17,19 @@ abstract class KANJIDICTableNames {
static const String nanori = 'KANJIDIC_Nanori';
static Set<String> get allTables => {
character,
radicalName,
codepoint,
radical,
strokeMiscount,
variant,
dictionaryReference,
dictionaryReferenceMoro,
queryCode,
reading,
kunyomi,
onyomi,
meaning,
nanori
};
character,
radicalName,
codepoint,
radical,
strokeMiscount,
variant,
dictionaryReference,
dictionaryReferenceMoro,
queryCode,
reading,
kunyomi,
onyomi,
meaning,
nanori,
};
}

View File

@@ -1,7 +1,5 @@
abstract class RADKFILETableNames {
static const String radkfile = 'RADKFILE';
static Set<String> get allTables => {
radkfile,
};
static Set<String> get allTables => {radkfile};
}

View File

@@ -288,15 +288,8 @@ extension on DateTime {
}
}
String get japaneseWeekdayPrefix => [
'月',
'火',
'水',
'木',
'金',
'土',
'日',
][weekday - 1];
String get japaneseWeekdayPrefix =>
['月', '火', '水', '木', '金', '土', '日'][weekday - 1];
/// Returns the date in Japanese format.
String japaneseDate({bool showWeekday = false}) =>

View File

@@ -12,10 +12,7 @@ enum WordClass {
input,
}
enum LemmatizationRuleType {
prefix,
suffix,
}
enum LemmatizationRuleType { prefix, suffix }
class LemmatizationRule {
final String name;
@@ -46,18 +43,18 @@ class LemmatizationRule {
lookAheadBehind = const [''],
LemmatizationRuleType type = LemmatizationRuleType.suffix,
}) : this(
name: name,
pattern: AllomorphPattern(
patterns: {
pattern: replacement != null ? [replacement] : null
},
type: type,
lookAheadBehind: lookAheadBehind,
),
validChildClasses: validChildClasses,
terminal: terminal,
wordClass: wordClass,
);
name: name,
pattern: AllomorphPattern(
patterns: {
pattern: replacement != null ? [replacement] : null,
},
type: type,
lookAheadBehind: lookAheadBehind,
),
validChildClasses: validChildClasses,
terminal: terminal,
wordClass: wordClass,
);
}
/// Represents a set of patterns for matching allomorphs in a word.
@@ -132,8 +129,8 @@ class AllomorphPattern {
if (word.startsWith(p as String)) {
return patterns[affix] != null
? patterns[affix]!
.map((s) => s + word.substring(affix.length))
.toList()
.map((s) => s + word.substring(affix.length))
.toList()
: [word.substring(affix.length)];
}
break;
@@ -239,9 +236,6 @@ Lemmatized lemmatize(String word) {
return Lemmatized(
original: word,
rule: inputRule,
children: _lemmatize(
inputRule,
word,
),
children: _lemmatize(inputRule, word),
);
}

View File

@@ -534,16 +534,14 @@ bool _hasDoubleConsonant(String for_conversion, int length) =>
RegExp(r'^([kgsztdnbpmyrlwchf])\1$').hasMatch(for_conversion));
String transliterateLatinToHiragana(String latin) {
String romaji =
latin.toLowerCase().replaceAll('mb', 'nb').replaceAll('mp', 'np');
String romaji = latin
.toLowerCase()
.replaceAll('mb', 'nb')
.replaceAll('mp', 'np');
String kana = '';
while (romaji.isNotEmpty) {
final lengths = [
if (romaji.length > 2) 3,
if (romaji.length > 1) 2,
1,
];
final lengths = [if (romaji.length > 2) 3, if (romaji.length > 1) 2, 1];
for (final length in lengths) {
String? mora;
@@ -579,11 +577,11 @@ String _transposeCodepointsInRange(
int distance,
int rangeStart,
int rangeEnd,
) =>
String.fromCharCodes(
text.codeUnits
.map((c) => c + ((rangeStart <= c && c <= rangeEnd) ? distance : 0)),
);
) => String.fromCharCodes(
text.codeUnits.map(
(c) => c + ((rangeStart <= c && c <= rangeEnd) ? distance : 0),
),
);
String transliterateKanaToLatin(String kana) =>
transliterateHiraganaToLatin(transliterateKatakanaToHiragana(kana));
@@ -599,12 +597,7 @@ String transliterateHiraganaToKatakana(String hiragana) =>
String transliterateFullwidthRomajiToHalfwidth(String halfwidth) =>
_transposeCodepointsInRange(
_transposeCodepointsInRange(
halfwidth,
-65248,
65281,
65374,
),
_transposeCodepointsInRange(halfwidth, -65248, 65281, 65374),
-12256,
12288,
12288,
@@ -612,12 +605,7 @@ String transliterateFullwidthRomajiToHalfwidth(String halfwidth) =>
String transliterateHalfwidthRomajiToFullwidth(String halfwidth) =>
_transposeCodepointsInRange(
_transposeCodepointsInRange(
halfwidth,
65248,
33,
126,
),
_transposeCodepointsInRange(halfwidth, 65248, 33, 126),
12256,
32,
32,

View File

@@ -7,25 +7,22 @@ void main() {
test("Filter kanji", () async {
final connection = await setup_database_connection();
final result = await connection.filterKanji(
[
"a",
"b",
"c",
"",
"",
"",
"",
"",
"",
".",
"!",
"@",
";",
"",
],
deduplicate: false,
);
final result = await connection.filterKanji([
"a",
"b",
"c",
"",
"",
"",
"",
"",
"",
".",
"!",
"@",
";",
"",
], deduplicate: false);
expect(result.join(), "漢字地字");
});