Compare commits
46 Commits
v0.6.0
...
add-kanjiv
| Author | SHA1 | Date | |
|---|---|---|---|
|
8df3db4c77
|
|||
|
d1a6f39cca
|
|||
|
a222b2d9b8
|
|||
|
6364457d9e
|
|||
|
5d26b41524
|
|||
|
114febbe02
|
|||
|
20243dec09
|
|||
|
f6de8680ad
|
|||
|
99218a6987
|
|||
|
e8ee1ab944
|
|||
|
4f320e4ea9
|
|||
|
9c9f5543c8
|
|||
|
be493a6150
|
|||
|
8d742b92be
|
|||
|
9b9c771eff
|
|||
|
eebeaba0e0
|
|||
|
61ac226fc3
|
|||
|
ede57a7a00
|
|||
|
2ad1e038f1
|
|||
|
f40825de65
|
|||
|
5aa068eaec
|
|||
|
170c3a853e
|
|||
|
c70838d1bf
|
|||
|
0f7854a4fc
|
|||
|
a86f857553
|
|||
|
d14e3909d4
|
|||
|
bb44bf786a
|
|||
|
ad3343a01e
|
|||
|
16d72e94ba
|
|||
|
b070a1fd31
|
|||
|
dcf5c8ebe7
|
|||
|
1f8bc8bac5
|
|||
|
ab28b5788b
|
|||
|
dd7b2917dc
|
|||
|
74798c77b5
|
|||
|
63a4caa626
|
|||
|
374be5ca6b
|
|||
|
4a6fd41f31
|
|||
|
c06fff9e5a
|
|||
|
1d9928ade1
|
|||
|
1a3b04be00
|
|||
|
c0c6f97a01
|
|||
|
a954188d5d
|
|||
|
5b86d6eb67
|
|||
|
72f31e974b
|
|||
|
e824dc0a22
|
70
.gitea/workflows/build-and-test.yml
Normal file
70
.gitea/workflows/build-and-test.yml
Normal file
@@ -0,0 +1,70 @@
|
||||
name: "Build and test"
|
||||
on:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
push:
|
||||
jobs:
|
||||
build:
|
||||
runs-on: debian-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
- name: Install sudo
|
||||
run: apt-get update && apt-get -y install sudo
|
||||
|
||||
- name: Install nix
|
||||
uses: https://github.com/cachix/install-nix-action@v31
|
||||
with:
|
||||
extra_nix_config: |
|
||||
experimental-features = nix-command flakes
|
||||
show-trace = true
|
||||
max-jobs = auto
|
||||
trusted-users = root
|
||||
experimental-features = nix-command flakes
|
||||
build-users-group =
|
||||
|
||||
- name: Update database inputs
|
||||
run: nix flake update datasources
|
||||
|
||||
- name: Build database
|
||||
run: nix build .#database -L
|
||||
|
||||
- name: Upload database as artifact
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: jadb-${{ gitea.sha }}.zip
|
||||
path: result/jadb.sqlite
|
||||
if-no-files-found: error
|
||||
retention-days: 15
|
||||
# Already compressed
|
||||
compression: 0
|
||||
|
||||
- name: Print database statistics
|
||||
run: nix develop .#sqlite-debugging --command sqlite3_analyzer result/jadb.sqlite
|
||||
|
||||
# TODO: Defer failure of tests until after the coverage report is generated and uploaded.
|
||||
- name: Run tests
|
||||
run: nix develop .# --command dart run test --concurrency=1 --coverage-path=coverage/lcov.info
|
||||
|
||||
- name: Generate coverage report
|
||||
run: |
|
||||
GENHTML_ARGS=(
|
||||
--current-date="$(date)"
|
||||
--dark-mode
|
||||
--output-directory coverage/report
|
||||
)
|
||||
|
||||
nix develop .# --command genhtml "${GENHTML_ARGS[@]}" coverage/lcov.info
|
||||
|
||||
- name: Upload coverage report
|
||||
uses: https://git.pvv.ntnu.no/Projects/rsync-action@v2
|
||||
with:
|
||||
source: ./coverage
|
||||
target: jadb/${{ gitea.ref_name }}/
|
||||
username: oysteikt
|
||||
ssh-key: ${{ secrets.OYSTEIKT_GITEA_WEBDOCS_SSH_KEY }}
|
||||
host: microbel.pvv.ntnu.no
|
||||
known-hosts: "microbel.pvv.ntnu.no ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBEq0yasKP0mH6PI6ypmuzPzMnbHELo9k+YB5yW534aKudKZS65YsHJKQ9vapOtmegrn5MQbCCgrshf+/XwZcjbM="
|
||||
|
||||
- name: Run benchmarks
|
||||
run: nix develop .# --command dart run benchmark_harness:bench --flavor jit
|
||||
@@ -1,38 +0,0 @@
|
||||
name: "Build database"
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
jobs:
|
||||
evals:
|
||||
runs-on: debian-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
- name: Install sudo
|
||||
run: apt-get update && apt-get -y install sudo
|
||||
|
||||
- name: Install nix
|
||||
uses: https://github.com/cachix/install-nix-action@v31
|
||||
|
||||
- name: Configure nix
|
||||
run: echo -e "show-trace = true\nmax-jobs = auto\ntrusted-users = root\nexperimental-features = nix-command flakes\nbuild-users-group =" > /etc/nix/nix.conf
|
||||
|
||||
- name: Update database inputs
|
||||
run: |
|
||||
nix flake update jmdict-src
|
||||
nix flake update jmdict-with-examples-src
|
||||
nix flake update radkfile-src
|
||||
nix flake update kanjidic2-src
|
||||
|
||||
- name: Build database
|
||||
run: nix build .#database -L
|
||||
|
||||
- name: Upload database as artifact
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: jadb-${{ gitea.sha }}.zip
|
||||
path: result/jadb.sqlite
|
||||
if-no-files-found: error
|
||||
retention-days: 15
|
||||
# Already compressed
|
||||
compression: 0
|
||||
@@ -1,31 +0,0 @@
|
||||
name: "Run tests"
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
jobs:
|
||||
evals:
|
||||
runs-on: debian-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
- name: Install sudo
|
||||
run: apt-get update && apt-get -y install sudo
|
||||
|
||||
- name: Install nix
|
||||
uses: https://github.com/cachix/install-nix-action@v31
|
||||
|
||||
- name: Configure nix
|
||||
run: echo -e "show-trace = true\nmax-jobs = auto\ntrusted-users = root\nexperimental-features = nix-command flakes\nbuild-users-group =" > /etc/nix/nix.conf
|
||||
|
||||
- name: Update database inputs
|
||||
run: |
|
||||
nix flake update jmdict-src
|
||||
nix flake update jmdict-with-examples-src
|
||||
nix flake update radkfile-src
|
||||
nix flake update kanjidic2-src
|
||||
|
||||
- name: Build database
|
||||
run: nix build .#database -L
|
||||
|
||||
- name: Run tests
|
||||
run: nix develop .# --command dart test
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -8,6 +8,7 @@
|
||||
# Conventional directory for build output.
|
||||
/doc/
|
||||
/build/
|
||||
/coverage/
|
||||
main.db
|
||||
|
||||
# Nix
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
# jadb
|
||||
|
||||
[](https://builtwithnix.org)
|
||||
|
||||
[Latest coverage report](https://www.pvv.ntnu.no/~oysteikt/gitea/jadb/main/coverage/report/)
|
||||
|
||||
# jadb
|
||||
|
||||
An SQLite database containing open source japanese dictionary data combined from several sources
|
||||
|
||||
Note that while the license for the code is MIT, the data has various licenses.
|
||||
@@ -16,3 +18,4 @@ Note that while the license for the code is MIT, the data has various licenses.
|
||||
| **Tanos JLPT levels:** | https://www.tanos.co.uk/jlpt/ |
|
||||
| **Kangxi Radicals:** | https://ctext.org/kangxi-zidian |
|
||||
|
||||
See [docs/overview.md](./docs/overview.md) for notes and implementation details.
|
||||
|
||||
5
benchmark/benchmark.dart
Normal file
5
benchmark/benchmark.dart
Normal file
@@ -0,0 +1,5 @@
|
||||
import './search/word_search.dart';
|
||||
|
||||
Future<void> main() async {
|
||||
await WordSearchBenchmark.main();
|
||||
}
|
||||
45
benchmark/search/word_search.dart
Normal file
45
benchmark/search/word_search.dart
Normal file
@@ -0,0 +1,45 @@
|
||||
import 'package:benchmark_harness/benchmark_harness.dart';
|
||||
import 'package:jadb/search.dart';
|
||||
import 'package:sqflite_common/sqlite_api.dart';
|
||||
|
||||
import '../../test/search/setup_database_connection.dart';
|
||||
|
||||
class WordSearchBenchmark extends AsyncBenchmarkBase {
|
||||
Database? connection;
|
||||
|
||||
static final List<String> searchTerms = [
|
||||
'kana',
|
||||
'kanji',
|
||||
'kawaii',
|
||||
'sushi',
|
||||
'ramen',
|
||||
];
|
||||
|
||||
WordSearchBenchmark() : super('WordSearchBenchmark');
|
||||
|
||||
static Future<void> main() async {
|
||||
print('Running WordSearchBenchmark...');
|
||||
await WordSearchBenchmark().report();
|
||||
print('Finished WordSearchBenchmark');
|
||||
}
|
||||
|
||||
@override
|
||||
Future<void> setup() async {
|
||||
connection = await setupDatabaseConnection();
|
||||
}
|
||||
|
||||
@override
|
||||
Future<void> run() async {
|
||||
for (final term in searchTerms) {
|
||||
await connection!.jadbSearchWord(term);
|
||||
}
|
||||
}
|
||||
|
||||
@override
|
||||
Future<void> teardown() async {
|
||||
await connection?.close();
|
||||
}
|
||||
|
||||
// @override
|
||||
// Future<void> exercise() => run();
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,582 +0,0 @@
|
||||
,あ,Ah
|
||||
,ああ,like that
|
||||
間,あいだ,a space
|
||||
合う,あう,to match
|
||||
,あかちゃん,baby
|
||||
上る,あがる,to rise
|
||||
赤ん坊,あかんぼう,baby
|
||||
空く,あく,"to open, to become empty"
|
||||
,あげる,to give
|
||||
浅い,あさい,"shallow, superficial"
|
||||
味,あじ,flavour
|
||||
明日,あす・あした,tomorrow
|
||||
遊び,あそび,play
|
||||
集る,あつまる,to gather
|
||||
集める,あつめる,to collect something
|
||||
謝る,あやまる,to apologize
|
||||
安心,あんしん,relief
|
||||
安全,あんぜん,safety
|
||||
,あんな,such
|
||||
以下,いか,less than
|
||||
以外,いがい,with the exception of
|
||||
医学,いがく,medical science
|
||||
生きる,いきる,to live
|
||||
意見,いけん,opinion
|
||||
石,いし,stone
|
||||
,いじめる,to tease
|
||||
以上,いじょう,"more than, this is all"
|
||||
急ぐ,いそぐ,to hurry
|
||||
致す,いたす,(humble) to do
|
||||
一度,いちど,once
|
||||
一生懸命,いっしょうけんめい,with utmost effort
|
||||
,いっぱい,full
|
||||
糸,いと,thread
|
||||
以内,いない,within
|
||||
田舎,いなか,countryside
|
||||
祈る,いのる,to pray
|
||||
,いらっしゃる,"(respectful) to be, to come or to go"
|
||||
植える,うえる,"to plant, to grow"
|
||||
受付,うけつけ,receipt
|
||||
受ける,うける,to take a lesson or test
|
||||
動く,うごく,to move
|
||||
,うち,within
|
||||
打つ,うつ,to hit
|
||||
美しい,うつくしい,beautiful
|
||||
写す,うつす,to copy or photograph
|
||||
移る,うつる,to move house or transfer
|
||||
腕,うで,arm
|
||||
裏,うら,reverse side
|
||||
売り場,うりば,place where things are sold
|
||||
,うん,(informal) yes
|
||||
運転手,うんてんしゅ,driver
|
||||
枝,えだ,"branch, twig"
|
||||
選ぶ,えらぶ,to choose
|
||||
遠慮,えんりょ・する,"to be reserved, to be restrained"
|
||||
,おいでになる,(respectful) to be
|
||||
お祝い,おいわい,congratulation
|
||||
,おかげ,"owing to, thanks to"
|
||||
,おかしい,strange or funny
|
||||
億,おく,one hundred million
|
||||
屋上,おくじょう,rooftop
|
||||
贈り物,おくりもの,gift
|
||||
送る,おくる,to send
|
||||
遅れる,おくれる,to be late
|
||||
起す,おこす,to wake
|
||||
行う,おこなう,to do
|
||||
怒る,おこる,"to get angry, to be angry"
|
||||
押し入れ,おしいれ,closet
|
||||
お嬢さん,おじょうさん,young lady
|
||||
お宅,おたく,(polite) your house
|
||||
落る,おちる,to fall or drop
|
||||
,おっしゃる,(respectful) to say
|
||||
夫,おっと,husband
|
||||
,おつり,"change from purchase, balance"
|
||||
音,おと,"sound, note"
|
||||
落す,おとす,to drop
|
||||
踊り,おどり,a dance
|
||||
踊る,おどる,to dance
|
||||
驚く,おどろく,to be surprised
|
||||
お祭り,おまつり,festival
|
||||
お見舞い,おみまい,"calling on someone who is ill, enquiry"
|
||||
お土産,おみやげ,souvenir
|
||||
思い出す,おもいだす,to remember
|
||||
思う,おもう,"to think, to feel"
|
||||
,おもちゃ,toy
|
||||
表,おもて,the front
|
||||
親,おや,parents
|
||||
泳ぎ方,およぎかた,way of swimming
|
||||
下りる,おりる,"to get off, to descend"
|
||||
折る,おる,to break or to fold
|
||||
お礼,おれい,expression of gratitude
|
||||
折れる,おれる,to break or be folded
|
||||
終わり,おわり,the end
|
||||
海岸,かいがん,coast
|
||||
会議,かいぎ,meeting
|
||||
会議室,かいぎしつ,meeting room
|
||||
会場,かいじょう,assembly hall or meeting place
|
||||
会話,かいわ,conversation
|
||||
帰り,かえり,return
|
||||
変える,かえる,to change
|
||||
科学,かがく,science
|
||||
鏡,かがみ,mirror
|
||||
掛ける,かける,to hang something
|
||||
飾る,かざる,to decorate
|
||||
火事,かじ,fire
|
||||
,ガス,gas
|
||||
堅/硬/固い,かたい,hard
|
||||
形,かたち,shape
|
||||
片付ける,かたづける,to tidy up
|
||||
課長,かちょう,section manager
|
||||
勝つ,かつ,to win
|
||||
家内,かない,housewife
|
||||
悲しい,かなしい,sad
|
||||
必ず,かならず,"certainly,necessarily"
|
||||
お・金持ち,かねもち/おかねもち,rich man
|
||||
彼女,かのじょ,"she,girlfriend"
|
||||
壁,かべ,wall
|
||||
髪,かみ,hair
|
||||
噛む,かむ,"to bite,to chew"
|
||||
通う,かよう,to commute
|
||||
彼,かれ,"he,boyfriend"
|
||||
乾く,かわく,to get dry
|
||||
代わり,かわり,"substitute,alternate"
|
||||
変わる,かわる,to change
|
||||
考える,かんがえる,to consider
|
||||
関係,かんけい,relationship
|
||||
看護師,かんごし, nurse
|
||||
簡単,かんたん,simple
|
||||
気,き,"spirit,mood"
|
||||
機会,きかい,opportunity
|
||||
危険,きけん,danger
|
||||
聞こえる,きこえる,to be heard
|
||||
汽車,きしゃ,steam train
|
||||
技術,ぎじゅつ,"art,technology,skill"
|
||||
季節,きせつ,season
|
||||
規則,きそく,regulations
|
||||
,きっと,surely
|
||||
絹,きぬ,silk
|
||||
厳しい,きびしい,strict
|
||||
気分,きぶん,mood
|
||||
決る,きまる,to be decided
|
||||
君,きみ,(informal) You
|
||||
決める,きめる,to decide
|
||||
気持ち,きもち,"feeling,mood"
|
||||
着物,きもの,kimono
|
||||
客,きゃく,"guest,customer"
|
||||
急,きゅう,"urgent, steep"
|
||||
急行,きゅうこう,"speedy, express"
|
||||
教育,きょういく,education
|
||||
教会,きょうかい,church
|
||||
競争,きょうそう,competition
|
||||
興味,きょうみ,an interest
|
||||
近所,きんじょ,neighbourhood
|
||||
具合,ぐあい,"condition,health"
|
||||
空気,くうき,"air,atmosphere"
|
||||
空港,くうこう,airport
|
||||
草,くさ,grass
|
||||
首,くび,neck
|
||||
雲,くも,cloud
|
||||
比べる,くらべる,to compare
|
||||
,くれる,to give
|
||||
暮れる,くれる,"to get dark,to come to an end"
|
||||
君,くん,suffix for familiar young male
|
||||
毛,け,hair or fur
|
||||
経済,けいざい,"finance,economy"
|
||||
警察,けいさつ,police
|
||||
景色,けしき,"scene,landscape"
|
||||
消しゴム,けしゴム,eraser
|
||||
下宿,げしゅく,lodging
|
||||
決して,けっして,never
|
||||
,けれど/けれども,however
|
||||
原因,げんいん,"cause,source"
|
||||
,けんか・する,to quarrel
|
||||
研究,けんきゅう,research
|
||||
研究室,けんきゅうしつ,"study room,laboratory"
|
||||
見物,けんぶつ,sightseeing
|
||||
子,こ,child
|
||||
,こう,this way
|
||||
郊外,こうがい,outskirts
|
||||
講義,こうぎ,lecture
|
||||
工業,こうぎょう,the manufacturing industry
|
||||
高校,こうこう,high school
|
||||
高校生,こうこうせい,high school student
|
||||
工場,こうじょう/こうば,"factory,plant,mill,workshop"
|
||||
校長,こうちょう,headmaster
|
||||
交通,こうつう,"traffic,transportation"
|
||||
講堂,こうどう,auditorium
|
||||
高等学校,こうとうがっこう,high school
|
||||
公務員,こうむいん,"civil servant, government worker"
|
||||
国際,こくさい,international
|
||||
心,こころ,"heart, mind, core"
|
||||
御主人,ごしゅじん,(honorable) your husband
|
||||
故障,こしょう・する,to break-down
|
||||
ご存じ,ごぞんじ,(respect form ) to know
|
||||
答,こたえ,response
|
||||
,ごちそう,a feast
|
||||
小鳥,ことり,small bird
|
||||
,このあいだ,"the other day,recently"
|
||||
,このごろ,"these days,nowadays"
|
||||
細かい,こまかい,"small, fine"
|
||||
込む,こむ,to include
|
||||
米,こめ,uncooked rice
|
||||
,ごらんになる,(respectful) to see
|
||||
,これから,after this
|
||||
怖い,こわい,frightening
|
||||
壊す,こわす,to break
|
||||
壊れる,こわれる,to be broken
|
||||
今度,こんど,"now,next time"
|
||||
今夜,こんや,tonight
|
||||
最近,さいきん,"latest,nowadays"
|
||||
最後,さいご,"last,end"
|
||||
最初,さいしょ,"beginning,first"
|
||||
坂,さか,"slope,hill"
|
||||
探す,さがす,to look for
|
||||
下る,さがる,"to get down,to descend"
|
||||
盛ん,さかん,"popularity,prosperous"
|
||||
下げる,さげる,"to hang,to lower,to move back"
|
||||
差し上げる,さしあげる,(polite) to give
|
||||
,さっき,some time ago
|
||||
寂しい,さびしい,lonely
|
||||
さ来月,さらいげつ,the month after next
|
||||
さ来週,さらいしゅう,the week after next
|
||||
騒ぐ,さわぐ,"to make noise,to be excited"
|
||||
触る,さわる,to touch
|
||||
産業,さんぎょう,industry
|
||||
残念,ざんねん,disappointment
|
||||
市,し,city
|
||||
字,じ,character
|
||||
試合,しあい,"match,game"
|
||||
仕方,しかた,method
|
||||
試験,しけん,examination
|
||||
事故,じこ,accident
|
||||
地震,じしん,earthquake
|
||||
時代,じだい,era
|
||||
下着,したぎ,underwear
|
||||
,しっかり,"firmly,steadily"
|
||||
失敗,しっぱい,"failure,mistake"
|
||||
辞典,じてん,dictionary
|
||||
品物,しなもの,goods
|
||||
,しばらく,little while
|
||||
島,しま,island
|
||||
市民,しみん,citizen
|
||||
事務所,じむしょ,office
|
||||
社会,しゃかい,"society,public"
|
||||
社長,しゃちょう,company president
|
||||
自由,じゆう,freedom
|
||||
習慣,しゅうかん,"custom,manners"
|
||||
住所,じゅうしょ,"an address,a residence"
|
||||
柔道,じゅうどう,judo
|
||||
十分,じゅうぶん,enough
|
||||
趣味,しゅみ,hobby
|
||||
紹介,しょうかい,introduction
|
||||
小学校,しょうがっこう,elementary school
|
||||
小説,しょうせつ,novel
|
||||
将来,しょうらい,"future,prospects"
|
||||
食料品,しょくりょうひん,groceries
|
||||
女性,じょせい,woman
|
||||
知らせる,しらせる,to notify
|
||||
調べる,しらべる,to investigate
|
||||
人口,じんこう,population
|
||||
神社,じんじゃ,Shinto shrine
|
||||
親切,しんせつ,kindness
|
||||
新聞社,しんぶんしゃ,newspaper company
|
||||
水泳,すいえい,swimming
|
||||
水道,すいどう,water supply
|
||||
数学,すうがく,"mathematics,arithmetic"
|
||||
過ぎる,すぎる,to exceed
|
||||
凄い,すごい,terrific
|
||||
進む,すすむ,to make progress
|
||||
,すっかり,completely
|
||||
,すっと,"straight,all of a sudden"
|
||||
捨てる,すてる,to throw away
|
||||
砂,すな,sand
|
||||
滑る,すべる,"to slide,to slip"
|
||||
隅,すみ,"corner,nook"
|
||||
済む,すむ,to finish
|
||||
,すり,pickpocket
|
||||
,すると,then
|
||||
生活,せいかつ・する,to live
|
||||
生産,せいさん・する,to produce
|
||||
政治,せいじ,"politics,government"
|
||||
西洋,せいよう,western countries
|
||||
世界,せかい,the world
|
||||
席,せき,seat
|
||||
説明,せつめい,explanation
|
||||
背中,せなか,back of the body
|
||||
線,せん,line
|
||||
戦争,せんそう,war
|
||||
先輩,せんぱい,senior
|
||||
,そう,really
|
||||
育てる,そだてる,"to rear,to bring up"
|
||||
卒業,そつぎょう,graduation
|
||||
祖父,そふ,grandfather
|
||||
祖母,そぼ,grandmother
|
||||
,それで,because of that
|
||||
,それに,moreover
|
||||
,それほど,to that extent
|
||||
,そろそろ,"gradually,soon"
|
||||
,そんな,that sort of
|
||||
,そんなに,"so much,like that"
|
||||
退院,たいいん・する,to leave hospital
|
||||
大学生,だいがくせい,university student
|
||||
大事,だいじ,"important,valuable,serious matter"
|
||||
大体,だいたい,generally
|
||||
,たいてい,usually
|
||||
大分,だいぶ,greatly
|
||||
台風,たいふう,typhoon
|
||||
倒れる,たおれる,to break down
|
||||
,だから,"so,therefore"
|
||||
確か,たしか,definite
|
||||
足す,たす,to add a number
|
||||
訪ねる,たずねる,to visit
|
||||
尋ねる,たずねる,to ask
|
||||
正しい,ただしい,correct
|
||||
畳,たたみ,Japanese straw mat
|
||||
立てる,たてる,to stand something up
|
||||
建てる,たてる,to build
|
||||
例えば,たとえば,for example
|
||||
棚,たな,shelves
|
||||
楽しみ,たのしみ,joy
|
||||
楽む,たのしむ,to enjoy oneself
|
||||
,たまに,occasionally
|
||||
為,ため,in order to
|
||||
足りる,たりる,to be enough
|
||||
男性,だんせい,male
|
||||
暖房,だんぼう,heating
|
||||
血,ち,blood
|
||||
,チェック・する,to check
|
||||
力,ちから,"strength,power"
|
||||
,ちっとも,not at all (used with a negative verb)
|
||||
,ちゃん,suffix for familiar person
|
||||
注意,ちゅうい,caution
|
||||
中学校,ちゅうがっこう,"junior high school,middle school"
|
||||
注射,ちゅうしゃ,injection
|
||||
駐車場,ちゅうしゃじょう,parking lot
|
||||
地理,ちり,geography
|
||||
捕まえる,つかまえる,to seize
|
||||
付く,つく,to be attached
|
||||
漬ける,つける,"to soak,to pickle"
|
||||
都合,つごう,"circumstances,convenience"
|
||||
伝える,つたえる,to report
|
||||
続く,つづく,to be continued
|
||||
続ける,つづける,to continue
|
||||
包む,つつむ,to wrap
|
||||
妻,つま,my wife
|
||||
,つもり,intention
|
||||
釣る,つる,to fish
|
||||
丁寧,ていねい,polite
|
||||
適当,てきとう,suitability
|
||||
手伝う,てつだう,to assist
|
||||
手袋,てぶくろ,glove
|
||||
寺,てら,temple
|
||||
点,てん,"point,dot"
|
||||
店員,てんいん,shop assistant
|
||||
天気予報,てんきよほう,weather forecast
|
||||
電灯,でんとう,electric light
|
||||
電報,でんぽう,telegram
|
||||
展覧会,てんらんかい,exhibition
|
||||
都,と,metropolitan
|
||||
道具,どうぐ,"tool,means"
|
||||
,とうとう,"finally, after all"
|
||||
動物園,どうぶつえん,zoo
|
||||
遠く,とおく,distant
|
||||
通る,とおる,to go through
|
||||
特に,とくに,"particularly,especially"
|
||||
特別,とくべつ,special
|
||||
,とこや,barber
|
||||
途中,とちゅう,on the way
|
||||
特急,とっきゅう,limited express train (faster than an express train)
|
||||
届ける,とどける,"to send, to deliver, to report"
|
||||
泊まる,とまる,to lodge at
|
||||
止める,とめる,to stop something
|
||||
取り替える,とりかえる,to exchange
|
||||
泥棒,どろぼう,thief
|
||||
,どんどん,more and more
|
||||
直す,なおす,"to fix,to repair"
|
||||
直る,なおる,"to be fixed,to be repaired"
|
||||
治る,なおる,"to be cured,to heal"
|
||||
泣く,なく,to weep
|
||||
無くなる,なくなる,"to disappear,to get lost"
|
||||
亡くなる,なくなる,to die
|
||||
投げる,なげる,to throw or cast away
|
||||
,なさる,(respectful) to do
|
||||
鳴る,なる,to sound
|
||||
,なるべく,as much as possible
|
||||
,なるほど,now I understand
|
||||
慣れる,なれる,to grow accustomed to
|
||||
苦い,にがい,bitter
|
||||
二階建て,にかいだて,two storied
|
||||
逃げる,にげる,to escape
|
||||
日記,にっき,journal
|
||||
入院,にゅういん・する,"to hospitalise, hospitalisation"
|
||||
入学,にゅうがく・する,to enter school or university
|
||||
似る,にる,to be similar
|
||||
人形,にんぎょう,"doll, figure"
|
||||
盗む,ぬすむ,to steal
|
||||
塗る,ぬる,"to paint, to colour, to plaster"
|
||||
,ぬれる,to get wet
|
||||
,ねだん,price
|
||||
熱,ねつ,fever
|
||||
寝坊,ねぼう,sleeping in late
|
||||
眠い,ねむい,sleepy
|
||||
眠る,ねむる,to sleep
|
||||
残る,のこる,to remain
|
||||
乗り換える,のりかえる,to change between buses or trains
|
||||
乗り物,のりもの,vehicle
|
||||
葉,は,leaf
|
||||
場合,ばあい,situation
|
||||
倍,ばい,double
|
||||
拝見,はいけん・する,(humble) to look at
|
||||
歯医者,はいしゃ,dentist
|
||||
運ぶ,はこぶ,to transport
|
||||
始める,はじめる,to begin
|
||||
場所,ばしょ,location
|
||||
,はず,it should be so
|
||||
恥ずかしい,はずかしい,embarrassed
|
||||
発音,はつおん,pronunciation
|
||||
,はっきり,clearly
|
||||
花見,はなみ,cherry-blossom viewing
|
||||
林,はやし,"woods,forester"
|
||||
払う,はらう,to pay
|
||||
番組,ばんぐみ,television or radio program
|
||||
反対,はんたい,opposition
|
||||
日,ひ,"day, sun"
|
||||
火,ひ,fire
|
||||
冷える,ひえる,to grow cold
|
||||
光,ひかり,light
|
||||
光る,ひかる,"to shine,to glitter"
|
||||
引き出し,ひきだし,"drawer,drawing out"
|
||||
,ひきだす,to withdraw
|
||||
,ひげ,beard
|
||||
飛行場,ひこうじょう,airport
|
||||
久しぶり,ひさしぶり,after a long time
|
||||
美術館,びじゅつかん,art gallery
|
||||
非常に,ひじょうに,extremely
|
||||
引っ越す,ひっこす,to move house
|
||||
必要,ひつよう,necessary
|
||||
,ひどい,awful
|
||||
開く,ひらく,to open an event
|
||||
昼間,ひるま,"daytime,during the day"
|
||||
昼休み,ひるやすみ,noon break
|
||||
拾う,ひろう,"to pick up,to gather"
|
||||
増える,ふえる,to increase
|
||||
深い,ふかい,deep
|
||||
複雑,ふくざつ,"complexity,complication"
|
||||
復習,ふくしゅう,revision
|
||||
部長,ぶちょう,head of a section
|
||||
普通,ふつう,"usually, or a train that stops at every station"
|
||||
,ぶどう,grapes
|
||||
太る,ふとる,to become fat
|
||||
布団,ふとん,"Japanese bedding, futon"
|
||||
舟,ふね,ship
|
||||
不便,ふべん,inconvenience
|
||||
踏む,ふむ,to step on
|
||||
降り出す,ふりだす,to start to rain
|
||||
文化,ぶんか,culture
|
||||
文学,ぶんがく,literature
|
||||
文法,ぶんぽう,grammar
|
||||
別,べつ,different
|
||||
変,へん,strange
|
||||
返事,へんじ,reply
|
||||
貿易,ぼうえき,trade
|
||||
法律,ほうりつ,law
|
||||
僕,ぼく,I (used by males)
|
||||
星,ほし,star
|
||||
,ほとんど,mostly
|
||||
,ほめる,to praise
|
||||
翻訳,ほんやく,translation
|
||||
参る,まいる,"(humble) to go,to come"
|
||||
負ける,まける,to lose
|
||||
,または,"or,otherwise"
|
||||
間違える,まちがえる,to make a mistake
|
||||
間に合う,まにあう,to be in time for
|
||||
周り,まわり,surroundings
|
||||
回る,まわる,to go around
|
||||
漫画,まんが,comic
|
||||
真中,まんなか,middle
|
||||
見える,みえる,to be in sight
|
||||
湖,みずうみ,lake
|
||||
味噌,みそ,"miso, soybean paste"
|
||||
見つかる,みつかる,to be discovered
|
||||
見つける,みつける,to discover
|
||||
皆,みな,everybody
|
||||
港,みなと,harbour
|
||||
向かう,むかう,to face
|
||||
迎える,むかえる,to go out to meet
|
||||
昔,むかし,"old times, old days, long ago, formerly"
|
||||
虫,むし,insect
|
||||
息子,むすこ,(humble) son
|
||||
娘,むすめ,(humble) daughter
|
||||
無理,むり,impossible
|
||||
召し上がる,めしあがる,(polite) to eat
|
||||
珍しい,めずらしい,rare
|
||||
申し上げる,もうしあげる,"(humble) to say,to tell"
|
||||
申す,もうす,"(humble) to be called,to say"
|
||||
,もうすぐ,soon
|
||||
,もし,if
|
||||
戻る,もどる,to turn back
|
||||
木綿,もめん,cotton
|
||||
森,もり,forest
|
||||
焼く,やく,"to bake,to grill"
|
||||
約束,やくそく,promise
|
||||
役に立つ,やくにたつ,to be helpful
|
||||
焼ける,やける,"to burn,to be roasted"
|
||||
優しい,やさしい,kind
|
||||
痩せる,やせる,to become thin
|
||||
,やっと,at last
|
||||
止む,やむ,to stop
|
||||
止める,やめる,to stop
|
||||
柔らかい,やわらかい,soft
|
||||
湯,ゆ,hot water
|
||||
指,ゆび,finger
|
||||
指輪,ゆびわ,a ring
|
||||
夢,ゆめ,dream
|
||||
揺れる,ゆれる,"to shake,to sway"
|
||||
用,よう,use
|
||||
用意,ようい,preparation
|
||||
用事,ようじ,things to do
|
||||
汚れる,よごれる,to get dirty
|
||||
予習,よしゅう,preparation for a lesson
|
||||
予定,よてい,arrangement
|
||||
予約,よやく,reservation
|
||||
寄る,よる,to visit
|
||||
喜ぶ,よろこぶ,to be delighted
|
||||
理由,りゆう,reason
|
||||
利用,りよう,utilization
|
||||
両方,りょうほう,both sides
|
||||
旅館,りょかん,Japanese hotel
|
||||
留守,るす,absence
|
||||
冷房,れいぼう,air conditioning
|
||||
歴史,れきし,history
|
||||
連絡,れんらく,contact
|
||||
沸かす,わかす,"to boil,to heat"
|
||||
別れる,わかれる,to separate
|
||||
沸く,わく,"to boil, to grow hot,to get excited"
|
||||
訳,わけ,"meaning,reason"
|
||||
忘れ物,わすれもの,lost article
|
||||
笑う,わらう,"to laugh,to smile"
|
||||
割合,わりあい,"rate,ratio,percentage"
|
||||
割れる,われる,to break
|
||||
,アクセサリー,accessory
|
||||
,アジア,Asia
|
||||
,アナウンサー,announcer
|
||||
,アフリカ,Africa
|
||||
,アメリカ,America
|
||||
,アルコール,alcohol
|
||||
,アルバイト,part-time job
|
||||
,エスカレーター,escalator
|
||||
,オートバイ,motorcycle
|
||||
,カーテン,curtain
|
||||
,ガス,gas
|
||||
,ガソリン,petrol
|
||||
,ガソリンスタンド,petrol station
|
||||
,ガラス,a glass pane
|
||||
,ケーキ,cake
|
||||
消しゴム,けしゴム,"eraser, rubber"
|
||||
,コンサート,concert
|
||||
,コンピューター,computer
|
||||
,サラダ,salad
|
||||
,サンダル,sandal
|
||||
,サンドイッチ,sandwich
|
||||
,ジャム,jam
|
||||
,スーツ,suit
|
||||
,スーツケース,suitcase
|
||||
,スクリーン,screen
|
||||
,ステーキ,steak
|
||||
,ステレオ,stereo
|
||||
,ソフト,soft
|
||||
,タイプ,"type,style"
|
||||
,チェック・する,to check
|
||||
,テキスト,"text,text book"
|
||||
,テニス,tennis
|
||||
,パート,part time
|
||||
,パソコン,personal computer
|
||||
,ハンドバッグ,handbag
|
||||
,ピアノ,piano
|
||||
,ビル,building or bill
|
||||
,ファックス,fax
|
||||
,プレゼント,present
|
||||
,ベル,bell
|
||||
,レジ,register
|
||||
,レポート/リポート,report
|
||||
,ワープロ,word processor
|
||||
|
@@ -1,669 +0,0 @@
|
||||
会う,あう,to meet
|
||||
青,あお,blue
|
||||
青い,あおい,blue
|
||||
赤,あか,red
|
||||
赤い,あかい,red
|
||||
明い,あかるい,bright
|
||||
秋,あき,autumn
|
||||
開く,あく,"to open,to become open"
|
||||
開ける,あける,to open
|
||||
上げる,あげる,to give
|
||||
朝,あさ,morning
|
||||
朝御飯,あさごはん,breakfast
|
||||
,あさって,day after tomorrow
|
||||
足,あし,"foot,leg"
|
||||
明日,あした,tomorrow
|
||||
,あそこ,over there
|
||||
遊ぶ,あそぶ,"to play,to make a visit"
|
||||
暖かい,あたたかい,warm
|
||||
頭,あたま,head
|
||||
新しい,あたらしい,new
|
||||
,あちら,there
|
||||
暑い,あつい,hot
|
||||
熱い,あつい,hot to the touch
|
||||
厚い,あつい,"kind, deep, thick"
|
||||
,あっち,over there
|
||||
後,あと,afterwards
|
||||
,あなた,you
|
||||
兄,あに,(humble) older brother
|
||||
姉,あね,(humble) older sister
|
||||
,あの,that over there
|
||||
,あの,um...
|
||||
,アパート,apartment
|
||||
,あびる,"to bathe,to shower"
|
||||
危ない,あぶない,dangerous
|
||||
甘い,あまい,sweet
|
||||
,あまり,not very
|
||||
雨,あめ,rain
|
||||
飴,あめ,candy
|
||||
洗う,あらう,to wash
|
||||
,ある,"to be,to have (used for inanimate objects)"
|
||||
歩く,あるく,to walk
|
||||
,あれ,that
|
||||
,いい/よい,good
|
||||
,いいえ,no
|
||||
言う,いう,to say
|
||||
家,いえ,house
|
||||
,いかが,how
|
||||
行く,いく,to go
|
||||
,いくつ,"how many?,how old?"
|
||||
,いくら,how much?
|
||||
池,いけ,pond
|
||||
医者,いしゃ,medical doctor
|
||||
,いす,chair
|
||||
忙しい,いそがしい,"busy,irritated"
|
||||
痛い,いたい,painful
|
||||
一,いち,one
|
||||
一日,いちにち,"(1) one day, (2) first of month"
|
||||
,いちばん,"best,first"
|
||||
,いつ,when
|
||||
五日,いつか,"five days, fifth day"
|
||||
一緒,いっしょ,together
|
||||
五つ,いつつ,five
|
||||
,いつも,always
|
||||
犬,いぬ,dog
|
||||
今,いま,now
|
||||
意味,いみ,meaning
|
||||
妹,いもうと,(humble) younger sister
|
||||
嫌,いや,unpleasant
|
||||
入口,いりぐち,entrance
|
||||
居る,いる,"to be, to have (used for people and animals)"
|
||||
要る,いる,to need
|
||||
入れる,いれる,to put in
|
||||
色,いろ,colour
|
||||
,いろいろ,various
|
||||
上,うえ,on top of
|
||||
後ろ,うしろ,behind
|
||||
薄い,うすい,"thin,weak"
|
||||
歌,うた,song
|
||||
歌う,うたう,to sing
|
||||
生まれる,うまれる,to be born
|
||||
海,うみ,sea
|
||||
売る,うる,to sell
|
||||
煩い,うるさい,"noisy,annoying"
|
||||
上着,うわぎ,jacket
|
||||
絵,え,picture
|
||||
映画,えいが,movie
|
||||
映画館,えいがかん,cinema
|
||||
英語,えいご,English language
|
||||
,ええ,yes
|
||||
駅,えき,station
|
||||
,エレベーター,elevator
|
||||
鉛筆,えんぴつ,pencil
|
||||
,おいしい,delicious
|
||||
多い,おおい,many
|
||||
大きい,おおきい,big
|
||||
大きな,おおきな,big
|
||||
大勢,おおぜい,great number of people
|
||||
お母さん,おかあさん,(honorable) mother
|
||||
お菓子,おかし,"sweets, candy"
|
||||
お金,おかね,money
|
||||
起きる,おきる,to get up
|
||||
置く,おく,to put
|
||||
奥さん,おくさん,(honorable) wife
|
||||
お酒,おさけ,"alcohol, rice wine"
|
||||
お皿,おさら,"plate, dish"
|
||||
伯父/叔父,おじいさん,"grandfather,male senior citizen"
|
||||
教える,おしえる,"to teach,to tell"
|
||||
伯父/叔父,おじさん,"uncle,middle aged gentleman"
|
||||
押す,おす,"to push, to stamp something"
|
||||
遅い,おそい,"late,slow"
|
||||
お茶,おちゃ,green tea
|
||||
お手洗い,おてあらい,bathroom
|
||||
お父さん,おとうさん,(honorable) father
|
||||
弟,おとうと,younger brother
|
||||
男,おとこ,man
|
||||
男の子,おとこのこ,boy
|
||||
一昨日,おととい,day before yesterday
|
||||
一昨年,おととし,year before last
|
||||
大人,おとな,adult
|
||||
,おなか,stomach
|
||||
同じ,おなじ,same
|
||||
お兄さん,おにいさん,(honorable) older brother
|
||||
お姉さん,おねえさん,(honorable) older sister
|
||||
,おばあさん,"grandmother,female senior-citizen"
|
||||
伯母さん/叔母さん,おばさん,aunt
|
||||
お風呂,おふろ,bath
|
||||
お弁当,おべんとう,boxed lunch
|
||||
覚える,おぼえる,to remember
|
||||
,おまわりさん,friendly term for policeman
|
||||
重い,おもい,heavy
|
||||
,おもしろい,interesting
|
||||
泳ぐ,およぐ,to swim
|
||||
降りる,おりる,"to get off, to descend"
|
||||
終る,おわる,to finish
|
||||
音楽,おんがく,music
|
||||
女,おんな,woman
|
||||
女の子,おんなのこ,girl
|
||||
外国,がいこく,foreign country
|
||||
外国人,がいこくじん,foreigner
|
||||
会社,かいしゃ,company
|
||||
階段,かいだん,stairs
|
||||
買い物,かいもの,shopping
|
||||
買う,かう,to buy
|
||||
返す,かえす,to return something
|
||||
帰る,かえる,to go back
|
||||
,かかる,to take time or money
|
||||
,かぎ,key
|
||||
書く,かく,to write
|
||||
学生,がくせい,student
|
||||
,かける,to call by phone
|
||||
傘,かさ,umbrella
|
||||
貸す,かす,to lend
|
||||
風,かぜ,wind
|
||||
風邪,かぜ,a cold
|
||||
家族,かぞく,family
|
||||
方,かた,"person, way of doing"
|
||||
学校,がっこう,school
|
||||
,カップ,cup
|
||||
家庭,かてい,household
|
||||
角,かど,a corner
|
||||
,かばん,"bag,basket"
|
||||
花瓶,かびん,a vase
|
||||
紙,かみ,paper
|
||||
,カメラ,camera
|
||||
火曜日,かようび,Tuesday
|
||||
辛い,からい,spicy
|
||||
体,からだ,body
|
||||
借りる,かりる,to borrow
|
||||
軽い,かるい,light
|
||||
,カレー,curry
|
||||
,カレンダー,calendar
|
||||
川/河,かわ,river
|
||||
,かわいい,cute
|
||||
漢字,かんじ,Chinese character
|
||||
木,き,"tree,wood"
|
||||
黄色,きいろ,yellow
|
||||
黄色い,きいろい,yellow
|
||||
消える,きえる,to disappear
|
||||
聞く,きく,"to hear,to listen to,to ask"
|
||||
北,きた,north
|
||||
,ギター,guitar
|
||||
汚い,きたない,dirty
|
||||
喫茶店,きっさてん,coffee lounge
|
||||
切手,きって,postage stamp
|
||||
切符,きっぷ,ticket
|
||||
昨日,きのう,yesterday
|
||||
九,きゅう / く,nine
|
||||
牛肉,ぎゅうにく,beef
|
||||
牛乳,ぎゅうにゅう,milk
|
||||
今日,きょう,today
|
||||
教室,きょうしつ,classroom
|
||||
兄弟,きょうだい,(humble) siblings
|
||||
去年,きょねん,last year
|
||||
嫌い,きらい,hate
|
||||
切る,きる,to cut
|
||||
着る,きる,to put on from the shoulders down
|
||||
,きれい,"pretty,clean"
|
||||
,キロ/キログラム,kilogram
|
||||
,キロ/キロメートル,kilometre
|
||||
銀行,ぎんこう,bank
|
||||
金曜日,きんようび,Friday
|
||||
薬,くすり,medicine
|
||||
,ください,please
|
||||
果物,くだもの,fruit
|
||||
口,くち,"mouth,opening"
|
||||
靴,くつ,shoes
|
||||
靴下,くつした,socks
|
||||
国,くに,country
|
||||
曇り,くもり,cloudy weather
|
||||
曇る,くもる,"to become cloudy,to become dim"
|
||||
暗い,くらい,gloomy
|
||||
,クラス,class
|
||||
,グラム,gram
|
||||
来る,くる,to come
|
||||
車,くるま,"car,vehicle"
|
||||
黒,くろ,black
|
||||
黒い,くろい,black
|
||||
警官,けいかん,policeman
|
||||
今朝,けさ,this morning
|
||||
消す,けす,"to erase,to turn off power"
|
||||
結構,けっこう,"splendid,enough"
|
||||
結婚,けっこん,marriage
|
||||
月曜日,げつようび,Monday
|
||||
玄関,げんかん,entry hall
|
||||
元気,げんき,"health, vitality"
|
||||
五,ご,five
|
||||
公園,こうえん,park
|
||||
交差点,こうさてん,intersection
|
||||
紅茶,こうちゃ,black tea
|
||||
交番,こうばん,police box
|
||||
声,こえ,voice
|
||||
,コート,"coat,tennis court"
|
||||
,コーヒー,coffee
|
||||
,ここ,here
|
||||
午後,ごご,afternoon
|
||||
九日,ここのか,"nine days, ninth day"
|
||||
九つ,ここのつ,nine
|
||||
午前,ごぜん,morning
|
||||
答える,こたえる,to answer
|
||||
,こちら,this person or way
|
||||
,こっち,this person or way
|
||||
,コップ,a glass
|
||||
今年,ことし,this year
|
||||
言葉,ことば,"word,language"
|
||||
子供,こども,child
|
||||
,この,this
|
||||
御飯,ごはん,"cooked rice,meal"
|
||||
,コピーする,to copy
|
||||
困る,こまる,to be worried
|
||||
,これ,this
|
||||
今月,こんげつ,this month
|
||||
今週,こんしゅう,this week
|
||||
,こんな,such
|
||||
今晩,こんばん,this evening
|
||||
,さあ,well…
|
||||
財布,さいふ,wallet
|
||||
魚,さかな,fish
|
||||
先,さき,"the future,previous"
|
||||
咲く,さく,to bloom
|
||||
作文,さくぶん,"composition,writing"
|
||||
差す,さす,"to stretch out hands,to raise an umbrella"
|
||||
雑誌,ざっし,magazine
|
||||
砂糖,さとう,sugar
|
||||
寒い,さむい,cold
|
||||
さ来年,さらいねん,year after next
|
||||
三,さん,three
|
||||
散歩,さんぽする,to stroll
|
||||
四,し / よん,four
|
||||
塩,しお,salt
|
||||
,しかし,however
|
||||
時間,じかん,time
|
||||
仕事,しごと,job
|
||||
辞書,じしょ,dictionary
|
||||
静か,しずか,quiet
|
||||
下,した,below
|
||||
七,しち / なな,seven
|
||||
質問,しつもん,question
|
||||
自転車,じてんしゃ,bicycle
|
||||
自動車,じどうしゃ,automobile
|
||||
死ぬ,しぬ,to die
|
||||
字引,じびき,dictionary
|
||||
自分,じぶん,oneself
|
||||
閉まる,しまる,"to close,to be closed"
|
||||
閉める,しめる,to close something
|
||||
締める,しめる,to tie
|
||||
,じゃ/じゃあ,well then…
|
||||
写真,しゃしん,photograph
|
||||
,シャツ,shirt
|
||||
,シャワー,shower
|
||||
十,じゅう とお,ten
|
||||
授業,じゅぎょう,"lesson,class work"
|
||||
宿題,しゅくだい,homework
|
||||
上手,じょうず,skillful
|
||||
丈夫,じょうぶ,"strong,durable"
|
||||
,しょうゆ,soy sauce
|
||||
食堂,しょくどう,dining hall
|
||||
知る,しる,to know
|
||||
白,しろ,white
|
||||
白い,しろい,white
|
||||
新聞,しんぶん,newspaper
|
||||
水曜日,すいようび,Wednesday
|
||||
吸う,すう,"to smoke,to suck"
|
||||
,スカート,skirt
|
||||
好き,すき,likeable
|
||||
少ない,すくない,a few
|
||||
,すぐに,instantly
|
||||
少し,すこし,few
|
||||
涼しい,すずしい,refreshing
|
||||
,ストーブ,heater
|
||||
,スプーン,spoon
|
||||
,スポーツ,sport
|
||||
,ズボン,trousers
|
||||
住む,すむ,to live in
|
||||
,スリッパ,slippers
|
||||
,する,to do
|
||||
座る,すわる,to sit
|
||||
背,せ,"height,stature"
|
||||
生徒,せいと,pupil
|
||||
,セーター,"sweater,jumper"
|
||||
,せっけん,soap
|
||||
背広,せびろ,business suit
|
||||
狭い,せまい,narrow
|
||||
,ゼロ,zero
|
||||
千,せん,thousand
|
||||
先月,せんげつ,last month
|
||||
先週,せんしゅう,last week
|
||||
先生,せんせい,"teacher,doctor"
|
||||
洗濯,せんたく,washing
|
||||
全部,ぜんぶ,all
|
||||
掃除,そうじする,"to clean, to sweep"
|
||||
,そうして/そして,and
|
||||
,そこ,that place
|
||||
,そちら,over there
|
||||
,そっち,over there
|
||||
外,そと,outside
|
||||
,その,that
|
||||
,そば,"near,beside"
|
||||
空,そら,sky
|
||||
,それ,that
|
||||
,それから,after that
|
||||
,それでは,in that situation
|
||||
大学,だいがく,university
|
||||
大使館,たいしかん,embassy
|
||||
大丈夫,だいじょうぶ,all right
|
||||
大好き,だいすき,to be very likeable
|
||||
大切,たいせつ,important
|
||||
台所,だいどころ,kitchen
|
||||
,たいへん,very
|
||||
,たいへん,difficult situation
|
||||
高い,たかい,"tall, expensive"
|
||||
,たくさん,many
|
||||
,タクシー,taxi
|
||||
出す,だす,to put out
|
||||
立つ,たつ,to stand
|
||||
,たて,"length,height"
|
||||
建物,たてもの,building
|
||||
楽しい,たのしい,enjoyable
|
||||
頼む,たのむ,to ask
|
||||
,たばこ,"tobacco,cigarettes"
|
||||
,たぶん,probably
|
||||
食べ物,たべもの,food
|
||||
食べる,たべる,to eat
|
||||
卵,たまご,egg
|
||||
誰,だれ,who
|
||||
誰,だれか,somebody
|
||||
誕生日,たんじょうび,birthday
|
||||
,だんだん,gradually
|
||||
小さい,ちいさい,little
|
||||
小さな,ちいさな,little
|
||||
近い,ちかい,near
|
||||
違う,ちがう,to differ
|
||||
近く,ちかく,near
|
||||
地下鉄,ちかてつ,underground train
|
||||
地図,ちず,map
|
||||
茶色,ちゃいろ,brown
|
||||
,ちゃわん,rice bowl
|
||||
,ちょうど,exactly
|
||||
,ちょっと,somewhat
|
||||
一日,ついたち,first of month
|
||||
使う,つかう,to use
|
||||
疲れる,つかれる,to get tired
|
||||
次,つぎ,next
|
||||
着く,つく,to arrive at
|
||||
机,つくえ,desk
|
||||
作る,つくる,to make
|
||||
,つける,to turn on
|
||||
勤める,つとめる,to work for someone
|
||||
,つまらない,boring
|
||||
冷たい,つめたい,cold to the touch
|
||||
強い,つよい,powerful
|
||||
手,て,hand
|
||||
,テープ,tape
|
||||
,テーブル,table
|
||||
,テープレコーダー,tape recorder
|
||||
出かける,でかける,to go out
|
||||
手紙,てがみ,letter
|
||||
,できる,to be able to
|
||||
出口,でぐち,exit
|
||||
,テスト,test
|
||||
,では,with that...
|
||||
,デパート,department store
|
||||
,でも,but
|
||||
出る,でる,"to appear,to leave"
|
||||
,テレビ,television
|
||||
天気,てんき,weather
|
||||
電気,でんき,"electricity,electric light"
|
||||
電車,でんしゃ,electric train
|
||||
電話,でんわ,telephone
|
||||
戸,と,Japanese style door
|
||||
,ドア,Western style door
|
||||
,トイレ,toilet
|
||||
,どう,"how,in what way"
|
||||
,どうして,for what reason
|
||||
,どうぞ,please
|
||||
動物,どうぶつ,animal
|
||||
,どうも,thanks
|
||||
遠い,とおい,far
|
||||
十日,とおか,"ten days,the tenth day"
|
||||
時々,ときどき,sometimes
|
||||
時計,とけい,"watch,clock"
|
||||
,どこ,where
|
||||
所,ところ,place
|
||||
年,とし,year
|
||||
図書館,としょかん,library
|
||||
,どちら,which of two
|
||||
,どっち,which
|
||||
,とても,very
|
||||
,どなた,who
|
||||
隣,となり,next door to
|
||||
,どの,which
|
||||
飛ぶ,とぶ,"to fly,to hop"
|
||||
止まる,とまる,to come to a halt
|
||||
友達,ともだち,friend
|
||||
土曜日,どようび,Saturday
|
||||
鳥,とり,bird
|
||||
とり肉,とりにく,chicken meat
|
||||
取る,とる,to take something
|
||||
撮る,とる,to take a photo or record a film
|
||||
,どれ,which (of three or more)
|
||||
,ナイフ,knife
|
||||
中,なか,middle
|
||||
長い,ながい,long
|
||||
鳴く,なく,"animal noise. to chirp, roar or croak etc."
|
||||
無くす,なくす,to lose something
|
||||
,なぜ,why
|
||||
夏,なつ,summer
|
||||
夏休み,なつやすみ,summer holiday
|
||||
,など,et cetera
|
||||
七つ,ななつ,seven
|
||||
七日,なのか,"seven days,the seventh day"
|
||||
名前,なまえ,name
|
||||
習う,ならう,to learn
|
||||
並ぶ,ならぶ,"to line up,to stand in a line"
|
||||
並べる,ならべる,"to line up,to set up"
|
||||
,なる,to become
|
||||
何,なん/なに,what
|
||||
二,に,two
|
||||
賑やか,にぎやか,"bustling,busy"
|
||||
肉,にく,meat
|
||||
西,にし,west
|
||||
日曜日,にちようび,Sunday
|
||||
荷物,にもつ,luggage
|
||||
,ニュース,news
|
||||
庭,にわ,garden
|
||||
脱ぐ,ぬぐ,to take off clothes
|
||||
温い,ぬるい,luke warm
|
||||
,ネクタイ,"tie,necktie"
|
||||
猫,ねこ,cat
|
||||
寝る,ねる,"to go to bed,to sleep"
|
||||
,ノート,"notebook,exercise book"
|
||||
登る,のぼる,to climb
|
||||
飲み物,のみもの,a drink
|
||||
飲む,のむ,to drink
|
||||
乗る,のる,"to get on,to ride"
|
||||
歯,は,tooth
|
||||
,パーティー,party
|
||||
,はい,yes
|
||||
灰皿,はいざら,ashtray
|
||||
入る,はいる,"to enter,to contain"
|
||||
葉書,はがき,postcard
|
||||
,はく,"to wear,to put on trousers"
|
||||
箱,はこ,box
|
||||
橋,はし,bridge
|
||||
,はし,chopsticks
|
||||
始まる,はじまる,to begin
|
||||
初め/始め,はじめ,beginning
|
||||
初めて,はじめて,for the first time
|
||||
走る,はしる,to run
|
||||
,バス,bus
|
||||
,バター,butter
|
||||
二十歳,はたち,"20 years old,20th year"
|
||||
働く,はたらく,to work
|
||||
八,はち,eight
|
||||
二十日,はつか,"twenty days,twentieth"
|
||||
花,はな,flower
|
||||
鼻,はな,nose
|
||||
話,はなし,"talk,story"
|
||||
話す,はなす,to speak
|
||||
早い,はやい,early
|
||||
速い,はやい,quick
|
||||
春,はる,spring
|
||||
貼る,はる,to stick
|
||||
晴れ,はれ,clear weather
|
||||
晴れる,はれる,to be sunny
|
||||
半,はん,half
|
||||
晩,ばん,evening
|
||||
,パン,bread
|
||||
,ハンカチ,handkerchief
|
||||
番号,ばんごう,number
|
||||
晩御飯,ばんごはん,evening meal
|
||||
半分,はんぶん,half
|
||||
東,ひがし,east
|
||||
引く,ひく,to pull
|
||||
弾く,ひく,"to play an instrument with strings, including piano"
|
||||
低い,ひくい,"short,low"
|
||||
飛行機,ひこうき,aeroplane
|
||||
左,ひだり,left hand side
|
||||
人,ひと,person
|
||||
一つ,ひとつ,one
|
||||
一月,ひとつき,one month
|
||||
一人,ひとり,one person
|
||||
暇,ひま,free time
|
||||
百,ひゃく,hundred
|
||||
病院,びょういん,hospital
|
||||
病気,びょうき,illness
|
||||
昼,ひる,"noon, daytime"
|
||||
昼御飯,ひるごはん,midday meal
|
||||
広い,ひろい,"spacious,wide"
|
||||
,フィルム,roll of film
|
||||
封筒,ふうとう,envelope
|
||||
,プール,swimming pool
|
||||
,フォーク,fork
|
||||
吹く,ふく,to blow
|
||||
服,ふく,clothes
|
||||
二つ,ふたつ,two
|
||||
豚肉,ぶたにく,pork
|
||||
二人,ふたり,two people
|
||||
二日,ふつか,"two days, second day of the month"
|
||||
太い,ふとい,fat
|
||||
冬,ふゆ,winter
|
||||
降る,ふる,"to fall, e.g. rain or snow"
|
||||
古い,ふるい,old (not used for people)
|
||||
,ふろ,bath
|
||||
文章,ぶんしょう,"sentence,text"
|
||||
,ページ,page
|
||||
下手,へた,unskillful
|
||||
,ベッド,bed
|
||||
,ペット,pet
|
||||
部屋,へや,room
|
||||
辺,へん,area
|
||||
,ペン,pen
|
||||
勉強,べんきょうする,to study
|
||||
便利,べんり,"useful, convenient"
|
||||
帽子,ぼうし,hat
|
||||
,ボールペン,ball-point pen
|
||||
,ほか,"other, the rest"
|
||||
,ポケット,pocket
|
||||
欲しい,ほしい,want
|
||||
,ポスト,post
|
||||
細い,ほそい,thin
|
||||
,ボタン,button
|
||||
,ホテル,hotel
|
||||
本,ほん,book
|
||||
本棚,ほんだな,bookshelves
|
||||
,ほんとう,truth
|
||||
毎朝,まいあさ,every morning
|
||||
毎月,まいげつ/まいつき,every month
|
||||
毎週,まいしゅう,every week
|
||||
毎日,まいにち,every day
|
||||
毎年,まいねん/まいとし,every year
|
||||
毎晩,まいばん,every night
|
||||
前,まえ,before
|
||||
曲る,まがる,"to turn,to bend"
|
||||
,まずい,unpleasant
|
||||
,また,"again,and"
|
||||
,まだ,"yet,still"
|
||||
町,まち,"town,city"
|
||||
待つ,まつ,to wait
|
||||
,まっすぐ,"straight ahead,direct"
|
||||
,マッチ,match
|
||||
窓,まど,window
|
||||
丸い/円い,まるい,"round,circular"
|
||||
万,まん,ten thousand
|
||||
万年筆,まんねんひつ,fountain pen
|
||||
磨く,みがく,"to brush teeth, to polish"
|
||||
右,みぎ,right side
|
||||
短い,みじかい,short
|
||||
水,みず,water
|
||||
店,みせ,shop
|
||||
見せる,みせる,to show
|
||||
道,みち,street
|
||||
三日,みっか,"three days, third day of the month"
|
||||
三つ,みっつ,three
|
||||
緑,みどり,green
|
||||
皆さん,みなさん,everyone
|
||||
南,みなみ,south
|
||||
耳,みみ,ear
|
||||
見る 観る,みる,"to see, to watch"
|
||||
,みんな,everyone
|
||||
六日,むいか,"six days, sixth day of the month"
|
||||
向こう,むこう,over there
|
||||
難しい,むずかしい,difficult
|
||||
六つ,むっつ,six
|
||||
村,むら,village
|
||||
目,め,eye
|
||||
,メートル,metre
|
||||
眼鏡,めがね,glasses
|
||||
,もう,already
|
||||
もう一度,もういちど,again
|
||||
木曜日,もくようび,Thursday
|
||||
持つ,もつ,to hold
|
||||
,もっと,more
|
||||
物,もの,thing
|
||||
門,もん,gate
|
||||
問題,もんだい,problem
|
||||
八百屋,やおや,greengrocer
|
||||
野菜,やさい,vegetable
|
||||
易しい,やさしい,"easy, simple"
|
||||
安い,やすい,cheap
|
||||
休み,やすみ,"rest,holiday"
|
||||
休む,やすむ,to rest
|
||||
八つ,やっつ,eight
|
||||
山,やま,mountain
|
||||
,やる,to do
|
||||
夕方,ゆうがた,evening
|
||||
夕飯,ゆうはん,dinner
|
||||
郵便局,ゆうびんきょく,post office
|
||||
昨夜,ゆうべ,last night
|
||||
有名,ゆうめい,famous
|
||||
雪,ゆき,snow
|
||||
行く,ゆく,to go
|
||||
,ゆっくりと,slowly
|
||||
八日,ようか,"eight days, eighth day of the month"
|
||||
洋服,ようふく,western-style clothes
|
||||
,よく,"often, well"
|
||||
横,よこ,"beside,side,width"
|
||||
四日,よっか,"four days, fourth day of the month"
|
||||
四つ,よっつ,four
|
||||
呼ぶ,よぶ,"to call out,to invite"
|
||||
読む,よむ,to read
|
||||
夜,よる,"evening,night"
|
||||
弱い,よわい,weak
|
||||
来月,らいげつ,next month
|
||||
来週,らいしゅう,next week
|
||||
来年,らいねん,next year
|
||||
,ラジオ,radio
|
||||
,ラジカセ / ラジオカセット,radio cassette player
|
||||
,りっぱ,splendid
|
||||
留学生,りゅうがくせい,overseas student
|
||||
両親,りょうしん,both parents
|
||||
料理,りょうり,cuisine
|
||||
旅行,りょこう,travel
|
||||
零,れい,zero
|
||||
冷蔵庫,れいぞうこ,refrigerator
|
||||
,レコード,record
|
||||
,レストラン,restaurant
|
||||
練習,れんしゅうする,to practice
|
||||
廊下,ろうか,corridor
|
||||
六,ろく,six
|
||||
,ワイシャツ,business shirt
|
||||
若い,わかい,young
|
||||
分かる,わかる,to be understood
|
||||
忘れる,わすれる,to forget
|
||||
私,わたくし,"(humble) I,myself"
|
||||
私,わたし,"I,myself"
|
||||
渡す,わたす,to hand over
|
||||
渡る,わたる,to go across
|
||||
悪い,わるい,bad
|
||||
,より、ほう,Used for comparison.
|
||||
|
28
docs/database.md
Normal file
28
docs/database.md
Normal file
@@ -0,0 +1,28 @@
|
||||
# Database
|
||||
|
||||
Here are some choices that have been made when designing the schema
|
||||
|
||||
### `JMdict_{Reading,Kanji}Element.elementId` and `JMdict_Sense.senseId`
|
||||
|
||||
The `elementId`/`senseId` field acts as a unique identifier for each individual element in these tables.
|
||||
It is a packed version of the `(entryId, orderNum)` pair, where the first number is given 7 digits and the second is given 2 digits (max count found so far is `40`).
|
||||
Since `entryId` already is a field in the table, it would technically have been fine to store the `orderNum` as a separate field,
|
||||
but it is easier to be able to refer to the entries without a composite foreign key in other tables.
|
||||
|
||||
(NOTE: `entryId` is now inferred from `elementId` within sqlite using a generated column, so saying it is "stored in a separate field" might be a stretch)
|
||||
|
||||
In addition, `1000000000` is added to each reading element id to make it distinct from the kanji element ids. This reduces the amount of space needed for indices in some locations, because you can simply filter out each part with `>` or `<`.
|
||||
|
||||
We used to generate the `elementId` separately from `orderNum` as a sequential id, but it led to all values
|
||||
shifting whenever the data was updated, leading to very big diffs. Making it be a unique composite of data coming
|
||||
from the source data itself means that the values will be stable across updates.
|
||||
|
||||
Due to the way the data is structured, we can use the `elementId` as the ordering number as well.
|
||||
|
||||
### `JMdict_EntryScore`
|
||||
|
||||
The `JMdict_EntryScore` table is used to store the score of each entry, which is used for sorting search results. The score is calculated based on a number of variables.
|
||||
|
||||
The table is automatically generated from other tables via triggers, and should be considered as a materialized view.
|
||||
|
||||
There is a score row for every single entry in both `JMdict_KanjiElement` and `JMdict_ReadingElement`, split by the `type` field.
|
||||
13
docs/lemmatizer.md
Normal file
13
docs/lemmatizer.md
Normal file
@@ -0,0 +1,13 @@
|
||||
# Lemmatizer
|
||||
|
||||
The lemmatizer is still quite experimental, but will play a more important role in the project in the future.
|
||||
|
||||
It is a manual implementation of a [Finite State Transducer](https://en.wikipedia.org/wiki/Morphological_dictionary#Finite_State_Transducers) for morphological parsing. The FST is used to recursively remove affixes from a word until it (hopefully) deconjugates into its dictionary form. This iterative deconjugation tree will then be combined with queries into the dictionary data to determine if the deconjugation leads to a real known word.
|
||||
|
||||
Each separate rule is a separate static object declared in `lib/util/lemmatizer/rules`.
|
||||
|
||||
There is a cli subcommand for testing the tool interactively, you can run
|
||||
|
||||
```bash
|
||||
dart run jadb lemmatize -w '食べさせられない'
|
||||
```
|
||||
28
docs/overview.md
Normal file
28
docs/overview.md
Normal file
@@ -0,0 +1,28 @@
|
||||
# Overview
|
||||
|
||||
This is the documentation for `jadb`. Since I'm currently the only one working on it, the documentation is more or less just notes to myself, to ensure I remember how and why I implemented certain features in a certain way a few months down the road. This is not a comprehensive and formal documentation for downstream use, neither for developers nor end-users.
|
||||
|
||||
- [Word Search](./word-search.md)
|
||||
- [Database](./database.md)
|
||||
- [Lemmatizer](./lemmatizer.md)
|
||||
|
||||
## Project structure
|
||||
|
||||
- `lib/_data_ingestion` contains all the code for reading data sources, transforming them and compiling them into an SQLite database. This is for the most part isolated from the rest of the codebase, and should not be depended on by any code used for querying the database.
|
||||
- `lib/cli` contains code for cli tooling (e.g. argument parsing, subcommand handling, etc.)
|
||||
- `lib/const_data` contains database data that is small enough to warrant being hardcoded as dart constants.
|
||||
- `lib/models` contains all the code for representing the database schema as Dart classes, and for converting between those classes and the actual database.
|
||||
- `lib/search` contains all the code for searching the database.
|
||||
- `lib/util/lemmatizer` contains the code for lemmatization, which will be used by the search code in the future.
|
||||
- `migrations` contains raw SQL files for creating the database schema.
|
||||
|
||||
## SQLite naming conventions
|
||||
|
||||
> [!WARNING]
|
||||
> All of these conventions are actually not enforced yet, it will be fixed at some point.
|
||||
|
||||
- Indices are prefixed with `IDX__`
|
||||
- Crossref tables are prefixed with `XREF__`
|
||||
- Trigger names are prefixed with `TRG__`
|
||||
- Views are prefixed with `VW__`
|
||||
- All data sources should have a `<datasource>_Version` table, which contains a single row with the version of the data source used to generate the database.
|
||||
21
docs/word-search.md
Normal file
21
docs/word-search.md
Normal file
@@ -0,0 +1,21 @@
|
||||
# Word search
|
||||
|
||||
The word search procedure is currently split into 3 parts:
|
||||
|
||||
1. **Entry ID query**:
|
||||
|
||||
Use a complex query with various scoring factors to try to get a list of
|
||||
database ids pointing at dictionary entries, sorted by how likely we think this
|
||||
word is the word that the caller is looking for. The output here is a `List<int>`
|
||||
|
||||
2. **Data Query**:
|
||||
|
||||
Takes the entry id list from the last search, and performs all queries needed to retrieve
|
||||
all the dictionary data for those IDs. The result is a struct with a bunch of flattened lists
|
||||
with data for all the dictionary entries. These lists are sorted by the order that the ids
|
||||
were provided.
|
||||
|
||||
3. **Regrouping**:
|
||||
|
||||
Takes the flattened data, and regroups the items into structs with a more "hierarchical" structure.
|
||||
All data tagged with the same ID will end up in the same struct. Returns a list of these structs.
|
||||
76
flake.lock
generated
76
flake.lock
generated
@@ -1,48 +1,48 @@
|
||||
{
|
||||
"nodes": {
|
||||
"jmdict-src": {
|
||||
"flake": false,
|
||||
"datasources": {
|
||||
"inputs": {
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"narHash": "sha256-lh46uougUzBrRhhwa7cOb32j5Jt9/RjBUhlVjwVzsII=",
|
||||
"type": "file",
|
||||
"url": "http://ftp.edrdg.org/pub/Nihongo/JMdict_e.gz"
|
||||
"lastModified": 1775550160,
|
||||
"narHash": "sha256-bgvKrMGUPaDY4EZv+82z1ccYoxwaergdVw/3PZhc2Fc=",
|
||||
"ref": "refs/heads/main",
|
||||
"rev": "f46229af3678124c5ea7c8dff3292747d0274f69",
|
||||
"revCount": 8,
|
||||
"type": "git",
|
||||
"url": "https://git.pvv.ntnu.no/Mugiten/datasources.git"
|
||||
},
|
||||
"original": {
|
||||
"type": "file",
|
||||
"url": "http://ftp.edrdg.org/pub/Nihongo/JMdict_e.gz"
|
||||
"type": "git",
|
||||
"url": "https://git.pvv.ntnu.no/Mugiten/datasources.git"
|
||||
}
|
||||
},
|
||||
"jmdict-with-examples-src": {
|
||||
"kanjivg-src": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"narHash": "sha256-5oS2xDyetbuSM6ax3LUjYA3N60x+D3Hg41HEXGFMqLQ=",
|
||||
"type": "file",
|
||||
"url": "http://ftp.edrdg.org/pub/Nihongo/JMdict_e_examp.gz"
|
||||
"lastModified": 1775218066,
|
||||
"narHash": "sha256-iYv9xakgoGt/JwwdKDUCpSAF36hBtKlX9oN7xiLowjs=",
|
||||
"ref": "refs/heads/master",
|
||||
"rev": "544d319f79348c092d567b662f27f33dacfa60cd",
|
||||
"revCount": 2215,
|
||||
"type": "git",
|
||||
"url": "https://git.pvv.ntnu.no/mugiten/kanjivg.git"
|
||||
},
|
||||
"original": {
|
||||
"type": "file",
|
||||
"url": "http://ftp.edrdg.org/pub/Nihongo/JMdict_e_examp.gz"
|
||||
}
|
||||
},
|
||||
"kanjidic2-src": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"narHash": "sha256-orSeQqSxhn9TtX3anYtbiMEm7nFkuomGnIKoVIUR2CM=",
|
||||
"type": "file",
|
||||
"url": "https://www.edrdg.org/kanjidic/kanjidic2.xml.gz"
|
||||
},
|
||||
"original": {
|
||||
"type": "file",
|
||||
"url": "https://www.edrdg.org/kanjidic/kanjidic2.xml.gz"
|
||||
"type": "git",
|
||||
"url": "https://git.pvv.ntnu.no/mugiten/kanjivg.git"
|
||||
}
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1771848320,
|
||||
"narHash": "sha256-0MAd+0mun3K/Ns8JATeHT1sX28faLII5hVLq0L3BdZU=",
|
||||
"lastModified": 1775423009,
|
||||
"narHash": "sha256-vPKLpjhIVWdDrfiUM8atW6YkIggCEKdSAlJPzzhkQlw=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "2fc6539b481e1d2569f25f8799236694180c0993",
|
||||
"rev": "68d8aa3d661f0e6bd5862291b5bb263b2a6595c9",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@@ -51,25 +51,11 @@
|
||||
"type": "indirect"
|
||||
}
|
||||
},
|
||||
"radkfile-src": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"narHash": "sha256-DHpMUE2Umje8PbzXUCS6pHZeXQ5+WTxbjSkGU3erDHQ=",
|
||||
"type": "file",
|
||||
"url": "http://ftp.edrdg.org/pub/Nihongo/radkfile.gz"
|
||||
},
|
||||
"original": {
|
||||
"type": "file",
|
||||
"url": "http://ftp.edrdg.org/pub/Nihongo/radkfile.gz"
|
||||
}
|
||||
},
|
||||
"root": {
|
||||
"inputs": {
|
||||
"jmdict-src": "jmdict-src",
|
||||
"jmdict-with-examples-src": "jmdict-with-examples-src",
|
||||
"kanjidic2-src": "kanjidic2-src",
|
||||
"nixpkgs": "nixpkgs",
|
||||
"radkfile-src": "radkfile-src"
|
||||
"datasources": "datasources",
|
||||
"kanjivg-src": "kanjivg-src",
|
||||
"nixpkgs": "nixpkgs"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
64
flake.nix
64
flake.nix
@@ -4,24 +4,13 @@
|
||||
inputs = {
|
||||
nixpkgs.url = "nixpkgs/nixos-unstable";
|
||||
|
||||
jmdict-src = {
|
||||
# url = "http://ftp.edrdg.org/pub/Nihongo/JMdict.gz";
|
||||
url = "http://ftp.edrdg.org/pub/Nihongo/JMdict_e.gz";
|
||||
flake = false;
|
||||
datasources = {
|
||||
url = "git+https://git.pvv.ntnu.no/Mugiten/datasources.git";
|
||||
inputs.nixpkgs.follows = "nixpkgs";
|
||||
};
|
||||
|
||||
jmdict-with-examples-src = {
|
||||
url = "http://ftp.edrdg.org/pub/Nihongo/JMdict_e_examp.gz";
|
||||
flake = false;
|
||||
};
|
||||
|
||||
radkfile-src = {
|
||||
url = "http://ftp.edrdg.org/pub/Nihongo/radkfile.gz";
|
||||
flake = false;
|
||||
};
|
||||
|
||||
kanjidic2-src = {
|
||||
url = "https://www.edrdg.org/kanjidic/kanjidic2.xml.gz";
|
||||
kanjivg-src = {
|
||||
url = "git+https://git.pvv.ntnu.no/mugiten/kanjivg.git";
|
||||
flake = false;
|
||||
};
|
||||
};
|
||||
@@ -29,10 +18,8 @@
|
||||
outputs = {
|
||||
self,
|
||||
nixpkgs,
|
||||
jmdict-src,
|
||||
jmdict-with-examples-src,
|
||||
radkfile-src,
|
||||
kanjidic2-src
|
||||
datasources,
|
||||
kanjivg-src,
|
||||
}: let
|
||||
inherit (nixpkgs) lib;
|
||||
systems = [
|
||||
@@ -77,19 +64,28 @@
|
||||
|
||||
devShells = forAllSystems (system: pkgs: {
|
||||
default = pkgs.mkShell {
|
||||
buildInputs = with pkgs; [
|
||||
packages = with pkgs; [
|
||||
dart
|
||||
gnumake
|
||||
lcov
|
||||
sqldiff
|
||||
sqlite-interactive
|
||||
];
|
||||
env = {
|
||||
LIBSQLITE_PATH = "${pkgs.sqlite.out}/lib/libsqlite3.so";
|
||||
JADB_PATH = "result/jadb.sqlite";
|
||||
LD_LIBRARY_PATH = lib.makeLibraryPath [ pkgs.sqlite ];
|
||||
};
|
||||
};
|
||||
|
||||
sqlite-debugging = pkgs.mkShell {
|
||||
packages = with pkgs; [
|
||||
sqlite-interactive
|
||||
sqlite-analyzer
|
||||
sqlite-web
|
||||
# sqlint
|
||||
sqlfluff
|
||||
];
|
||||
env = {
|
||||
LIBSQLITE_PATH = "${pkgs.sqlite.out}/lib/libsqlite3.so";
|
||||
JADB_PATH = "result/jadb.sqlite";
|
||||
};
|
||||
};
|
||||
});
|
||||
|
||||
@@ -126,29 +122,21 @@
|
||||
ln -s ${src} $out
|
||||
'';
|
||||
|
||||
jmdict = pkgs.callPackage ./nix/jmdict.nix {
|
||||
inherit jmdict-src jmdict-with-examples-src edrdgMetadata;
|
||||
};
|
||||
|
||||
radkfile = pkgs.callPackage ./nix/radkfile.nix {
|
||||
inherit radkfile-src edrdgMetadata;
|
||||
};
|
||||
|
||||
kanjidic2 = pkgs.callPackage ./nix/kanjidic2.nix {
|
||||
inherit kanjidic2-src edrdgMetadata;
|
||||
};
|
||||
inherit (datasources.packages.${system}) jmdict radkfile kanjidic2;
|
||||
|
||||
database-tool = pkgs.callPackage ./nix/database_tool.nix {
|
||||
inherit src;
|
||||
};
|
||||
|
||||
database = pkgs.callPackage ./nix/database.nix {
|
||||
inherit (self.packages.${system}) database-tool jmdict radkfile kanjidic2;
|
||||
inherit (datasources.packages.${system}) jmdict radkfile kanjidic2 tanos-jlpt;
|
||||
inherit (self.packages.${system}) database-tool;
|
||||
inherit src;
|
||||
};
|
||||
|
||||
database-wal = pkgs.callPackage ./nix/database.nix {
|
||||
inherit (self.packages.${system}) database-tool jmdict radkfile kanjidic2;
|
||||
inherit (datasources.packages.${system}) jmdict radkfile kanjidic2 tanos-jlpt;
|
||||
inherit (self.packages.${system}) database-tool;
|
||||
inherit src;
|
||||
wal = true;
|
||||
};
|
||||
|
||||
@@ -1,13 +1,15 @@
|
||||
import 'package:jadb/_data_ingestion/sql_writable.dart';
|
||||
|
||||
abstract class Element extends SQLWritable {
|
||||
final int elementId;
|
||||
final String reading;
|
||||
final int? news;
|
||||
final int? ichi;
|
||||
final int? spec;
|
||||
final int? gai;
|
||||
final int? nf;
|
||||
const Element({
|
||||
Element({
|
||||
required this.elementId,
|
||||
required this.reading,
|
||||
this.news,
|
||||
this.ichi,
|
||||
@@ -18,6 +20,7 @@ abstract class Element extends SQLWritable {
|
||||
|
||||
@override
|
||||
Map<String, Object?> get sqlValue => {
|
||||
'elementId': elementId,
|
||||
'reading': reading,
|
||||
'news': news,
|
||||
'ichi': ichi,
|
||||
@@ -28,12 +31,11 @@ abstract class Element extends SQLWritable {
|
||||
}
|
||||
|
||||
class KanjiElement extends Element {
|
||||
int orderNum;
|
||||
List<String> info;
|
||||
|
||||
KanjiElement({
|
||||
this.info = const [],
|
||||
required this.orderNum,
|
||||
required super.elementId,
|
||||
required super.reading,
|
||||
super.news,
|
||||
super.ichi,
|
||||
@@ -45,21 +47,19 @@ class KanjiElement extends Element {
|
||||
@override
|
||||
Map<String, Object?> get sqlValue => {
|
||||
...super.sqlValue,
|
||||
'orderNum': orderNum,
|
||||
};
|
||||
}
|
||||
|
||||
class ReadingElement extends Element {
|
||||
int orderNum;
|
||||
bool readingDoesNotMatchKanji;
|
||||
List<String> info;
|
||||
List<String> restrictions;
|
||||
|
||||
ReadingElement({
|
||||
required this.orderNum,
|
||||
required this.readingDoesNotMatchKanji,
|
||||
this.info = const [],
|
||||
this.restrictions = const [],
|
||||
required super.elementId,
|
||||
required super.reading,
|
||||
super.news,
|
||||
super.ichi,
|
||||
@@ -71,7 +71,6 @@ class ReadingElement extends Element {
|
||||
@override
|
||||
Map<String, Object?> get sqlValue => {
|
||||
...super.sqlValue,
|
||||
'orderNum': orderNum,
|
||||
'readingDoesNotMatchKanji': readingDoesNotMatchKanji,
|
||||
};
|
||||
}
|
||||
@@ -142,7 +141,6 @@ class XRef {
|
||||
|
||||
class Sense extends SQLWritable {
|
||||
final int senseId;
|
||||
final int orderNum;
|
||||
final List<XRefParts> antonyms;
|
||||
final List<String> dialects;
|
||||
final List<String> fields;
|
||||
@@ -157,7 +155,6 @@ class Sense extends SQLWritable {
|
||||
|
||||
const Sense({
|
||||
required this.senseId,
|
||||
required this.orderNum,
|
||||
this.antonyms = const [],
|
||||
this.dialects = const [],
|
||||
this.fields = const [],
|
||||
@@ -174,7 +171,6 @@ class Sense extends SQLWritable {
|
||||
@override
|
||||
Map<String, Object?> get sqlValue => {
|
||||
'senseId': senseId,
|
||||
'orderNum': orderNum,
|
||||
};
|
||||
|
||||
bool get isEmpty =>
|
||||
|
||||
@@ -5,13 +5,22 @@ import 'package:jadb/_data_ingestion/jmdict/objects.dart';
|
||||
import 'package:jadb/table_names/jmdict.dart';
|
||||
import 'package:sqflite_common/sqlite_api.dart';
|
||||
|
||||
/// A wrapper for the result of resolving an xref, which includes the resolved entry and a flag
|
||||
/// indicating whether the xref was ambiguous (i.e. could refer to multiple entries).
|
||||
class ResolvedXref {
|
||||
Entry entry;
|
||||
bool ambiguous;
|
||||
final Entry entry;
|
||||
final bool ambiguous;
|
||||
|
||||
ResolvedXref(this.entry, this.ambiguous);
|
||||
const ResolvedXref(this.entry, this.ambiguous);
|
||||
}
|
||||
|
||||
/// Resolves an xref (pair of kanji, optionally reading, and optionally sense number) to an a specific
|
||||
/// JMdict entry, if possible.
|
||||
///
|
||||
/// If the xref is ambiguous (i.e. it could refer to multiple entries), the
|
||||
/// first entry is returned, and the returned value is marked as ambiguous.
|
||||
///
|
||||
/// If the xref cannot be resolved to any entry at all, an exception is thrown.
|
||||
ResolvedXref resolveXref(
|
||||
SplayTreeMap<String, Set<Entry>> entriesByKanji,
|
||||
SplayTreeMap<String, Set<Entry>> entriesByReading,
|
||||
@@ -75,41 +84,32 @@ Future<void> seedJMDictData(List<Entry> entries, Database db) async {
|
||||
print(' [JMdict] Batch 1 - Kanji and readings');
|
||||
Batch b = db.batch();
|
||||
|
||||
int elementId = 0;
|
||||
for (final e in entries) {
|
||||
b.insert(JMdictTableNames.entry, e.sqlValue);
|
||||
|
||||
for (final k in e.kanji) {
|
||||
elementId++;
|
||||
b.insert(
|
||||
JMdictTableNames.kanjiElement,
|
||||
k.sqlValue..addAll({'entryId': e.entryId, 'elementId': elementId}),
|
||||
);
|
||||
b.insert(JMdictTableNames.kanjiElement, k.sqlValue);
|
||||
|
||||
for (final i in k.info) {
|
||||
b.insert(JMdictTableNames.kanjiInfo, {
|
||||
'elementId': elementId,
|
||||
'elementId': k.elementId,
|
||||
'info': i,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
for (final r in e.readings) {
|
||||
elementId++;
|
||||
b.insert(
|
||||
JMdictTableNames.readingElement,
|
||||
r.sqlValue..addAll({'entryId': e.entryId, 'elementId': elementId}),
|
||||
);
|
||||
b.insert(JMdictTableNames.readingElement, r.sqlValue);
|
||||
|
||||
for (final i in r.info) {
|
||||
b.insert(JMdictTableNames.readingInfo, {
|
||||
'elementId': elementId,
|
||||
'elementId': r.elementId,
|
||||
'info': i,
|
||||
});
|
||||
}
|
||||
for (final res in r.restrictions) {
|
||||
b.insert(JMdictTableNames.readingRestriction, {
|
||||
'elementId': elementId,
|
||||
'elementId': r.elementId,
|
||||
'restriction': res,
|
||||
});
|
||||
}
|
||||
@@ -123,10 +123,7 @@ Future<void> seedJMDictData(List<Entry> entries, Database db) async {
|
||||
|
||||
for (final e in entries) {
|
||||
for (final s in e.senses) {
|
||||
b.insert(
|
||||
JMdictTableNames.sense,
|
||||
s.sqlValue..addAll({'entryId': e.entryId}),
|
||||
);
|
||||
b.insert(JMdictTableNames.sense, s.sqlValue);
|
||||
for (final d in s.dialects) {
|
||||
b.insert(JMdictTableNames.senseDialect, {
|
||||
'senseId': s.senseId,
|
||||
@@ -150,16 +147,18 @@ Future<void> seedJMDictData(List<Entry> entries, Database db) async {
|
||||
}
|
||||
for (final rk in s.restrictedToKanji) {
|
||||
b.insert(JMdictTableNames.senseRestrictedToKanji, {
|
||||
'entryId': e.entryId,
|
||||
'senseId': s.senseId,
|
||||
'kanji': rk,
|
||||
'kanjiElementId': e.kanji
|
||||
.firstWhere((k) => k.reading == rk)
|
||||
.elementId,
|
||||
});
|
||||
}
|
||||
for (final rr in s.restrictedToReading) {
|
||||
b.insert(JMdictTableNames.senseRestrictedToReading, {
|
||||
'entryId': e.entryId,
|
||||
'senseId': s.senseId,
|
||||
'reading': rr,
|
||||
'readingElementId': e.readings
|
||||
.firstWhere((r) => r.reading == rr)
|
||||
.elementId,
|
||||
});
|
||||
}
|
||||
for (final ls in s.languageSource) {
|
||||
@@ -181,24 +180,17 @@ Future<void> seedJMDictData(List<Entry> entries, Database db) async {
|
||||
|
||||
print(' [JMdict] Building xref trees');
|
||||
final SplayTreeMap<String, Set<Entry>> entriesByKanji = SplayTreeMap();
|
||||
final SplayTreeMap<String, Set<Entry>> entriesByReading = SplayTreeMap();
|
||||
|
||||
for (final entry in entries) {
|
||||
for (final kanji in entry.kanji) {
|
||||
if (entriesByKanji.containsKey(kanji.reading)) {
|
||||
entriesByKanji.update(kanji.reading, (list) => list..add(entry));
|
||||
} else {
|
||||
entriesByKanji.putIfAbsent(kanji.reading, () => {entry});
|
||||
}
|
||||
entriesByKanji.putIfAbsent(kanji.reading, () => {});
|
||||
entriesByKanji.update(kanji.reading, (set) => set..add(entry));
|
||||
}
|
||||
}
|
||||
final SplayTreeMap<String, Set<Entry>> entriesByReading = SplayTreeMap();
|
||||
for (final entry in entries) {
|
||||
|
||||
for (final reading in entry.readings) {
|
||||
if (entriesByReading.containsKey(reading.reading)) {
|
||||
entriesByReading.update(reading.reading, (list) => list..add(entry));
|
||||
} else {
|
||||
entriesByReading.putIfAbsent(reading.reading, () => {entry});
|
||||
}
|
||||
entriesByReading.putIfAbsent(reading.reading, () => {});
|
||||
entriesByReading.update(reading.reading, (set) => set..add(entry));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -207,6 +199,7 @@ Future<void> seedJMDictData(List<Entry> entries, Database db) async {
|
||||
|
||||
for (final e in entries) {
|
||||
for (final s in e.senses) {
|
||||
final seenSeeAlsoXrefs = <int>{};
|
||||
for (final xref in s.seeAlso) {
|
||||
final resolvedEntry = resolveXref(
|
||||
entriesByKanji,
|
||||
@@ -214,16 +207,28 @@ Future<void> seedJMDictData(List<Entry> entries, Database db) async {
|
||||
xref,
|
||||
);
|
||||
|
||||
if (seenSeeAlsoXrefs.contains(resolvedEntry.entry.entryId)) {
|
||||
print(
|
||||
'WARNING: Skipping duplicate seeAlso xref from sense ${s.senseId} to entry ${resolvedEntry.entry.entryId}\n'
|
||||
' (kanjiRef: ${xref.kanjiRef}, readingRef: ${xref.readingRef}, senseOrderNum: ${xref.senseOrderNum})',
|
||||
);
|
||||
continue;
|
||||
}
|
||||
seenSeeAlsoXrefs.add(resolvedEntry.entry.entryId);
|
||||
|
||||
b.insert(JMdictTableNames.senseSeeAlso, {
|
||||
'senseId': s.senseId,
|
||||
'xrefEntryId': resolvedEntry.entry.entryId,
|
||||
'seeAlsoKanji': xref.kanjiRef,
|
||||
'seeAlsoReading': xref.readingRef,
|
||||
'seeAlsoSense': xref.senseOrderNum,
|
||||
'seeAlsoSense': xref.senseOrderNum != null
|
||||
? xref.senseOrderNum! - 1
|
||||
: null,
|
||||
'ambiguous': resolvedEntry.ambiguous,
|
||||
});
|
||||
}
|
||||
|
||||
final seenAntonymXrefs = <int>{};
|
||||
for (final ant in s.antonyms) {
|
||||
final resolvedEntry = resolveXref(
|
||||
entriesByKanji,
|
||||
@@ -231,12 +236,23 @@ Future<void> seedJMDictData(List<Entry> entries, Database db) async {
|
||||
ant,
|
||||
);
|
||||
|
||||
if (seenAntonymXrefs.contains(resolvedEntry.entry.entryId)) {
|
||||
print(
|
||||
'WARNING: Skipping duplicate antonym xref from sense ${s.senseId} to entry ${resolvedEntry.entry.entryId}\n'
|
||||
' (kanjiRef: ${ant.kanjiRef}, readingRef: ${ant.readingRef}, senseOrderNum: ${ant.senseOrderNum})',
|
||||
);
|
||||
continue;
|
||||
}
|
||||
seenAntonymXrefs.add(resolvedEntry.entry.entryId);
|
||||
|
||||
b.insert(JMdictTableNames.senseAntonyms, {
|
||||
'senseId': s.senseId,
|
||||
'xrefEntryId': resolvedEntry.entry.entryId,
|
||||
'antonymKanji': ant.kanjiRef,
|
||||
'antonymReading': ant.readingRef,
|
||||
'antonymSense': ant.senseOrderNum,
|
||||
'antonymSense': ant.senseOrderNum != null
|
||||
? ant.senseOrderNum! - 1
|
||||
: null,
|
||||
'ambiguous': resolvedEntry.ambiguous,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -10,14 +10,15 @@ List<int?> getPriorityValues(XmlElement e, String prefix) {
|
||||
final txt = pri.innerText;
|
||||
if (txt.startsWith('news')) {
|
||||
news = int.parse(txt.substring(4));
|
||||
} else if (txt.startsWith('ichi'))
|
||||
} else if (txt.startsWith('ichi')) {
|
||||
ichi = int.parse(txt.substring(4));
|
||||
else if (txt.startsWith('spec'))
|
||||
} else if (txt.startsWith('spec')) {
|
||||
spec = int.parse(txt.substring(4));
|
||||
else if (txt.startsWith('gai'))
|
||||
} else if (txt.startsWith('gai')) {
|
||||
gai = int.parse(txt.substring(3));
|
||||
else if (txt.startsWith('nf'))
|
||||
} else if (txt.startsWith('nf')) {
|
||||
nf = int.parse(txt.substring(2));
|
||||
}
|
||||
}
|
||||
return [news, ichi, spec, gai, nf];
|
||||
}
|
||||
@@ -70,8 +71,6 @@ XRefParts parseXrefParts(String s) {
|
||||
List<Entry> parseJMDictData(XmlElement root) {
|
||||
final List<Entry> entries = [];
|
||||
|
||||
int senseId = 0;
|
||||
|
||||
for (final entry in root.childElements) {
|
||||
final entryId = int.parse(entry.findElements('ent_seq').first.innerText);
|
||||
|
||||
@@ -79,16 +78,23 @@ List<Entry> parseJMDictData(XmlElement root) {
|
||||
final List<ReadingElement> readingEls = [];
|
||||
final List<Sense> senses = [];
|
||||
|
||||
for (final (kanjiNum, k_ele) in entry.findElements('k_ele').indexed) {
|
||||
final kePri = getPriorityValues(k_ele, 'ke');
|
||||
for (final (orderNum, kEle) in entry.findElements('k_ele').indexed) {
|
||||
assert(
|
||||
orderNum < 100,
|
||||
'Entry $entryId has more than 100 kanji elements, which will break the elementId generation logic.',
|
||||
);
|
||||
final elementId = entryId * 100 + orderNum;
|
||||
|
||||
final kePri = getPriorityValues(kEle, 'ke');
|
||||
|
||||
kanjiEls.add(
|
||||
KanjiElement(
|
||||
orderNum: kanjiNum + 1,
|
||||
info: k_ele
|
||||
elementId: elementId,
|
||||
info: kEle
|
||||
.findElements('ke_inf')
|
||||
.map((e) => e.innerText.substring(1, e.innerText.length - 1))
|
||||
.toList(),
|
||||
reading: k_ele.findElements('keb').first.innerText,
|
||||
reading: kEle.findElements('keb').first.innerText,
|
||||
news: kePri[0],
|
||||
ichi: kePri[1],
|
||||
spec: kePri[2],
|
||||
@@ -98,24 +104,31 @@ List<Entry> parseJMDictData(XmlElement root) {
|
||||
);
|
||||
}
|
||||
|
||||
for (final (orderNum, r_ele) in entry.findElements('r_ele').indexed) {
|
||||
final rePri = getPriorityValues(r_ele, 're');
|
||||
final readingDoesNotMatchKanji = r_ele
|
||||
for (final (orderNum, rEle) in entry.findElements('r_ele').indexed) {
|
||||
assert(
|
||||
orderNum < 100,
|
||||
'Entry $entryId has more than 100 readings, which will break the elementId generation logic.',
|
||||
);
|
||||
final elementId = 1_000_000_000 + entryId * 100 + orderNum;
|
||||
|
||||
final rePri = getPriorityValues(rEle, 're');
|
||||
final readingDoesNotMatchKanji = rEle
|
||||
.findElements('re_nokanji')
|
||||
.isNotEmpty;
|
||||
|
||||
readingEls.add(
|
||||
ReadingElement(
|
||||
orderNum: orderNum + 1,
|
||||
elementId: elementId,
|
||||
readingDoesNotMatchKanji: readingDoesNotMatchKanji,
|
||||
info: r_ele
|
||||
info: rEle
|
||||
.findElements('re_inf')
|
||||
.map((e) => e.innerText.substring(1, e.innerText.length - 1))
|
||||
.toList(),
|
||||
restrictions: r_ele
|
||||
restrictions: rEle
|
||||
.findElements('re_restr')
|
||||
.map((e) => e.innerText)
|
||||
.toList(),
|
||||
reading: r_ele.findElements('reb').first.innerText,
|
||||
reading: rEle.findElements('reb').first.innerText,
|
||||
news: rePri[0],
|
||||
ichi: rePri[1],
|
||||
spec: rePri[2],
|
||||
@@ -126,10 +139,14 @@ List<Entry> parseJMDictData(XmlElement root) {
|
||||
}
|
||||
|
||||
for (final (orderNum, sense) in entry.findElements('sense').indexed) {
|
||||
senseId++;
|
||||
assert(
|
||||
orderNum < 100,
|
||||
'Entry $entryId has more than 100 senses, which will break the senseId generation logic.',
|
||||
);
|
||||
final senseId = entryId * 100 + orderNum;
|
||||
|
||||
final result = Sense(
|
||||
senseId: senseId,
|
||||
orderNum: orderNum + 1,
|
||||
restrictedToKanji: sense
|
||||
.findElements('stagk')
|
||||
.map((e) => e.innerText)
|
||||
|
||||
@@ -248,9 +248,6 @@ class Character extends SQLWritable {
|
||||
@override
|
||||
Map<String, Object?> get sqlValue => {
|
||||
'literal': literal,
|
||||
'grade': grade,
|
||||
'strokeCount': strokeCount,
|
||||
'frequency': frequency,
|
||||
'jlpt': jlpt,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -15,6 +15,27 @@ Future<void> seedKANJIDICData(List<Character> characters, Database db) async {
|
||||
// }
|
||||
b.insert(KANJIDICTableNames.character, c.sqlValue);
|
||||
|
||||
if (c.grade != null) {
|
||||
b.insert(KANJIDICTableNames.grade, {
|
||||
'kanji': c.literal,
|
||||
'grade': c.grade!,
|
||||
});
|
||||
}
|
||||
|
||||
if (c.frequency != null) {
|
||||
b.insert(KANJIDICTableNames.frequency, {
|
||||
'kanji': c.literal,
|
||||
'frequency': c.frequency!,
|
||||
});
|
||||
}
|
||||
|
||||
if (c.jlpt != null) {
|
||||
b.insert(KANJIDICTableNames.jlpt, {
|
||||
'kanji': c.literal,
|
||||
'jlpt': c.jlpt!,
|
||||
});
|
||||
}
|
||||
|
||||
for (final n in c.radicalName) {
|
||||
assert(c.radical != null, 'Radical name without radical');
|
||||
b.insert(
|
||||
|
||||
92
lib/_data_ingestion/kanjivg/objects.dart
Normal file
92
lib/_data_ingestion/kanjivg/objects.dart
Normal file
@@ -0,0 +1,92 @@
|
||||
import 'package:jadb/_data_ingestion/sql_writable.dart';
|
||||
|
||||
/// Enum set in the kvg:position attribute, used by `<g>` elements in the KanjiVG SVG files.
|
||||
enum KanjiPathGroupPosition {
|
||||
bottom,
|
||||
kamae,
|
||||
kamaec,
|
||||
left,
|
||||
middle,
|
||||
nyo,
|
||||
nyoc,
|
||||
right,
|
||||
tare,
|
||||
tarec,
|
||||
top,
|
||||
}
|
||||
|
||||
/// Contents of a \<g> element in the KanjiVG SVG files.
|
||||
class KanjiPathGroupTreeNode extends SQLWritable {
|
||||
final String id;
|
||||
final List<KanjiPathGroupTreeNode> children;
|
||||
final String? element;
|
||||
final String? original;
|
||||
final KanjiPathGroupPosition? position;
|
||||
final String? radical;
|
||||
final int? part;
|
||||
|
||||
KanjiPathGroupTreeNode({
|
||||
required this.id,
|
||||
this.children = const [],
|
||||
this.element,
|
||||
this.original,
|
||||
this.position,
|
||||
this.radical,
|
||||
this.part,
|
||||
});
|
||||
|
||||
@override
|
||||
Map<String, Object?> get sqlValue => {
|
||||
'id': id,
|
||||
'element': element,
|
||||
'original': original,
|
||||
'position': position?.name,
|
||||
'radical': radical,
|
||||
'part': part,
|
||||
};
|
||||
}
|
||||
|
||||
/// Contents of a `<text>` element in the StrokeNumber's group in the KanjiVG SVG files
|
||||
class KanjiStrokeNumber extends SQLWritable {
|
||||
final int num;
|
||||
final double x;
|
||||
final double y;
|
||||
|
||||
KanjiStrokeNumber(this.num, this.x, this.y);
|
||||
|
||||
@override
|
||||
Map<String, Object?> get sqlValue => {'num': num, 'x': x, 'y': y};
|
||||
}
|
||||
|
||||
/// Contents of a `<path>` element in the KanjiVG SVG files
|
||||
class KanjiVGPath extends SQLWritable {
|
||||
final String id;
|
||||
final String type;
|
||||
final String svgPath;
|
||||
|
||||
KanjiVGPath({required this.id, required this.type, required this.svgPath});
|
||||
|
||||
@override
|
||||
Map<String, Object?> get sqlValue => {
|
||||
'id': id,
|
||||
'type': type,
|
||||
'svgPath': svgPath,
|
||||
};
|
||||
}
|
||||
|
||||
class KanjiVGItem extends SQLWritable {
|
||||
final String character;
|
||||
final List<KanjiVGPath> paths;
|
||||
final List<KanjiStrokeNumber> strokeNumbers;
|
||||
final List<KanjiPathGroupTreeNode> pathGroups;
|
||||
|
||||
KanjiVGItem({
|
||||
required this.character,
|
||||
required this.paths,
|
||||
required this.strokeNumbers,
|
||||
required this.pathGroups,
|
||||
});
|
||||
|
||||
@override
|
||||
Map<String, Object?> get sqlValue => {'character': character};
|
||||
}
|
||||
7
lib/_data_ingestion/kanjivg/seed_data.dart
Normal file
7
lib/_data_ingestion/kanjivg/seed_data.dart
Normal file
@@ -0,0 +1,7 @@
|
||||
import 'package:sqflite_common/sqflite.dart';
|
||||
|
||||
Future<void> seedKanjiVGData(Iterable<String> xmlContents, Database db) async {
|
||||
final b = db.batch();
|
||||
|
||||
await b.commit(noResult: true);
|
||||
}
|
||||
@@ -19,19 +19,18 @@ Future<Database> openLocalDb({
|
||||
throw Exception('JADB_PATH does not exist: $jadbPath');
|
||||
}
|
||||
|
||||
final db =
|
||||
await createDatabaseFactoryFfi().openDatabase(
|
||||
jadbPath,
|
||||
options: OpenDatabaseOptions(
|
||||
onConfigure: (db) async {
|
||||
if (walMode) {
|
||||
await db.execute('PRAGMA journal_mode = WAL');
|
||||
}
|
||||
await db.execute('PRAGMA foreign_keys = ON');
|
||||
},
|
||||
readOnly: !readWrite,
|
||||
),
|
||||
);
|
||||
final db = await createDatabaseFactoryFfi().openDatabase(
|
||||
jadbPath,
|
||||
options: OpenDatabaseOptions(
|
||||
onConfigure: (db) async {
|
||||
if (walMode) {
|
||||
await db.execute('PRAGMA journal_mode = WAL');
|
||||
}
|
||||
await db.execute('PRAGMA foreign_keys = ON');
|
||||
},
|
||||
readOnly: !readWrite,
|
||||
),
|
||||
);
|
||||
|
||||
if (verifyTablesExist) {
|
||||
await db.jadbVerifyTables();
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import 'dart:io';
|
||||
|
||||
Iterable<String> parseRADKFILEBlocks(File radkfile) {
|
||||
final String content = File('data/tmp/radkfile_utf8').readAsStringSync();
|
||||
final String content = radkfile.readAsStringSync();
|
||||
|
||||
final Iterable<String> blocks = content
|
||||
.replaceAll(RegExp(r'^#.*$'), '')
|
||||
|
||||
@@ -24,7 +24,7 @@ Future<void> seedData(Database db) async {
|
||||
|
||||
Future<void> parseAndSeedDataFromJMdict(Database db) async {
|
||||
print('[JMdict] Reading file content...');
|
||||
final String rawXML = File('data/tmp/JMdict.xml').readAsStringSync();
|
||||
final String rawXML = File('data/JMdict.xml').readAsStringSync();
|
||||
|
||||
print('[JMdict] Parsing XML tags...');
|
||||
final XmlElement root = XmlDocument.parse(rawXML).getElement('JMdict')!;
|
||||
@@ -38,7 +38,7 @@ Future<void> parseAndSeedDataFromJMdict(Database db) async {
|
||||
|
||||
Future<void> parseAndSeedDataFromKANJIDIC(Database db) async {
|
||||
print('[KANJIDIC2] Reading file...');
|
||||
final String rawXML = File('data/tmp/kanjidic2.xml').readAsStringSync();
|
||||
final String rawXML = File('data/kanjidic2.xml').readAsStringSync();
|
||||
|
||||
print('[KANJIDIC2] Parsing XML...');
|
||||
final XmlElement root = XmlDocument.parse(rawXML).getElement('kanjidic2')!;
|
||||
@@ -52,7 +52,7 @@ Future<void> parseAndSeedDataFromKANJIDIC(Database db) async {
|
||||
|
||||
Future<void> parseAndSeedDataFromRADKFILE(Database db) async {
|
||||
print('[RADKFILE] Reading file...');
|
||||
final File raw = File('data/tmp/RADKFILE');
|
||||
final File raw = File('data/RADKFILE');
|
||||
|
||||
print('[RADKFILE] Parsing content...');
|
||||
final blocks = parseRADKFILEBlocks(raw);
|
||||
|
||||
@@ -3,18 +3,18 @@ import 'dart:io';
|
||||
|
||||
import 'package:csv/csv.dart';
|
||||
import 'package:jadb/_data_ingestion/tanos-jlpt/objects.dart';
|
||||
import 'package:xml/xml_events.dart';
|
||||
|
||||
Future<List<JLPTRankedWord>> parseJLPTRankedWords(
|
||||
Map<String, File> files,
|
||||
) async {
|
||||
final List<JLPTRankedWord> result = [];
|
||||
|
||||
final codec = CsvCodec(
|
||||
final codec = Csv(
|
||||
fieldDelimiter: ',',
|
||||
lineDelimiter: '\n',
|
||||
quoteMode: QuoteMode.strings,
|
||||
escapeCharacter: '\\',
|
||||
parseHeaders: false,
|
||||
);
|
||||
|
||||
for (final entry in files.entries) {
|
||||
@@ -29,7 +29,6 @@ Future<List<JLPTRankedWord>> parseJLPTRankedWords(
|
||||
.openRead()
|
||||
.transform(utf8.decoder)
|
||||
.transform(codec.decoder)
|
||||
.flatten()
|
||||
.map((row) {
|
||||
if (row.length != 3) {
|
||||
throw Exception('Invalid line in $jlptLevel: $row');
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
const Map<(String?, String), int?> TANOS_JLPT_OVERRIDES = {
|
||||
const Map<(String?, String), int?> tanosJLPTOverrides = {
|
||||
// N5:
|
||||
(null, 'あなた'): 1223615,
|
||||
(null, 'あの'): 1000430,
|
||||
|
||||
@@ -76,7 +76,7 @@ Future<int?> findEntry(
|
||||
if ((entryIds.isEmpty || entryIds.length > 1) && useOverrides) {
|
||||
print('No entry found, trying to fetch from overrides');
|
||||
final overrideEntries = word.readings
|
||||
.map((reading) => TANOS_JLPT_OVERRIDES[(word.kanji, reading)])
|
||||
.map((reading) => tanosJLPTOverrides[(word.kanji, reading)])
|
||||
.whereType<int>()
|
||||
.toSet();
|
||||
|
||||
@@ -86,7 +86,7 @@ Future<int?> findEntry(
|
||||
);
|
||||
} else if (overrideEntries.isEmpty &&
|
||||
!word.readings.any(
|
||||
(reading) => TANOS_JLPT_OVERRIDES.containsKey((word.kanji, reading)),
|
||||
(reading) => tanosJLPTOverrides.containsKey((word.kanji, reading)),
|
||||
)) {
|
||||
throw Exception(
|
||||
'No override entry found for ${word.toString()}: $entryIds',
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
/// Jouyou kanji sorted primarily by grades and secondarily by strokes.
|
||||
const Map<int, Map<int, List<String>>>
|
||||
JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT = {
|
||||
const Map<int, Map<int, List<String>>> jouyouKanjiByGradeAndStrokeCount = {
|
||||
1: {
|
||||
1: ['一'],
|
||||
2: ['力', '八', '入', '二', '人', '十', '七', '九'],
|
||||
@@ -1861,8 +1860,8 @@ JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT = {
|
||||
},
|
||||
};
|
||||
|
||||
final Map<int, List<String>> JOUYOU_KANJI_BY_GRADES =
|
||||
JOUYOU_KANJI_BY_GRADE_AND_STROKE_COUNT.entries
|
||||
final Map<int, List<String>> jouyouKanjiByGrades =
|
||||
jouyouKanjiByGradeAndStrokeCount.entries
|
||||
.expand((entry) => entry.value.entries)
|
||||
.map((entry) => MapEntry(entry.key, entry.value))
|
||||
.fold<Map<int, List<String>>>(
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
const Map<int, List<String>> RADICALS = {
|
||||
const Map<int, List<String>> radicals = {
|
||||
1: ['一', '|', '丶', 'ノ', '乙', '亅'],
|
||||
2: [
|
||||
'二',
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import 'package:jadb/table_names/jmdict.dart';
|
||||
import 'package:jadb/table_names/kanjidic.dart';
|
||||
import 'package:jadb/table_names/kanjivg.dart';
|
||||
import 'package:jadb/table_names/radkfile.dart';
|
||||
import 'package:jadb/table_names/tanos_jlpt.dart';
|
||||
import 'package:sqflite_common/sqlite_api.dart';
|
||||
@@ -21,6 +22,7 @@ Future<void> verifyTablesWithDbConnection(DatabaseExecutor db) async {
|
||||
...KANJIDICTableNames.allTables,
|
||||
...RADKFILETableNames.allTables,
|
||||
...TanosJLPTTableNames.allTables,
|
||||
...KanjiVGTableNames.allTables,
|
||||
};
|
||||
|
||||
final missingTables = expectedTables.difference(tables);
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
import 'package:jadb/models/common/jlpt_level.dart';
|
||||
import 'package:jadb/models/jmdict/jmdict_kanji_info.dart';
|
||||
import 'package:jadb/models/jmdict/jmdict_misc.dart';
|
||||
import 'package:jadb/models/jmdict/jmdict_reading_info.dart';
|
||||
import 'package:jadb/models/word_search/word_search_match_span.dart';
|
||||
import 'package:jadb/models/word_search/word_search_ruby.dart';
|
||||
import 'package:jadb/models/word_search/word_search_sense.dart';
|
||||
import 'package:jadb/models/word_search/word_search_sources.dart';
|
||||
import 'package:jadb/search/word_search/word_search.dart';
|
||||
import 'package:jadb/util/romaji_transliteration.dart';
|
||||
|
||||
/// A class representing a single dictionary entry from a word search.
|
||||
class WordSearchResult {
|
||||
@@ -44,6 +46,42 @@ class WordSearchResult {
|
||||
/// the original searchword.
|
||||
List<WordSearchMatchSpan>? matchSpans;
|
||||
|
||||
/// Whether the first item in [japanese] contains kanji that likely is rare.
|
||||
bool get hasUnusualKanji =>
|
||||
(japanese.first.furigana != null &&
|
||||
kanjiInfo[japanese.first.base] == JMdictKanjiInfo.rK) ||
|
||||
senses.where((sense) => sense.misc.contains(JMdictMisc.onlyKana)).length >
|
||||
(senses.length / 2);
|
||||
|
||||
/// All contents of [japanese], transliterated to romaji
|
||||
List<String> get romaji => japanese
|
||||
.map((word) => transliterateKanaToLatin(word.furigana ?? word.base))
|
||||
.toList();
|
||||
|
||||
/// All contents of [japanase], where the furigana has either been transliterated to romaji, or
|
||||
/// contains the furigana transliteration of [WordSearchRuby.base].
|
||||
List<WordSearchRuby> get romajiRubys => japanese
|
||||
.map(
|
||||
(word) => WordSearchRuby(
|
||||
base: word.base,
|
||||
furigana: word.furigana != null
|
||||
? transliterateKanaToLatin(word.furigana!)
|
||||
: transliterateKanaToLatin(word.base),
|
||||
),
|
||||
)
|
||||
.toList();
|
||||
|
||||
/// The same list of spans as [matchSpans], but the positions have been adjusted for romaji conversion
|
||||
///
|
||||
/// This is mostly useful in conjunction with [romajiRubys].
|
||||
List<WordSearchMatchSpan>? get romajiMatchSpans {
|
||||
if (matchSpans == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
throw UnimplementedError('Not yet implemented');
|
||||
}
|
||||
|
||||
WordSearchResult({
|
||||
required this.score,
|
||||
required this.entryId,
|
||||
@@ -107,7 +145,7 @@ class WordSearchResult {
|
||||
/// Infers which part(s) of this word search result matched the search keyword, and populates [matchSpans] accordingly.
|
||||
void inferMatchSpans(
|
||||
String searchword, {
|
||||
SearchMode searchMode = SearchMode.Auto,
|
||||
SearchMode searchMode = SearchMode.auto,
|
||||
}) {
|
||||
// TODO: handle wildcards like '?' and '*' when that becomes supported in the search.
|
||||
// TODO: If the searchMode is provided, we can use that to narrow down which part of the word search results to look at.
|
||||
@@ -163,7 +201,7 @@ class WordSearchResult {
|
||||
this.matchSpans = matchSpans;
|
||||
}
|
||||
|
||||
String _formatJapaneseWord(WordSearchRuby word) =>
|
||||
static String _formatJapaneseWord(WordSearchRuby word) =>
|
||||
word.furigana == null ? word.base : '${word.base} (${word.furigana})';
|
||||
|
||||
@override
|
||||
|
||||
@@ -30,7 +30,7 @@ extension JaDBConnection on DatabaseExecutor {
|
||||
/// Search for a word in the database.
|
||||
Future<List<WordSearchResult>?> jadbSearchWord(
|
||||
String word, {
|
||||
SearchMode searchMode = SearchMode.Auto,
|
||||
SearchMode searchMode = SearchMode.auto,
|
||||
int page = 0,
|
||||
int? pageSize,
|
||||
}) => searchWordWithDbConnection(
|
||||
@@ -54,7 +54,7 @@ extension JaDBConnection on DatabaseExecutor {
|
||||
/// Search for a word in the database, and return the count of results.
|
||||
Future<int?> jadbSearchWordCount(
|
||||
String word, {
|
||||
SearchMode searchMode = SearchMode.Auto,
|
||||
SearchMode searchMode = SearchMode.auto,
|
||||
}) => searchWordCountWithDbConnection(this, word, searchMode: searchMode);
|
||||
|
||||
/// Given a list of radicals, search which kanji contains all
|
||||
|
||||
@@ -18,7 +18,15 @@ Future<List<String>> filterKanjiWithDbConnection(
|
||||
.then((value) => value.map((e) => e['literal'] as String).toSet());
|
||||
|
||||
if (deduplicate) {
|
||||
return filteredKanji.toList();
|
||||
final List<String> result = [];
|
||||
final Set<String> seen = {};
|
||||
for (final k in kanji) {
|
||||
if (filteredKanji.contains(k) && !seen.contains(k)) {
|
||||
result.add(k);
|
||||
seen.add(k);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
} else {
|
||||
return kanji.where((k) => filteredKanji.contains(k)).toList();
|
||||
}
|
||||
|
||||
@@ -5,140 +5,197 @@ import 'package:jadb/table_names/kanjidic.dart';
|
||||
import 'package:jadb/table_names/radkfile.dart';
|
||||
import 'package:sqflite_common/sqflite.dart';
|
||||
|
||||
Future<List<Map<String, Object?>>> _charactersQuery(
|
||||
DatabaseExecutor connection,
|
||||
String kanji,
|
||||
) => connection.rawQuery(
|
||||
'''
|
||||
SELECT
|
||||
"${KANJIDICTableNames.character}"."literal",
|
||||
"${KANJIDICTableNames.character}"."strokeCount",
|
||||
"${KANJIDICTableNames.grade}"."grade",
|
||||
"${KANJIDICTableNames.jlpt}"."jlpt",
|
||||
"${KANJIDICTableNames.frequency}"."frequency"
|
||||
FROM "${KANJIDICTableNames.character}"
|
||||
LEFT JOIN "${KANJIDICTableNames.grade}" ON "${KANJIDICTableNames.character}"."literal" = "${KANJIDICTableNames.grade}"."kanji"
|
||||
LEFT JOIN "${KANJIDICTableNames.jlpt}" ON "${KANJIDICTableNames.character}"."literal" = "${KANJIDICTableNames.jlpt}"."kanji"
|
||||
LEFT JOIN "${KANJIDICTableNames.frequency}" ON "${KANJIDICTableNames.character}"."literal" = "${KANJIDICTableNames.frequency}"."kanji"
|
||||
WHERE "literal" = ?
|
||||
''',
|
||||
[kanji],
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _codepointsQuery(
|
||||
DatabaseExecutor connection,
|
||||
String kanji,
|
||||
) => connection.query(
|
||||
KANJIDICTableNames.codepoint,
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _kunyomisQuery(
|
||||
DatabaseExecutor connection,
|
||||
String kanji,
|
||||
) => connection.query(
|
||||
KANJIDICTableNames.kunyomi,
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
orderBy: 'orderNum',
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _onyomisQuery(
|
||||
DatabaseExecutor connection,
|
||||
String kanji,
|
||||
) => connection.query(
|
||||
KANJIDICTableNames.onyomi,
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
orderBy: 'orderNum',
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _meaningsQuery(
|
||||
DatabaseExecutor connection,
|
||||
String kanji,
|
||||
) => connection.query(
|
||||
KANJIDICTableNames.meaning,
|
||||
where: 'kanji = ? AND language = ?',
|
||||
whereArgs: [kanji, 'eng'],
|
||||
orderBy: 'orderNum',
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _nanorisQuery(
|
||||
DatabaseExecutor connection,
|
||||
String kanji,
|
||||
) => connection.query(
|
||||
KANJIDICTableNames.nanori,
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _dictionaryReferencesQuery(
|
||||
DatabaseExecutor connection,
|
||||
String kanji,
|
||||
) => connection.query(
|
||||
KANJIDICTableNames.dictionaryReference,
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _queryCodesQuery(
|
||||
DatabaseExecutor connection,
|
||||
String kanji,
|
||||
) => connection.query(
|
||||
KANJIDICTableNames.queryCode,
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _radicalsQuery(
|
||||
DatabaseExecutor connection,
|
||||
String kanji,
|
||||
) => connection.rawQuery(
|
||||
'''
|
||||
SELECT DISTINCT
|
||||
"XREF__KANJIDIC_Radical__RADKFILE"."radicalSymbol" AS "symbol",
|
||||
"names"
|
||||
FROM "${KANJIDICTableNames.radical}"
|
||||
JOIN "XREF__KANJIDIC_Radical__RADKFILE" USING ("radicalId")
|
||||
LEFT JOIN (
|
||||
SELECT "radicalId", group_concat("name") AS "names"
|
||||
FROM "${KANJIDICTableNames.radicalName}"
|
||||
GROUP BY "radicalId"
|
||||
) USING ("radicalId")
|
||||
WHERE "${KANJIDICTableNames.radical}"."kanji" = ?
|
||||
''',
|
||||
[kanji],
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _partsQuery(
|
||||
DatabaseExecutor connection,
|
||||
String kanji,
|
||||
) => connection.query(
|
||||
RADKFILETableNames.radkfile,
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _readingsQuery(
|
||||
DatabaseExecutor connection,
|
||||
String kanji,
|
||||
) => connection.query(
|
||||
KANJIDICTableNames.reading,
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _strokeMiscountsQuery(
|
||||
DatabaseExecutor connection,
|
||||
String kanji,
|
||||
) => connection.query(
|
||||
KANJIDICTableNames.strokeMiscount,
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
);
|
||||
|
||||
// Future<List<Map<String, Object?>>> _variantsQuery(
|
||||
// DatabaseExecutor connection,
|
||||
// String kanji,
|
||||
// ) => connection.query(
|
||||
// KANJIDICTableNames.variant,
|
||||
// where: 'kanji = ?',
|
||||
// whereArgs: [kanji],
|
||||
// );
|
||||
|
||||
/// Searches for a kanji character and returns its details, or null if the kanji is not found in the database.
|
||||
Future<KanjiSearchResult?> searchKanjiWithDbConnection(
|
||||
DatabaseExecutor connection,
|
||||
String kanji,
|
||||
) async {
|
||||
late final List<Map<String, Object?>> characters;
|
||||
final charactersQuery = connection.query(
|
||||
KANJIDICTableNames.character,
|
||||
where: 'literal = ?',
|
||||
whereArgs: [kanji],
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> codepoints;
|
||||
final codepointsQuery = connection.query(
|
||||
KANJIDICTableNames.codepoint,
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> kunyomis;
|
||||
final kunyomisQuery = connection.query(
|
||||
KANJIDICTableNames.kunyomi,
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
orderBy: 'orderNum',
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> onyomis;
|
||||
final onyomisQuery = connection.query(
|
||||
KANJIDICTableNames.onyomi,
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
orderBy: 'orderNum',
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> meanings;
|
||||
final meaningsQuery = connection.query(
|
||||
KANJIDICTableNames.meaning,
|
||||
where: 'kanji = ? AND language = ?',
|
||||
whereArgs: [kanji, 'eng'],
|
||||
orderBy: 'orderNum',
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> nanoris;
|
||||
final nanorisQuery = connection.query(
|
||||
KANJIDICTableNames.nanori,
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> dictionaryReferences;
|
||||
final dictionaryReferencesQuery = connection.query(
|
||||
KANJIDICTableNames.dictionaryReference,
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> queryCodes;
|
||||
final queryCodesQuery = connection.query(
|
||||
KANJIDICTableNames.queryCode,
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> radicals;
|
||||
final radicalsQuery = connection.rawQuery(
|
||||
'''
|
||||
SELECT DISTINCT
|
||||
"XREF__KANJIDIC_Radical__RADKFILE"."radicalSymbol" AS "symbol",
|
||||
"names"
|
||||
FROM "${KANJIDICTableNames.radical}"
|
||||
JOIN "XREF__KANJIDIC_Radical__RADKFILE" USING ("radicalId")
|
||||
LEFT JOIN (
|
||||
SELECT "radicalId", group_concat("name") AS "names"
|
||||
FROM "${KANJIDICTableNames.radicalName}"
|
||||
GROUP BY "radicalId"
|
||||
) USING ("radicalId")
|
||||
WHERE "${KANJIDICTableNames.radical}"."kanji" = ?
|
||||
''',
|
||||
[kanji],
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> parts;
|
||||
final partsQuery = connection.query(
|
||||
RADKFILETableNames.radkfile,
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> readings;
|
||||
final readingsQuery = connection.query(
|
||||
KANJIDICTableNames.reading,
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> strokeMiscounts;
|
||||
final strokeMiscountsQuery = connection.query(
|
||||
KANJIDICTableNames.strokeMiscount,
|
||||
where: 'kanji = ?',
|
||||
whereArgs: [kanji],
|
||||
);
|
||||
|
||||
// TODO: add variant data to result
|
||||
// late final List<Map<String, Object?>> variants;
|
||||
// final variants_query = connection.query(
|
||||
// KANJIDICTableNames.variant,
|
||||
// where: "kanji = ?",
|
||||
// whereArgs: [kanji],
|
||||
// );
|
||||
|
||||
// TODO: Search for kunyomi and onyomi usage of the characters
|
||||
// from JMDict. We'll need to fuzzy aquery JMDict_KanjiElement for mathces,
|
||||
// from JMDict. We'll need to fuzzy aquery JMDict_KanjiElement for matches,
|
||||
// filter JMdict_ReadingElement for kunyomi/onyomi, and then sort the main entry
|
||||
// by JLPT, news frequency, etc.
|
||||
|
||||
await charactersQuery.then((value) => characters = value);
|
||||
await _charactersQuery(connection, kanji).then((value) => characters = value);
|
||||
|
||||
if (characters.isEmpty) {
|
||||
return null;
|
||||
}
|
||||
|
||||
await Future.wait({
|
||||
codepointsQuery.then((value) => codepoints = value),
|
||||
kunyomisQuery.then((value) => kunyomis = value),
|
||||
onyomisQuery.then((value) => onyomis = value),
|
||||
meaningsQuery.then((value) => meanings = value),
|
||||
nanorisQuery.then((value) => nanoris = value),
|
||||
dictionaryReferencesQuery.then((value) => dictionaryReferences = value),
|
||||
queryCodesQuery.then((value) => queryCodes = value),
|
||||
radicalsQuery.then((value) => radicals = value),
|
||||
partsQuery.then((value) => parts = value),
|
||||
readingsQuery.then((value) => readings = value),
|
||||
strokeMiscountsQuery.then((value) => strokeMiscounts = value),
|
||||
_codepointsQuery(connection, kanji).then((value) => codepoints = value),
|
||||
_kunyomisQuery(connection, kanji).then((value) => kunyomis = value),
|
||||
_onyomisQuery(connection, kanji).then((value) => onyomis = value),
|
||||
_meaningsQuery(connection, kanji).then((value) => meanings = value),
|
||||
_nanorisQuery(connection, kanji).then((value) => nanoris = value),
|
||||
_dictionaryReferencesQuery(
|
||||
connection,
|
||||
kanji,
|
||||
).then((value) => dictionaryReferences = value),
|
||||
_queryCodesQuery(connection, kanji).then((value) => queryCodes = value),
|
||||
_radicalsQuery(connection, kanji).then((value) => radicals = value),
|
||||
_partsQuery(connection, kanji).then((value) => parts = value),
|
||||
_readingsQuery(connection, kanji).then((value) => readings = value),
|
||||
_strokeMiscountsQuery(
|
||||
connection,
|
||||
kanji,
|
||||
).then((value) => strokeMiscounts = value),
|
||||
// variants_query.then((value) => variants = value),
|
||||
});
|
||||
|
||||
|
||||
@@ -53,274 +53,387 @@ class LinearWordQueryData {
|
||||
});
|
||||
}
|
||||
|
||||
Future<List<Map<String, Object?>>> _sensesQuery(
|
||||
DatabaseExecutor connection,
|
||||
List<int> entryIds,
|
||||
) => connection.query(
|
||||
JMdictTableNames.sense,
|
||||
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
|
||||
whereArgs: entryIds,
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _readingelementsQuery(
|
||||
DatabaseExecutor connection,
|
||||
List<int> entryIds,
|
||||
) => connection.query(
|
||||
JMdictTableNames.readingElement,
|
||||
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
|
||||
whereArgs: entryIds,
|
||||
orderBy: 'elementId',
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _kanjielementsQuery(
|
||||
DatabaseExecutor connection,
|
||||
List<int> entryIds,
|
||||
) => connection.query(
|
||||
JMdictTableNames.kanjiElement,
|
||||
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
|
||||
whereArgs: entryIds,
|
||||
orderBy: 'elementId',
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _jlpttagsQuery(
|
||||
DatabaseExecutor connection,
|
||||
List<int> entryIds,
|
||||
) => connection.query(
|
||||
TanosJLPTTableNames.jlptTag,
|
||||
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
|
||||
whereArgs: entryIds,
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _commonentriesQuery(
|
||||
DatabaseExecutor connection,
|
||||
List<int> entryIds,
|
||||
) => connection.query(
|
||||
'JMdict_EntryCommon',
|
||||
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
|
||||
whereArgs: entryIds,
|
||||
);
|
||||
|
||||
// Sense queries
|
||||
|
||||
Future<List<Map<String, Object?>>> _senseantonymsQuery(
|
||||
DatabaseExecutor connection,
|
||||
List<int> senseIds,
|
||||
) => connection.rawQuery(
|
||||
"""
|
||||
SELECT
|
||||
"${JMdictTableNames.senseAntonyms}".senseId,
|
||||
"${JMdictTableNames.senseAntonyms}".ambiguous,
|
||||
"${JMdictTableNames.senseAntonyms}".xrefEntryId,
|
||||
"JMdict_BaseAndFurigana"."base",
|
||||
"JMdict_BaseAndFurigana"."furigana"
|
||||
FROM "${JMdictTableNames.senseAntonyms}"
|
||||
JOIN "JMdict_BaseAndFurigana"
|
||||
ON "${JMdictTableNames.senseAntonyms}"."xrefEntryId" = "JMdict_BaseAndFurigana"."entryId"
|
||||
WHERE
|
||||
"senseId" IN (${List.filled(senseIds.length, '?').join(',')})
|
||||
AND "JMdict_BaseAndFurigana"."isFirst"
|
||||
ORDER BY
|
||||
"${JMdictTableNames.senseAntonyms}"."senseId",
|
||||
"${JMdictTableNames.senseAntonyms}"."xrefEntryId"
|
||||
""",
|
||||
[...senseIds],
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _senseseealsosQuery(
|
||||
DatabaseExecutor connection,
|
||||
List<int> senseIds,
|
||||
) => connection.rawQuery(
|
||||
"""
|
||||
SELECT
|
||||
"${JMdictTableNames.senseSeeAlso}"."senseId",
|
||||
"${JMdictTableNames.senseSeeAlso}"."ambiguous",
|
||||
"${JMdictTableNames.senseSeeAlso}"."xrefEntryId",
|
||||
"JMdict_BaseAndFurigana"."base",
|
||||
"JMdict_BaseAndFurigana"."furigana"
|
||||
FROM "${JMdictTableNames.senseSeeAlso}"
|
||||
JOIN "JMdict_BaseAndFurigana"
|
||||
ON "${JMdictTableNames.senseSeeAlso}"."xrefEntryId" = "JMdict_BaseAndFurigana"."entryId"
|
||||
WHERE
|
||||
"senseId" IN (${List.filled(senseIds.length, '?').join(',')})
|
||||
AND "JMdict_BaseAndFurigana"."isFirst"
|
||||
ORDER BY
|
||||
"${JMdictTableNames.senseSeeAlso}"."senseId",
|
||||
"${JMdictTableNames.senseSeeAlso}"."xrefEntryId"
|
||||
""",
|
||||
[...senseIds],
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _sensedialectsQuery(
|
||||
DatabaseExecutor connection,
|
||||
List<int> senseIds,
|
||||
) => connection.query(
|
||||
JMdictTableNames.senseDialect,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _sensefieldsQuery(
|
||||
DatabaseExecutor connection,
|
||||
List<int> senseIds,
|
||||
) => connection.query(
|
||||
JMdictTableNames.senseField,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _senseglossariesQuery(
|
||||
DatabaseExecutor connection,
|
||||
List<int> senseIds,
|
||||
) => connection.query(
|
||||
JMdictTableNames.senseGlossary,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _senseinfosQuery(
|
||||
DatabaseExecutor connection,
|
||||
List<int> senseIds,
|
||||
) => connection.query(
|
||||
JMdictTableNames.senseInfo,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _senselanguagesourcesQuery(
|
||||
DatabaseExecutor connection,
|
||||
List<int> senseIds,
|
||||
) => connection.query(
|
||||
JMdictTableNames.senseLanguageSource,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _sensemiscsQuery(
|
||||
DatabaseExecutor connection,
|
||||
List<int> senseIds,
|
||||
) => connection.query(
|
||||
JMdictTableNames.senseMisc,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _sensepossQuery(
|
||||
DatabaseExecutor connection,
|
||||
List<int> senseIds,
|
||||
) => connection.query(
|
||||
JMdictTableNames.sensePOS,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _senserestrictedtokanjisQuery(
|
||||
DatabaseExecutor connection,
|
||||
List<int> senseIds,
|
||||
) => connection.rawQuery(
|
||||
"""
|
||||
SELECT
|
||||
"${JMdictTableNames.senseRestrictedToKanji}".senseId,
|
||||
"${JMdictTableNames.senseRestrictedToKanji}".kanjiElementId,
|
||||
"${JMdictTableNames.kanjiElement}".reading
|
||||
FROM "${JMdictTableNames.senseRestrictedToKanji}"
|
||||
JOIN "${JMdictTableNames.kanjiElement}"
|
||||
ON "${JMdictTableNames.senseRestrictedToKanji}"."kanjiElementId" = "${JMdictTableNames.kanjiElement}"."elementId"
|
||||
WHERE
|
||||
"senseId" IN (${List.filled(senseIds.length, '?').join(',')})
|
||||
ORDER BY
|
||||
"${JMdictTableNames.senseRestrictedToKanji}"."senseId",
|
||||
"${JMdictTableNames.senseRestrictedToKanji}"."kanjiElementId"
|
||||
""",
|
||||
[...senseIds],
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _senserestrictedtoreadingsQuery(
|
||||
DatabaseExecutor connection,
|
||||
List<int> senseIds,
|
||||
) => connection.rawQuery(
|
||||
"""
|
||||
SELECT
|
||||
"${JMdictTableNames.senseRestrictedToReading}".senseId,
|
||||
"${JMdictTableNames.senseRestrictedToReading}".readingElementId,
|
||||
"${JMdictTableNames.readingElement}".reading
|
||||
FROM "${JMdictTableNames.senseRestrictedToReading}"
|
||||
JOIN "${JMdictTableNames.readingElement}"
|
||||
ON "${JMdictTableNames.senseRestrictedToReading}"."readingElementId" = "${JMdictTableNames.readingElement}"."elementId"
|
||||
WHERE
|
||||
"senseId" IN (${List.filled(senseIds.length, '?').join(',')})
|
||||
ORDER BY
|
||||
"${JMdictTableNames.senseRestrictedToReading}"."senseId",
|
||||
"${JMdictTableNames.senseRestrictedToReading}"."readingElementId"
|
||||
""",
|
||||
[...senseIds],
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _examplesentencesQuery(
|
||||
DatabaseExecutor connection,
|
||||
List<int> senseIds,
|
||||
) => connection.query(
|
||||
'JMdict_ExampleSentence',
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
// Reading/kanji elements queries
|
||||
|
||||
Future<List<Map<String, Object?>>> _readingelementinfosQuery(
|
||||
DatabaseExecutor connection,
|
||||
List<int> readingIds,
|
||||
) => connection.query(
|
||||
JMdictTableNames.readingInfo,
|
||||
where: '(elementId) IN (${List.filled(readingIds.length, '?').join(',')})',
|
||||
whereArgs: readingIds,
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _readingelementrestrictionsQuery(
|
||||
DatabaseExecutor connection,
|
||||
List<int> readingIds,
|
||||
) => connection.query(
|
||||
JMdictTableNames.readingRestriction,
|
||||
where: '(elementId) IN (${List.filled(readingIds.length, '?').join(',')})',
|
||||
whereArgs: readingIds,
|
||||
);
|
||||
|
||||
Future<List<Map<String, Object?>>> _kanjielementinfosQuery(
|
||||
DatabaseExecutor connection,
|
||||
List<int> kanjiIds,
|
||||
) => connection.query(
|
||||
JMdictTableNames.kanjiInfo,
|
||||
where: '(elementId) IN (${List.filled(kanjiIds.length, '?').join(',')})',
|
||||
whereArgs: kanjiIds,
|
||||
);
|
||||
|
||||
// Xref queries
|
||||
|
||||
Future<LinearWordQueryData?> _senseantonymdataQuery(
|
||||
DatabaseExecutor connection,
|
||||
List<int> entryIds,
|
||||
) => fetchLinearWordQueryData(connection, entryIds, fetchXrefData: false);
|
||||
|
||||
Future<LinearWordQueryData?> _senseseealsodataQuery(
|
||||
DatabaseExecutor connection,
|
||||
List<int> entryIds,
|
||||
) => fetchLinearWordQueryData(connection, entryIds, fetchXrefData: false);
|
||||
|
||||
// Full query
|
||||
|
||||
Future<LinearWordQueryData> fetchLinearWordQueryData(
|
||||
DatabaseExecutor connection,
|
||||
List<int> entryIds, {
|
||||
bool fetchXrefData = true,
|
||||
}) async {
|
||||
late final List<Map<String, Object?>> senses;
|
||||
final Future<List<Map<String, Object?>>> sensesQuery = connection.query(
|
||||
JMdictTableNames.sense,
|
||||
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
|
||||
whereArgs: entryIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> readingElements;
|
||||
final Future<List<Map<String, Object?>>> readingelementsQuery = connection
|
||||
.query(
|
||||
JMdictTableNames.readingElement,
|
||||
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
|
||||
whereArgs: entryIds,
|
||||
orderBy: 'orderNum',
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> kanjiElements;
|
||||
final Future<List<Map<String, Object?>>> kanjielementsQuery = connection
|
||||
.query(
|
||||
JMdictTableNames.kanjiElement,
|
||||
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
|
||||
whereArgs: entryIds,
|
||||
orderBy: 'orderNum',
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> jlptTags;
|
||||
final Future<List<Map<String, Object?>>> jlpttagsQuery = connection.query(
|
||||
TanosJLPTTableNames.jlptTag,
|
||||
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
|
||||
whereArgs: entryIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> commonEntries;
|
||||
final Future<List<Map<String, Object?>>> commonentriesQuery = connection
|
||||
.query(
|
||||
'JMdict_EntryCommon',
|
||||
where: 'entryId IN (${List.filled(entryIds.length, '?').join(',')})',
|
||||
whereArgs: entryIds,
|
||||
);
|
||||
|
||||
await Future.wait([
|
||||
sensesQuery.then((value) => senses = value),
|
||||
readingelementsQuery.then((value) => readingElements = value),
|
||||
kanjielementsQuery.then((value) => kanjiElements = value),
|
||||
jlpttagsQuery.then((value) => jlptTags = value),
|
||||
commonentriesQuery.then((value) => commonEntries = value),
|
||||
_sensesQuery(connection, entryIds).then((value) => senses = value),
|
||||
_readingelementsQuery(
|
||||
connection,
|
||||
entryIds,
|
||||
).then((value) => readingElements = value),
|
||||
_kanjielementsQuery(
|
||||
connection,
|
||||
entryIds,
|
||||
).then((value) => kanjiElements = value),
|
||||
_jlpttagsQuery(connection, entryIds).then((value) => jlptTags = value),
|
||||
_commonentriesQuery(
|
||||
connection,
|
||||
entryIds,
|
||||
).then((value) => commonEntries = value),
|
||||
]);
|
||||
|
||||
// Sense queries
|
||||
|
||||
final senseIds = senses.map((sense) => sense['senseId'] as int).toList();
|
||||
|
||||
late final List<Map<String, Object?>> senseAntonyms;
|
||||
final Future<List<Map<String, Object?>>> senseantonymsQuery = connection
|
||||
.rawQuery(
|
||||
"""
|
||||
SELECT
|
||||
"${JMdictTableNames.senseAntonyms}".senseId,
|
||||
"${JMdictTableNames.senseAntonyms}".ambiguous,
|
||||
"${JMdictTableNames.senseAntonyms}".xrefEntryId,
|
||||
"JMdict_BaseAndFurigana"."base",
|
||||
"JMdict_BaseAndFurigana"."furigana"
|
||||
FROM "${JMdictTableNames.senseAntonyms}"
|
||||
JOIN "JMdict_BaseAndFurigana"
|
||||
ON "${JMdictTableNames.senseAntonyms}"."xrefEntryId" = "JMdict_BaseAndFurigana"."entryId"
|
||||
WHERE
|
||||
"senseId" IN (${List.filled(senseIds.length, '?').join(',')})
|
||||
AND "JMdict_BaseAndFurigana"."isFirst"
|
||||
ORDER BY
|
||||
"${JMdictTableNames.senseAntonyms}"."senseId",
|
||||
"${JMdictTableNames.senseAntonyms}"."xrefEntryId"
|
||||
""",
|
||||
[...senseIds],
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> senseDialects;
|
||||
final Future<List<Map<String, Object?>>> sensedialectsQuery = connection
|
||||
.query(
|
||||
JMdictTableNames.senseDialect,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> senseFields;
|
||||
final Future<List<Map<String, Object?>>> sensefieldsQuery = connection.query(
|
||||
JMdictTableNames.senseField,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> senseGlossaries;
|
||||
final Future<List<Map<String, Object?>>> senseglossariesQuery = connection
|
||||
.query(
|
||||
JMdictTableNames.senseGlossary,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> senseInfos;
|
||||
final Future<List<Map<String, Object?>>> senseinfosQuery = connection.query(
|
||||
JMdictTableNames.senseInfo,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> senseLanguageSources;
|
||||
final Future<List<Map<String, Object?>>> senselanguagesourcesQuery =
|
||||
connection.query(
|
||||
JMdictTableNames.senseLanguageSource,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> senseMiscs;
|
||||
final Future<List<Map<String, Object?>>> sensemiscsQuery = connection.query(
|
||||
JMdictTableNames.senseMisc,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> sensePOSs;
|
||||
final Future<List<Map<String, Object?>>> sensepossQuery = connection.query(
|
||||
JMdictTableNames.sensePOS,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> senseRestrictedToKanjis;
|
||||
final Future<List<Map<String, Object?>>> senserestrictedtokanjisQuery =
|
||||
connection.query(
|
||||
JMdictTableNames.senseRestrictedToKanji,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> senseRestrictedToReadings;
|
||||
final Future<List<Map<String, Object?>>> senserestrictedtoreadingsQuery =
|
||||
connection.query(
|
||||
JMdictTableNames.senseRestrictedToReading,
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> senseSeeAlsos;
|
||||
final Future<List<Map<String, Object?>>> senseseealsosQuery = connection
|
||||
.rawQuery(
|
||||
"""
|
||||
SELECT
|
||||
"${JMdictTableNames.senseSeeAlso}"."senseId",
|
||||
"${JMdictTableNames.senseSeeAlso}"."ambiguous",
|
||||
"${JMdictTableNames.senseSeeAlso}"."xrefEntryId",
|
||||
"JMdict_BaseAndFurigana"."base",
|
||||
"JMdict_BaseAndFurigana"."furigana"
|
||||
FROM "${JMdictTableNames.senseSeeAlso}"
|
||||
JOIN "JMdict_BaseAndFurigana"
|
||||
ON "${JMdictTableNames.senseSeeAlso}"."xrefEntryId" = "JMdict_BaseAndFurigana"."entryId"
|
||||
WHERE
|
||||
"senseId" IN (${List.filled(senseIds.length, '?').join(',')})
|
||||
AND "JMdict_BaseAndFurigana"."isFirst"
|
||||
ORDER BY
|
||||
"${JMdictTableNames.senseSeeAlso}"."senseId",
|
||||
"${JMdictTableNames.senseSeeAlso}"."xrefEntryId"
|
||||
""",
|
||||
[...senseIds],
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> exampleSentences;
|
||||
final Future<List<Map<String, Object?>>> examplesentencesQuery = connection
|
||||
.query(
|
||||
'JMdict_ExampleSentence',
|
||||
where: 'senseId IN (${List.filled(senseIds.length, '?').join(',')})',
|
||||
whereArgs: senseIds,
|
||||
);
|
||||
|
||||
// Reading queries
|
||||
|
||||
final readingIds = readingElements
|
||||
.map((element) => element['elementId'] as int)
|
||||
.toList();
|
||||
|
||||
late final List<Map<String, Object?>> readingElementInfos;
|
||||
final Future<List<Map<String, Object?>>> readingelementinfosQuery =
|
||||
connection.query(
|
||||
JMdictTableNames.readingInfo,
|
||||
where:
|
||||
'(elementId) IN (${List.filled(readingIds.length, '?').join(',')})',
|
||||
whereArgs: readingIds,
|
||||
);
|
||||
|
||||
late final List<Map<String, Object?>> readingElementRestrictions;
|
||||
final Future<List<Map<String, Object?>>> readingelementrestrictionsQuery =
|
||||
connection.query(
|
||||
JMdictTableNames.readingRestriction,
|
||||
where:
|
||||
'(elementId) IN (${List.filled(readingIds.length, '?').join(',')})',
|
||||
whereArgs: readingIds,
|
||||
);
|
||||
|
||||
// Kanji queries
|
||||
|
||||
final kanjiIds = kanjiElements
|
||||
.map((element) => element['elementId'] as int)
|
||||
.toList();
|
||||
|
||||
late final List<Map<String, Object?>> readingElementInfos;
|
||||
late final List<Map<String, Object?>> readingElementRestrictions;
|
||||
|
||||
late final List<Map<String, Object?>> kanjiElementInfos;
|
||||
final Future<List<Map<String, Object?>>> kanjielementinfosQuery = connection
|
||||
.query(
|
||||
JMdictTableNames.kanjiInfo,
|
||||
where:
|
||||
'(elementId) IN (${List.filled(kanjiIds.length, '?').join(',')})',
|
||||
whereArgs: kanjiIds,
|
||||
);
|
||||
|
||||
// Xref data queries
|
||||
await Future.wait([
|
||||
senseantonymsQuery.then((value) => senseAntonyms = value),
|
||||
senseseealsosQuery.then((value) => senseSeeAlsos = value),
|
||||
_senseantonymsQuery(
|
||||
connection,
|
||||
senseIds,
|
||||
).then((value) => senseAntonyms = value),
|
||||
_senseseealsosQuery(
|
||||
connection,
|
||||
senseIds,
|
||||
).then((value) => senseSeeAlsos = value),
|
||||
]);
|
||||
|
||||
late final LinearWordQueryData? senseAntonymData;
|
||||
final Future<LinearWordQueryData?> senseantonymdataQuery =
|
||||
fetchXrefData
|
||||
? fetchLinearWordQueryData(
|
||||
connection,
|
||||
senseAntonyms
|
||||
.map((antonym) => antonym['xrefEntryId'] as int)
|
||||
.toList(),
|
||||
fetchXrefData: false,
|
||||
)
|
||||
: Future.value(null);
|
||||
|
||||
late final LinearWordQueryData? senseSeeAlsoData;
|
||||
final Future<LinearWordQueryData?> senseseealsodataQuery =
|
||||
fetchXrefData
|
||||
? fetchLinearWordQueryData(
|
||||
connection,
|
||||
senseSeeAlsos.map((seeAlso) => seeAlso['xrefEntryId'] as int).toList(),
|
||||
fetchXrefData: false,
|
||||
)
|
||||
: Future.value(null);
|
||||
LinearWordQueryData? senseAntonymData;
|
||||
LinearWordQueryData? senseSeeAlsoData;
|
||||
|
||||
await Future.wait([
|
||||
sensedialectsQuery.then((value) => senseDialects = value),
|
||||
sensefieldsQuery.then((value) => senseFields = value),
|
||||
senseglossariesQuery.then((value) => senseGlossaries = value),
|
||||
senseinfosQuery.then((value) => senseInfos = value),
|
||||
senselanguagesourcesQuery.then((value) => senseLanguageSources = value),
|
||||
sensemiscsQuery.then((value) => senseMiscs = value),
|
||||
sensepossQuery.then((value) => sensePOSs = value),
|
||||
senserestrictedtokanjisQuery.then(
|
||||
(value) => senseRestrictedToKanjis = value,
|
||||
),
|
||||
senserestrictedtoreadingsQuery.then(
|
||||
(value) => senseRestrictedToReadings = value,
|
||||
),
|
||||
examplesentencesQuery.then((value) => exampleSentences = value),
|
||||
readingelementinfosQuery.then((value) => readingElementInfos = value),
|
||||
readingelementrestrictionsQuery.then(
|
||||
(value) => readingElementRestrictions = value,
|
||||
),
|
||||
kanjielementinfosQuery.then((value) => kanjiElementInfos = value),
|
||||
senseantonymdataQuery.then((value) => senseAntonymData = value),
|
||||
senseseealsodataQuery.then((value) => senseSeeAlsoData = value),
|
||||
_sensedialectsQuery(
|
||||
connection,
|
||||
senseIds,
|
||||
).then((value) => senseDialects = value),
|
||||
_sensefieldsQuery(
|
||||
connection,
|
||||
senseIds,
|
||||
).then((value) => senseFields = value),
|
||||
_senseglossariesQuery(
|
||||
connection,
|
||||
senseIds,
|
||||
).then((value) => senseGlossaries = value),
|
||||
_senseinfosQuery(connection, senseIds).then((value) => senseInfos = value),
|
||||
_senselanguagesourcesQuery(
|
||||
connection,
|
||||
senseIds,
|
||||
).then((value) => senseLanguageSources = value),
|
||||
_sensemiscsQuery(connection, senseIds).then((value) => senseMiscs = value),
|
||||
_sensepossQuery(connection, senseIds).then((value) => sensePOSs = value),
|
||||
_senserestrictedtokanjisQuery(
|
||||
connection,
|
||||
senseIds,
|
||||
).then((value) => senseRestrictedToKanjis = value),
|
||||
_senserestrictedtoreadingsQuery(
|
||||
connection,
|
||||
senseIds,
|
||||
).then((value) => senseRestrictedToReadings = value),
|
||||
_examplesentencesQuery(
|
||||
connection,
|
||||
senseIds,
|
||||
).then((value) => exampleSentences = value),
|
||||
_readingelementinfosQuery(
|
||||
connection,
|
||||
readingIds,
|
||||
).then((value) => readingElementInfos = value),
|
||||
_readingelementrestrictionsQuery(
|
||||
connection,
|
||||
readingIds,
|
||||
).then((value) => readingElementRestrictions = value),
|
||||
_kanjielementinfosQuery(
|
||||
connection,
|
||||
kanjiIds,
|
||||
).then((value) => kanjiElementInfos = value),
|
||||
|
||||
if (fetchXrefData)
|
||||
_senseantonymdataQuery(
|
||||
connection,
|
||||
senseAntonyms.map((antonym) => antonym['xrefEntryId'] as int).toList(),
|
||||
).then((value) => senseAntonymData = value),
|
||||
|
||||
if (fetchXrefData)
|
||||
_senseseealsodataQuery(
|
||||
connection,
|
||||
senseSeeAlsos.map((seeAlso) => seeAlso['xrefEntryId'] as int).toList(),
|
||||
).then((value) => senseSeeAlsoData = value),
|
||||
]);
|
||||
|
||||
return LinearWordQueryData(
|
||||
|
||||
@@ -15,15 +15,15 @@ SearchMode _determineSearchMode(String word) {
|
||||
final bool containsAscii = RegExp(r'[A-Za-z]').hasMatch(word);
|
||||
|
||||
if (containsKanji && containsAscii) {
|
||||
return SearchMode.MixedKanji;
|
||||
return SearchMode.mixedKanji;
|
||||
} else if (containsKanji) {
|
||||
return SearchMode.Kanji;
|
||||
return SearchMode.kanji;
|
||||
} else if (containsAscii) {
|
||||
return SearchMode.English;
|
||||
return SearchMode.english;
|
||||
} else if (word.contains(hiraganaRegex) || word.contains(katakanaRegex)) {
|
||||
return SearchMode.Kana;
|
||||
return SearchMode.kana;
|
||||
} else {
|
||||
return SearchMode.MixedKana;
|
||||
return SearchMode.mixedKana;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -74,7 +74,7 @@ String _filterFTSSensitiveCharacters(String word) {
|
||||
JOIN "$tableName" USING ("elementId")
|
||||
JOIN "JMdict_EntryScore" USING ("elementId")
|
||||
WHERE "${tableName}FTS"."reading" MATCH ? || '*'
|
||||
AND "JMdict_EntryScore"."type" = '${tableName == JMdictTableNames.kanjiElement ? 'k' : 'r'}'
|
||||
AND "JMdict_EntryScore"."elementId" ${tableName == JMdictTableNames.kanjiElement ? '<' : '>='} 1000000000
|
||||
),
|
||||
non_fts_results AS (
|
||||
SELECT DISTINCT
|
||||
@@ -86,7 +86,7 @@ String _filterFTSSensitiveCharacters(String word) {
|
||||
JOIN "JMdict_EntryScore" USING ("elementId")
|
||||
WHERE "reading" LIKE '%' || ? || '%'
|
||||
AND "$tableName"."entryId" NOT IN (SELECT "entryId" FROM "fts_results")
|
||||
AND "JMdict_EntryScore"."type" = '${tableName == JMdictTableNames.kanjiElement ? 'k' : 'r'}'
|
||||
AND "JMdict_EntryScore"."elementId" ${tableName == JMdictTableNames.kanjiElement ? '<' : '>='} 1000000000
|
||||
)
|
||||
|
||||
SELECT ${countOnly ? 'COUNT(DISTINCT "entryId") AS count' : '"entryId", MAX("score") AS "score"'}
|
||||
@@ -199,23 +199,23 @@ Future<List<ScoredEntryId>> _queryEnglish(
|
||||
SELECT
|
||||
"${JMdictTableNames.sense}"."entryId",
|
||||
MAX("JMdict_EntryScore"."score")
|
||||
+ (("${JMdictTableNames.senseGlossary}"."phrase" = ? AND "${JMdictTableNames.sense}"."orderNum" = 1) * 50)
|
||||
+ (("${JMdictTableNames.senseGlossary}"."phrase" = ? AND "${JMdictTableNames.sense}"."orderNum" = 2) * 30)
|
||||
+ (("${JMdictTableNames.senseGlossary}"."phrase" = ?) * 20)
|
||||
+ (("${JMdictTableNames.senseGlossary}"."phrase" = ?1 AND "${JMdictTableNames.sense}"."orderNum" = 0) * 50)
|
||||
+ (("${JMdictTableNames.senseGlossary}"."phrase" = ?1 AND "${JMdictTableNames.sense}"."orderNum" = 1) * 30)
|
||||
+ (("${JMdictTableNames.senseGlossary}"."phrase" = ?1) * 20)
|
||||
as "score"
|
||||
FROM "${JMdictTableNames.senseGlossary}"
|
||||
JOIN "${JMdictTableNames.sense}" USING ("senseId")
|
||||
JOIN "JMdict_EntryScore" USING ("entryId")
|
||||
WHERE "${JMdictTableNames.senseGlossary}"."phrase" LIKE ?
|
||||
WHERE "${JMdictTableNames.senseGlossary}"."phrase" LIKE ?2
|
||||
GROUP BY "JMdict_EntryScore"."entryId"
|
||||
ORDER BY
|
||||
"score" DESC,
|
||||
"${JMdictTableNames.sense}"."entryId" ASC
|
||||
LIMIT ?
|
||||
OFFSET ?
|
||||
${pageSize != null ? 'LIMIT ?3' : ''}
|
||||
${offset != null ? 'OFFSET ?4' : ''}
|
||||
'''
|
||||
.trim(),
|
||||
[word, word, word, '%${word.replaceAll('%', '')}%', pageSize, offset],
|
||||
[word, '%${word.replaceAll('%', '')}%', if (pageSize != null) pageSize, if (offset != null) offset],
|
||||
);
|
||||
|
||||
return result
|
||||
@@ -246,7 +246,7 @@ Future<List<ScoredEntryId>> fetchEntryIds(
|
||||
int? pageSize,
|
||||
int? offset,
|
||||
) async {
|
||||
if (searchMode == SearchMode.Auto) {
|
||||
if (searchMode == SearchMode.auto) {
|
||||
searchMode = _determineSearchMode(word);
|
||||
}
|
||||
|
||||
@@ -254,20 +254,20 @@ Future<List<ScoredEntryId>> fetchEntryIds(
|
||||
|
||||
late final List<ScoredEntryId> entryIds;
|
||||
switch (searchMode) {
|
||||
case SearchMode.Kanji:
|
||||
case SearchMode.kanji:
|
||||
entryIds = await _queryKanji(connection, word, pageSize, offset);
|
||||
break;
|
||||
|
||||
case SearchMode.Kana:
|
||||
case SearchMode.kana:
|
||||
entryIds = await _queryKana(connection, word, pageSize, offset);
|
||||
break;
|
||||
|
||||
case SearchMode.English:
|
||||
case SearchMode.english:
|
||||
entryIds = await _queryEnglish(connection, word, pageSize, offset);
|
||||
break;
|
||||
|
||||
case SearchMode.MixedKana:
|
||||
case SearchMode.MixedKanji:
|
||||
case SearchMode.mixedKana:
|
||||
case SearchMode.mixedKanji:
|
||||
default:
|
||||
throw UnimplementedError('Search mode $searchMode is not implemented');
|
||||
}
|
||||
@@ -280,7 +280,7 @@ Future<int?> fetchEntryIdCount(
|
||||
String word,
|
||||
SearchMode searchMode,
|
||||
) async {
|
||||
if (searchMode == SearchMode.Auto) {
|
||||
if (searchMode == SearchMode.auto) {
|
||||
searchMode = _determineSearchMode(word);
|
||||
}
|
||||
|
||||
@@ -289,20 +289,20 @@ Future<int?> fetchEntryIdCount(
|
||||
late final int? entryIdCount;
|
||||
|
||||
switch (searchMode) {
|
||||
case SearchMode.Kanji:
|
||||
case SearchMode.kanji:
|
||||
entryIdCount = await _queryKanjiCount(connection, word);
|
||||
break;
|
||||
|
||||
case SearchMode.Kana:
|
||||
case SearchMode.kana:
|
||||
entryIdCount = await _queryKanaCount(connection, word);
|
||||
break;
|
||||
|
||||
case SearchMode.English:
|
||||
case SearchMode.english:
|
||||
entryIdCount = await _queryEnglishCount(connection, word);
|
||||
break;
|
||||
|
||||
case SearchMode.MixedKana:
|
||||
case SearchMode.MixedKanji:
|
||||
case SearchMode.mixedKana:
|
||||
case SearchMode.mixedKanji:
|
||||
default:
|
||||
throw UnimplementedError('Search mode $searchMode is not implemented');
|
||||
}
|
||||
|
||||
@@ -54,7 +54,7 @@ List<WordSearchResult> regroupWordSearchResults({
|
||||
.where((element) => element['entryId'] == scoredEntryId.entryId)
|
||||
.toList();
|
||||
|
||||
final GroupedWordResult entryReadingElementsGrouped = _regroup_words(
|
||||
final GroupedWordResult entryReadingElementsGrouped = _regroupWords(
|
||||
entryId: scoredEntryId.entryId,
|
||||
readingElements: entryReadingElements,
|
||||
kanjiElements: entryKanjiElements,
|
||||
@@ -64,7 +64,7 @@ List<WordSearchResult> regroupWordSearchResults({
|
||||
kanjiElementInfos: linearWordQueryData.kanjiElementInfos,
|
||||
);
|
||||
|
||||
final List<WordSearchSense> entrySensesGrouped = _regroup_senses(
|
||||
final List<WordSearchSense> entrySensesGrouped = _regroupSenses(
|
||||
senses: entrySenses,
|
||||
senseAntonyms: linearWordQueryData.senseAntonyms,
|
||||
senseDialects: linearWordQueryData.senseDialects,
|
||||
@@ -112,7 +112,7 @@ class GroupedWordResult {
|
||||
});
|
||||
}
|
||||
|
||||
GroupedWordResult _regroup_words({
|
||||
GroupedWordResult _regroupWords({
|
||||
required int entryId,
|
||||
required List<Map<String, Object?>> kanjiElements,
|
||||
required List<Map<String, Object?>> kanjiElementInfos,
|
||||
@@ -195,7 +195,7 @@ GroupedWordResult _regroup_words({
|
||||
);
|
||||
}
|
||||
|
||||
List<WordSearchSense> _regroup_senses({
|
||||
List<WordSearchSense> _regroupSenses({
|
||||
required List<Map<String, Object?>> senses,
|
||||
required List<Map<String, Object?>> senseAntonyms,
|
||||
required List<Map<String, Object?>> senseDialects,
|
||||
@@ -318,7 +318,7 @@ List<WordSearchSense> _regroup_senses({
|
||||
.map((e) => e['reading'] as String)
|
||||
.toList(),
|
||||
restrictedToKanji: restrictedToKanjis
|
||||
.map((e) => e['kanji'] as String)
|
||||
.map((e) => e['reading'] as String)
|
||||
.toList(),
|
||||
fields: fields
|
||||
.map((e) => JMdictField.fromId(e['field'] as String))
|
||||
|
||||
@@ -13,13 +13,31 @@ import 'package:jadb/search/word_search/regrouping.dart';
|
||||
import 'package:jadb/table_names/jmdict.dart';
|
||||
import 'package:sqflite_common/sqlite_api.dart';
|
||||
|
||||
enum SearchMode { Auto, English, Kanji, MixedKanji, Kana, MixedKana }
|
||||
enum SearchMode {
|
||||
/// Try to autodetect what is being searched for
|
||||
auto,
|
||||
|
||||
/// Search for english words
|
||||
english,
|
||||
|
||||
/// Search for the kanji reading of a word
|
||||
kanji,
|
||||
|
||||
/// Search for the kanji reading of a word, mixed in with kana/romaji
|
||||
mixedKanji,
|
||||
|
||||
/// Search for the kana reading of a word
|
||||
kana,
|
||||
|
||||
/// Search for the kana reading of a word, mixed in with romaji
|
||||
mixedKana,
|
||||
}
|
||||
|
||||
/// Searches for an input string, returning a list of results with their details. Returns null if the input string is empty.
|
||||
Future<List<WordSearchResult>?> searchWordWithDbConnection(
|
||||
DatabaseExecutor connection,
|
||||
String word, {
|
||||
SearchMode searchMode = SearchMode.Auto,
|
||||
SearchMode searchMode = SearchMode.auto,
|
||||
int page = 0,
|
||||
int? pageSize,
|
||||
}) async {
|
||||
@@ -63,7 +81,7 @@ Future<List<WordSearchResult>?> searchWordWithDbConnection(
|
||||
Future<int?> searchWordCountWithDbConnection(
|
||||
DatabaseExecutor connection,
|
||||
String word, {
|
||||
SearchMode searchMode = SearchMode.Auto,
|
||||
SearchMode searchMode = SearchMode.auto,
|
||||
}) async {
|
||||
if (word.isEmpty) {
|
||||
return null;
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
abstract class JMdictTableNames {
|
||||
static const String version = 'JMdict_Version';
|
||||
static const String entry = 'JMdict_Entry';
|
||||
static const String kanjiElement = 'JMdict_KanjiElement';
|
||||
static const String kanjiInfo = 'JMdict_KanjiElementInfo';
|
||||
@@ -20,6 +21,7 @@ abstract class JMdictTableNames {
|
||||
static const String senseSeeAlso = 'JMdict_SenseSeeAlso';
|
||||
|
||||
static Set<String> get allTables => {
|
||||
version,
|
||||
entry,
|
||||
kanjiElement,
|
||||
kanjiInfo,
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
abstract class KANJIDICTableNames {
|
||||
static const String version = 'KANJIDIC_Version';
|
||||
static const String character = 'KANJIDIC_Character';
|
||||
static const String grade = 'KANJIDIC_Grade';
|
||||
static const String frequency = 'KANJIDIC_Frequency';
|
||||
static const String jlpt = 'KANJIDIC_JLPT';
|
||||
static const String radicalName = 'KANJIDIC_RadicalName';
|
||||
static const String codepoint = 'KANJIDIC_Codepoint';
|
||||
static const String radical = 'KANJIDIC_Radical';
|
||||
@@ -17,7 +21,11 @@ abstract class KANJIDICTableNames {
|
||||
static const String nanori = 'KANJIDIC_Nanori';
|
||||
|
||||
static Set<String> get allTables => {
|
||||
version,
|
||||
character,
|
||||
grade,
|
||||
frequency,
|
||||
jlpt,
|
||||
radicalName,
|
||||
codepoint,
|
||||
radical,
|
||||
|
||||
9
lib/table_names/kanjivg.dart
Normal file
9
lib/table_names/kanjivg.dart
Normal file
@@ -0,0 +1,9 @@
|
||||
abstract class KanjiVGTableNames {
|
||||
static const String version = 'KanjiVG_Version';
|
||||
static const String entry = 'KanjiVG_Entry';
|
||||
static const String path = 'KanjiVG_Path';
|
||||
static const String strokeNumber = 'KanjiVG_StrokeNumber';
|
||||
static const String pathGroup = 'KanjiVG_PathGroup';
|
||||
|
||||
static Set<String> get allTables => {version, entry, path, strokeNumber, pathGroup};
|
||||
}
|
||||
@@ -1,5 +1,6 @@
|
||||
abstract class RADKFILETableNames {
|
||||
static const String version = 'RADKFILE_Version';
|
||||
static const String radkfile = 'RADKFILE';
|
||||
|
||||
static Set<String> get allTables => {radkfile};
|
||||
static Set<String> get allTables => {version, radkfile};
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
abstract class TanosJLPTTableNames {
|
||||
static const String version = 'JMdict_JLPT_Version';
|
||||
static const String jlptTag = 'JMdict_JLPTTag';
|
||||
|
||||
static Set<String> get allTables => {jlptTag};
|
||||
static Set<String> get allTables => {version, jlptTag};
|
||||
}
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import 'package:collection/collection.dart';
|
||||
import 'package:jadb/util/lemmatizer/rules.dart';
|
||||
|
||||
enum WordClass {
|
||||
@@ -10,6 +11,8 @@ enum WordClass {
|
||||
adverb,
|
||||
particle,
|
||||
input,
|
||||
|
||||
// TODO: add toString and fromString so it can be parsed by the cli
|
||||
}
|
||||
|
||||
enum LemmatizationRuleType { prefix, suffix }
|
||||
@@ -18,7 +21,7 @@ class LemmatizationRule {
|
||||
final String name;
|
||||
final AllomorphPattern pattern;
|
||||
final WordClass wordClass;
|
||||
final List<WordClass>? validChildClasses;
|
||||
final Set<WordClass>? validChildClasses;
|
||||
final bool terminal;
|
||||
|
||||
const LemmatizationRule({
|
||||
@@ -38,9 +41,9 @@ class LemmatizationRule {
|
||||
required String pattern,
|
||||
required String? replacement,
|
||||
required WordClass wordClass,
|
||||
validChildClasses,
|
||||
terminal = false,
|
||||
lookAheadBehind = const [''],
|
||||
Set<WordClass>? validChildClasses,
|
||||
bool terminal = false,
|
||||
List<Pattern> lookAheadBehind = const [''],
|
||||
LemmatizationRuleType type = LemmatizationRuleType.suffix,
|
||||
}) : this(
|
||||
name: name,
|
||||
@@ -55,6 +58,27 @@ class LemmatizationRule {
|
||||
terminal: terminal,
|
||||
wordClass: wordClass,
|
||||
);
|
||||
|
||||
@override
|
||||
int get hashCode => Object.hash(
|
||||
name,
|
||||
pattern,
|
||||
wordClass,
|
||||
validChildClasses,
|
||||
terminal,
|
||||
SetEquality().hash(validChildClasses),
|
||||
);
|
||||
|
||||
@override
|
||||
bool operator ==(Object other) {
|
||||
if (identical(this, other)) return true;
|
||||
return other is LemmatizationRule &&
|
||||
other.name == name &&
|
||||
other.pattern == pattern &&
|
||||
other.wordClass == wordClass &&
|
||||
other.terminal == terminal &&
|
||||
SetEquality().equals(validChildClasses, other.validChildClasses);
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents a set of patterns for matching allomorphs in a word.
|
||||
@@ -71,6 +95,7 @@ class AllomorphPattern {
|
||||
this.lookAheadBehind = const [''],
|
||||
});
|
||||
|
||||
/// Convert the [patterns] into regexes
|
||||
List<(String, Pattern)> get allPatternCombinations {
|
||||
final combinations = <(String, Pattern)>[];
|
||||
for (final l in lookAheadBehind) {
|
||||
@@ -94,6 +119,7 @@ class AllomorphPattern {
|
||||
return combinations;
|
||||
}
|
||||
|
||||
/// Check whether an input string matches any of the [patterns]
|
||||
bool matches(String word) {
|
||||
for (final (_, p) in allPatternCombinations) {
|
||||
if (p is String) {
|
||||
@@ -111,6 +137,9 @@ class AllomorphPattern {
|
||||
return false;
|
||||
}
|
||||
|
||||
/// Apply the replacement for this pattern.
|
||||
///
|
||||
/// If none of the [patterns] apply, this function returns `null`.
|
||||
List<String>? apply(String word) {
|
||||
for (final (affix, p) in allPatternCombinations) {
|
||||
switch ((type, p is RegExp)) {
|
||||
@@ -157,6 +186,22 @@ class AllomorphPattern {
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@override
|
||||
int get hashCode => Object.hash(
|
||||
type,
|
||||
ListEquality().hash(lookAheadBehind),
|
||||
MapEquality().hash(patterns),
|
||||
);
|
||||
|
||||
@override
|
||||
bool operator ==(Object other) {
|
||||
if (identical(this, other)) return true;
|
||||
return other is AllomorphPattern &&
|
||||
other.type == type &&
|
||||
ListEquality().equals(other.lookAheadBehind, lookAheadBehind) &&
|
||||
MapEquality().equals(other.patterns, patterns);
|
||||
}
|
||||
}
|
||||
|
||||
class Lemmatized {
|
||||
@@ -203,9 +248,10 @@ List<Lemmatized> _lemmatize(LemmatizationRule parentRule, String word) {
|
||||
|
||||
final filteredLemmatizationRules = parentRule.validChildClasses == null
|
||||
? lemmatizationRules
|
||||
: lemmatizationRules.where(
|
||||
(r) => parentRule.validChildClasses!.contains(r.wordClass),
|
||||
);
|
||||
: [
|
||||
for (final wordClass in parentRule.validChildClasses!)
|
||||
...lemmatizationRulesByWordClass[wordClass]!,
|
||||
];
|
||||
|
||||
for (final rule in filteredLemmatizationRules) {
|
||||
if (rule.matches(word)) {
|
||||
|
||||
@@ -1,10 +1,17 @@
|
||||
import 'package:jadb/util/lemmatizer/lemmatizer.dart';
|
||||
import 'package:jadb/util/lemmatizer/rules/godan-verbs.dart';
|
||||
import 'package:jadb/util/lemmatizer/rules/i-adjectives.dart';
|
||||
import 'package:jadb/util/lemmatizer/rules/ichidan-verbs.dart';
|
||||
import 'package:jadb/util/lemmatizer/rules/godan_verbs.dart';
|
||||
import 'package:jadb/util/lemmatizer/rules/i_adjectives.dart';
|
||||
import 'package:jadb/util/lemmatizer/rules/ichidan_verbs.dart';
|
||||
|
||||
List<LemmatizationRule> lemmatizationRules = [
|
||||
final List<LemmatizationRule> lemmatizationRules = List.unmodifiable([
|
||||
...ichidanVerbLemmatizationRules,
|
||||
...godanVerbLemmatizationRules,
|
||||
...iAdjectiveLemmatizationRules,
|
||||
];
|
||||
]);
|
||||
|
||||
final Map<WordClass, List<LemmatizationRule>> lemmatizationRulesByWordClass =
|
||||
Map.unmodifiable({
|
||||
WordClass.ichidanVerb: ichidanVerbLemmatizationRules,
|
||||
WordClass.iAdjective: iAdjectiveLemmatizationRules,
|
||||
WordClass.godanVerb: godanVerbLemmatizationRules,
|
||||
});
|
||||
|
||||
@@ -1,457 +0,0 @@
|
||||
import 'package:jadb/util/lemmatizer/lemmatizer.dart';
|
||||
|
||||
List<LemmatizationRule> godanVerbLemmatizationRules = [
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - base form',
|
||||
terminal: true,
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'う': ['う'],
|
||||
'く': ['く'],
|
||||
'ぐ': ['ぐ'],
|
||||
'す': ['す'],
|
||||
'つ': ['つ'],
|
||||
'ぬ': ['ぬ'],
|
||||
'ぶ': ['ぶ'],
|
||||
'む': ['む'],
|
||||
'る': ['る'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - negative form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'わない': ['う'],
|
||||
'かない': ['く'],
|
||||
'がない': ['ぐ'],
|
||||
'さない': ['す'],
|
||||
'たない': ['つ'],
|
||||
'なない': ['ぬ'],
|
||||
'ばない': ['ぶ'],
|
||||
'まない': ['む'],
|
||||
'らない': ['る'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - past form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'した': ['す'],
|
||||
'った': ['る', 'つ', 'う'],
|
||||
'んだ': ['む', 'ぬ', 'ぶ'],
|
||||
'いだ': ['ぐ'],
|
||||
'いた': ['く'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - te-form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'いて': ['く', 'ぐ'],
|
||||
'して': ['す'],
|
||||
'って': ['る', 'つ', 'う'],
|
||||
'んで': ['む', 'ぬ', 'ぶ'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - te-form with いる',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'いている': ['く', 'ぐ'],
|
||||
'している': ['す'],
|
||||
'っている': ['る', 'つ', 'う'],
|
||||
'んでいる': ['む', 'ぬ', 'ぶ'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - te-form with いた',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'いていた': ['く', 'ぐ'],
|
||||
'していた': ['す'],
|
||||
'っていた': ['る', 'つ', 'う'],
|
||||
'んでいた': ['む', 'ぬ', 'ぶ'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - conditional form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'けば': ['く'],
|
||||
'げば': ['ぐ'],
|
||||
'せば': ['す'],
|
||||
'てば': ['つ', 'る', 'う'],
|
||||
'ねば': ['ぬ'],
|
||||
'べば': ['ぶ'],
|
||||
'めば': ['む'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - volitional form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'おう': ['う'],
|
||||
'こう': ['く'],
|
||||
'ごう': ['ぐ'],
|
||||
'そう': ['す'],
|
||||
'とう': ['つ', 'る', 'う'],
|
||||
'のう': ['ぬ'],
|
||||
'ぼう': ['ぶ'],
|
||||
'もう': ['む'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - potential form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'ける': ['く'],
|
||||
'げる': ['ぐ'],
|
||||
'せる': ['す'],
|
||||
'てる': ['つ', 'る', 'う'],
|
||||
'ねる': ['ぬ'],
|
||||
'べる': ['ぶ'],
|
||||
'める': ['む'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - passive form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'かれる': ['く'],
|
||||
'がれる': ['ぐ'],
|
||||
'される': ['す'],
|
||||
'たれる': ['つ', 'る', 'う'],
|
||||
'なれる': ['ぬ'],
|
||||
'ばれる': ['ぶ'],
|
||||
'まれる': ['む'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - causative form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'かせる': ['く'],
|
||||
'がせる': ['ぐ'],
|
||||
'させる': ['す'],
|
||||
'たせる': ['つ', 'る', 'う'],
|
||||
'なせる': ['ぬ'],
|
||||
'ばせる': ['ぶ'],
|
||||
'ませる': ['む'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - causative-passive form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'かされる': ['く'],
|
||||
'がされる': ['ぐ'],
|
||||
'される': ['す'],
|
||||
'たされる': ['つ', 'る', 'う'],
|
||||
'なされる': ['ぬ'],
|
||||
'ばされる': ['ぶ'],
|
||||
'まされる': ['む'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - imperative form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'え': ['う'],
|
||||
'け': ['く'],
|
||||
'げ': ['ぐ'],
|
||||
'せ': ['す'],
|
||||
'て': ['つ', 'る', 'う'],
|
||||
'ね': ['ぬ'],
|
||||
'べ': ['ぶ'],
|
||||
'め': ['む'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - negative past form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'わなかった': ['う'],
|
||||
'かなかった': ['く'],
|
||||
'がなかった': ['ぐ'],
|
||||
'さなかった': ['す'],
|
||||
'たなかった': ['つ'],
|
||||
'ななかった': ['ぬ'],
|
||||
'ばなかった': ['ぶ'],
|
||||
'まなかった': ['む'],
|
||||
'らなかった': ['る'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - negative te-form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'わなくて': ['う'],
|
||||
'かなくて': ['く'],
|
||||
'がなくて': ['ぐ'],
|
||||
'さなくて': ['す'],
|
||||
'たなくて': ['つ'],
|
||||
'ななくて': ['ぬ'],
|
||||
'ばなくて': ['ぶ'],
|
||||
'まなくて': ['む'],
|
||||
'らなくて': ['る'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - negative conditional form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'わなければ': ['う'],
|
||||
'かなければ': ['く'],
|
||||
'がなければ': ['ぐ'],
|
||||
'さなければ': ['す'],
|
||||
'たなければ': ['つ'],
|
||||
'ななければ': ['ぬ'],
|
||||
'ばなければ': ['ぶ'],
|
||||
'まなければ': ['む'],
|
||||
'らなければ': ['る'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - negative volitional form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'うまい': ['う'],
|
||||
'くまい': ['く'],
|
||||
'ぐまい': ['ぐ'],
|
||||
'すまい': ['す'],
|
||||
'つまい': ['つ', 'る', 'う'],
|
||||
'ぬまい': ['ぬ'],
|
||||
'ぶまい': ['ぶ'],
|
||||
'むまい': ['む'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - negative potential form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'けない': ['く'],
|
||||
'げない': ['ぐ'],
|
||||
'せない': ['す'],
|
||||
'てない': ['つ', 'る', 'う'],
|
||||
'ねない': ['ぬ'],
|
||||
'べない': ['ぶ'],
|
||||
'めない': ['む'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - negative passive form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'かれない': ['く'],
|
||||
'がれない': ['ぐ'],
|
||||
'されない': ['す'],
|
||||
'たれない': ['つ', 'る', 'う'],
|
||||
'なれない': ['ぬ'],
|
||||
'ばれない': ['ぶ'],
|
||||
'まれない': ['む'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - negative causative form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'かせない': ['く'],
|
||||
'がせない': ['ぐ'],
|
||||
'させない': ['す'],
|
||||
'たせない': ['つ', 'る', 'う'],
|
||||
'なせない': ['ぬ'],
|
||||
'ばせない': ['ぶ'],
|
||||
'ませない': ['む'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - negative causative-passive form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'かされない': ['く'],
|
||||
'がされない': ['ぐ'],
|
||||
'されない': ['す'],
|
||||
'たされない': ['つ', 'る', 'う'],
|
||||
'なされない': ['ぬ'],
|
||||
'ばされない': ['ぶ'],
|
||||
'まされない': ['む'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - negative imperative form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'うな': ['う'],
|
||||
'くな': ['く'],
|
||||
'ぐな': ['ぐ'],
|
||||
'すな': ['す'],
|
||||
'つな': ['つ'],
|
||||
'ぬな': ['ぬ'],
|
||||
'ぶな': ['ぶ'],
|
||||
'むな': ['む'],
|
||||
'るな': ['る'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - desire form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'きたい': ['く'],
|
||||
'ぎたい': ['ぐ'],
|
||||
'したい': ['す'],
|
||||
'ちたい': ['つ'],
|
||||
'にたい': ['ぬ'],
|
||||
'びたい': ['ぶ'],
|
||||
'みたい': ['む'],
|
||||
'りたい': ['る'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - negative desire form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'いたくない': ['う'],
|
||||
'きたくない': ['く'],
|
||||
'ぎたくない': ['ぐ'],
|
||||
'したくない': ['す'],
|
||||
'ちたくない': ['つ'],
|
||||
'にたくない': ['ぬ'],
|
||||
'びたくない': ['ぶ'],
|
||||
'みたくない': ['む'],
|
||||
'りたくない': ['る'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - past desire form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'きたかった': ['く'],
|
||||
'ぎたかった': ['ぐ'],
|
||||
'したかった': ['す'],
|
||||
'ちたかった': ['つ'],
|
||||
'にたかった': ['ぬ'],
|
||||
'びたかった': ['ぶ'],
|
||||
'みたかった': ['む'],
|
||||
'りたかった': ['る'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
LemmatizationRule(
|
||||
name: 'Godan verb - negative past desire form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'いたくなかった': ['う'],
|
||||
'きたくなかった': ['く'],
|
||||
'ぎたくなかった': ['ぐ'],
|
||||
'したくなかった': ['す'],
|
||||
'ちたくなかった': ['つ'],
|
||||
'にたくなかった': ['ぬ'],
|
||||
'びたくなかった': ['ぶ'],
|
||||
'みたくなかった': ['む'],
|
||||
'りたくなかった': ['る'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: [WordClass.godanVerb],
|
||||
wordClass: WordClass.godanVerb,
|
||||
),
|
||||
];
|
||||
509
lib/util/lemmatizer/rules/godan_verbs.dart
Normal file
509
lib/util/lemmatizer/rules/godan_verbs.dart
Normal file
@@ -0,0 +1,509 @@
|
||||
import 'package:jadb/util/lemmatizer/lemmatizer.dart';
|
||||
|
||||
final LemmatizationRule godanVerbBase = LemmatizationRule(
|
||||
name: 'Godan verb - base form',
|
||||
terminal: true,
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'う': ['う'],
|
||||
'く': ['く'],
|
||||
'ぐ': ['ぐ'],
|
||||
'す': ['す'],
|
||||
'つ': ['つ'],
|
||||
'ぬ': ['ぬ'],
|
||||
'ぶ': ['ぶ'],
|
||||
'む': ['む'],
|
||||
'る': ['る'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule godanVerbNegative = LemmatizationRule(
|
||||
name: 'Godan verb - negative form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'わない': ['う'],
|
||||
'かない': ['く'],
|
||||
'がない': ['ぐ'],
|
||||
'さない': ['す'],
|
||||
'たない': ['つ'],
|
||||
'なない': ['ぬ'],
|
||||
'ばない': ['ぶ'],
|
||||
'まない': ['む'],
|
||||
'らない': ['る'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule godanVerbPast = LemmatizationRule(
|
||||
name: 'Godan verb - past form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'した': ['す'],
|
||||
'った': ['る', 'つ', 'う'],
|
||||
'んだ': ['む', 'ぬ', 'ぶ'],
|
||||
'いだ': ['ぐ'],
|
||||
'いた': ['く'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule godanVerbTe = LemmatizationRule(
|
||||
name: 'Godan verb - te-form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'いて': ['く', 'ぐ'],
|
||||
'して': ['す'],
|
||||
'って': ['る', 'つ', 'う'],
|
||||
'んで': ['む', 'ぬ', 'ぶ'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule godanVerbTeiru = LemmatizationRule(
|
||||
name: 'Godan verb - te-form with いる',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'いている': ['く', 'ぐ'],
|
||||
'している': ['す'],
|
||||
'っている': ['る', 'つ', 'う'],
|
||||
'んでいる': ['む', 'ぬ', 'ぶ'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule godanVerbTeita = LemmatizationRule(
|
||||
name: 'Godan verb - te-form with いた',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'いていた': ['く', 'ぐ'],
|
||||
'していた': ['す'],
|
||||
'っていた': ['る', 'つ', 'う'],
|
||||
'んでいた': ['む', 'ぬ', 'ぶ'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule godanVerbConditional = LemmatizationRule(
|
||||
name: 'Godan verb - conditional form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'けば': ['く'],
|
||||
'げば': ['ぐ'],
|
||||
'せば': ['す'],
|
||||
'てば': ['つ', 'る', 'う'],
|
||||
'ねば': ['ぬ'],
|
||||
'べば': ['ぶ'],
|
||||
'めば': ['む'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule godanVerbVolitional = LemmatizationRule(
|
||||
name: 'Godan verb - volitional form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'おう': ['う'],
|
||||
'こう': ['く'],
|
||||
'ごう': ['ぐ'],
|
||||
'そう': ['す'],
|
||||
'とう': ['つ', 'る', 'う'],
|
||||
'のう': ['ぬ'],
|
||||
'ぼう': ['ぶ'],
|
||||
'もう': ['む'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule godanVerbPotential = LemmatizationRule(
|
||||
name: 'Godan verb - potential form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'ける': ['く'],
|
||||
'げる': ['ぐ'],
|
||||
'せる': ['す'],
|
||||
'てる': ['つ', 'る', 'う'],
|
||||
'ねる': ['ぬ'],
|
||||
'べる': ['ぶ'],
|
||||
'める': ['む'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule godanVerbPassive = LemmatizationRule(
|
||||
name: 'Godan verb - passive form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'かれる': ['く'],
|
||||
'がれる': ['ぐ'],
|
||||
'される': ['す'],
|
||||
'たれる': ['つ', 'る', 'う'],
|
||||
'なれる': ['ぬ'],
|
||||
'ばれる': ['ぶ'],
|
||||
'まれる': ['む'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule godanVerbCausative = LemmatizationRule(
|
||||
name: 'Godan verb - causative form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'かせる': ['く'],
|
||||
'がせる': ['ぐ'],
|
||||
'させる': ['す'],
|
||||
'たせる': ['つ', 'る', 'う'],
|
||||
'なせる': ['ぬ'],
|
||||
'ばせる': ['ぶ'],
|
||||
'ませる': ['む'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule godanVerbCausativePassive = LemmatizationRule(
|
||||
name: 'Godan verb - causative-passive form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'かされる': ['く'],
|
||||
'がされる': ['ぐ'],
|
||||
'される': ['す'],
|
||||
'たされる': ['つ', 'る', 'う'],
|
||||
'なされる': ['ぬ'],
|
||||
'ばされる': ['ぶ'],
|
||||
'まされる': ['む'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule godanVerbImperative = LemmatizationRule(
|
||||
name: 'Godan verb - imperative form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'え': ['う'],
|
||||
'け': ['く'],
|
||||
'げ': ['ぐ'],
|
||||
'せ': ['す'],
|
||||
'て': ['つ', 'る', 'う'],
|
||||
'ね': ['ぬ'],
|
||||
'べ': ['ぶ'],
|
||||
'め': ['む'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule godanVerbNegativePast = LemmatizationRule(
|
||||
name: 'Godan verb - negative past form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'わなかった': ['う'],
|
||||
'かなかった': ['く'],
|
||||
'がなかった': ['ぐ'],
|
||||
'さなかった': ['す'],
|
||||
'たなかった': ['つ'],
|
||||
'ななかった': ['ぬ'],
|
||||
'ばなかった': ['ぶ'],
|
||||
'まなかった': ['む'],
|
||||
'らなかった': ['る'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule godanVerbNegativeTe = LemmatizationRule(
|
||||
name: 'Godan verb - negative te-form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'わなくて': ['う'],
|
||||
'かなくて': ['く'],
|
||||
'がなくて': ['ぐ'],
|
||||
'さなくて': ['す'],
|
||||
'たなくて': ['つ'],
|
||||
'ななくて': ['ぬ'],
|
||||
'ばなくて': ['ぶ'],
|
||||
'まなくて': ['む'],
|
||||
'らなくて': ['る'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule godanVerbNegativeConditional = LemmatizationRule(
|
||||
name: 'Godan verb - negative conditional form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'わなければ': ['う'],
|
||||
'かなければ': ['く'],
|
||||
'がなければ': ['ぐ'],
|
||||
'さなければ': ['す'],
|
||||
'たなければ': ['つ'],
|
||||
'ななければ': ['ぬ'],
|
||||
'ばなければ': ['ぶ'],
|
||||
'まなければ': ['む'],
|
||||
'らなければ': ['る'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule godanVerbNegativeVolitional = LemmatizationRule(
|
||||
name: 'Godan verb - negative volitional form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'うまい': ['う'],
|
||||
'くまい': ['く'],
|
||||
'ぐまい': ['ぐ'],
|
||||
'すまい': ['す'],
|
||||
'つまい': ['つ', 'る', 'う'],
|
||||
'ぬまい': ['ぬ'],
|
||||
'ぶまい': ['ぶ'],
|
||||
'むまい': ['む'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule godanVerbNegativePotential = LemmatizationRule(
|
||||
name: 'Godan verb - negative potential form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'けない': ['く'],
|
||||
'げない': ['ぐ'],
|
||||
'せない': ['す'],
|
||||
'てない': ['つ', 'る', 'う'],
|
||||
'ねない': ['ぬ'],
|
||||
'べない': ['ぶ'],
|
||||
'めない': ['む'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule godanVerbNegativePassive = LemmatizationRule(
|
||||
name: 'Godan verb - negative passive form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'かれない': ['く'],
|
||||
'がれない': ['ぐ'],
|
||||
'されない': ['す'],
|
||||
'たれない': ['つ', 'る', 'う'],
|
||||
'なれない': ['ぬ'],
|
||||
'ばれない': ['ぶ'],
|
||||
'まれない': ['む'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule godanVerbNegativeCausative = LemmatizationRule(
|
||||
name: 'Godan verb - negative causative form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'かせない': ['く'],
|
||||
'がせない': ['ぐ'],
|
||||
'させない': ['す'],
|
||||
'たせない': ['つ', 'る', 'う'],
|
||||
'なせない': ['ぬ'],
|
||||
'ばせない': ['ぶ'],
|
||||
'ませない': ['む'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule godanVerbNegativeCausativePassive = LemmatizationRule(
|
||||
name: 'Godan verb - negative causative-passive form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'かされない': ['く'],
|
||||
'がされない': ['ぐ'],
|
||||
'されない': ['す'],
|
||||
'たされない': ['つ', 'る', 'う'],
|
||||
'なされない': ['ぬ'],
|
||||
'ばされない': ['ぶ'],
|
||||
'まされない': ['む'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule godanVerbNegativeImperative = LemmatizationRule(
|
||||
name: 'Godan verb - negative imperative form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'うな': ['う'],
|
||||
'くな': ['く'],
|
||||
'ぐな': ['ぐ'],
|
||||
'すな': ['す'],
|
||||
'つな': ['つ'],
|
||||
'ぬな': ['ぬ'],
|
||||
'ぶな': ['ぶ'],
|
||||
'むな': ['む'],
|
||||
'るな': ['る'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule godanVerbDesire = LemmatizationRule(
|
||||
name: 'Godan verb - desire form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'きたい': ['く'],
|
||||
'ぎたい': ['ぐ'],
|
||||
'したい': ['す'],
|
||||
'ちたい': ['つ'],
|
||||
'にたい': ['ぬ'],
|
||||
'びたい': ['ぶ'],
|
||||
'みたい': ['む'],
|
||||
'りたい': ['る'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule godanVerbNegativeDesire = LemmatizationRule(
|
||||
name: 'Godan verb - negative desire form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'いたくない': ['う'],
|
||||
'きたくない': ['く'],
|
||||
'ぎたくない': ['ぐ'],
|
||||
'したくない': ['す'],
|
||||
'ちたくない': ['つ'],
|
||||
'にたくない': ['ぬ'],
|
||||
'びたくない': ['ぶ'],
|
||||
'みたくない': ['む'],
|
||||
'りたくない': ['る'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule godanVerbPastDesire = LemmatizationRule(
|
||||
name: 'Godan verb - past desire form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'きたかった': ['く'],
|
||||
'ぎたかった': ['ぐ'],
|
||||
'したかった': ['す'],
|
||||
'ちたかった': ['つ'],
|
||||
'にたかった': ['ぬ'],
|
||||
'びたかった': ['ぶ'],
|
||||
'みたかった': ['む'],
|
||||
'りたかった': ['る'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule godanVerbNegativePastDesire = LemmatizationRule(
|
||||
name: 'Godan verb - negative past desire form',
|
||||
pattern: AllomorphPattern(
|
||||
patterns: {
|
||||
'いたくなかった': ['う'],
|
||||
'きたくなかった': ['く'],
|
||||
'ぎたくなかった': ['ぐ'],
|
||||
'したくなかった': ['す'],
|
||||
'ちたくなかった': ['つ'],
|
||||
'にたくなかった': ['ぬ'],
|
||||
'びたくなかった': ['ぶ'],
|
||||
'みたくなかった': ['む'],
|
||||
'りたくなかった': ['る'],
|
||||
},
|
||||
type: LemmatizationRuleType.suffix,
|
||||
),
|
||||
validChildClasses: {WordClass.godanVerb},
|
||||
wordClass: WordClass.godanVerb,
|
||||
);
|
||||
|
||||
final List<LemmatizationRule> godanVerbLemmatizationRules = List.unmodifiable([
|
||||
godanVerbBase,
|
||||
godanVerbNegative,
|
||||
godanVerbPast,
|
||||
godanVerbTe,
|
||||
godanVerbTeiru,
|
||||
godanVerbTeita,
|
||||
godanVerbConditional,
|
||||
godanVerbVolitional,
|
||||
godanVerbPotential,
|
||||
godanVerbPassive,
|
||||
godanVerbCausative,
|
||||
godanVerbCausativePassive,
|
||||
godanVerbImperative,
|
||||
godanVerbNegativePast,
|
||||
godanVerbNegativeTe,
|
||||
godanVerbNegativeConditional,
|
||||
godanVerbNegativeVolitional,
|
||||
godanVerbNegativePotential,
|
||||
godanVerbNegativePassive,
|
||||
godanVerbNegativeCausative,
|
||||
godanVerbNegativeCausativePassive,
|
||||
godanVerbNegativeImperative,
|
||||
godanVerbDesire,
|
||||
godanVerbNegativeDesire,
|
||||
godanVerbPastDesire,
|
||||
godanVerbNegativePastDesire,
|
||||
]);
|
||||
@@ -1,61 +0,0 @@
|
||||
import 'package:jadb/util/lemmatizer/lemmatizer.dart';
|
||||
|
||||
List<LemmatizationRule> iAdjectiveLemmatizationRules = [
|
||||
LemmatizationRule.simple(
|
||||
name: 'I adjective - base form',
|
||||
terminal: true,
|
||||
pattern: 'い',
|
||||
replacement: 'い',
|
||||
validChildClasses: [WordClass.iAdjective],
|
||||
wordClass: WordClass.iAdjective,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'I adjective - negative form',
|
||||
pattern: 'くない',
|
||||
replacement: 'い',
|
||||
validChildClasses: [WordClass.iAdjective],
|
||||
wordClass: WordClass.iAdjective,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'I adjective - past form',
|
||||
pattern: 'かった',
|
||||
replacement: 'い',
|
||||
validChildClasses: [WordClass.iAdjective],
|
||||
wordClass: WordClass.iAdjective,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'I adjective - negative past form',
|
||||
pattern: 'くなかった',
|
||||
replacement: 'い',
|
||||
validChildClasses: [WordClass.iAdjective],
|
||||
wordClass: WordClass.iAdjective,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'I adjective - te-form',
|
||||
pattern: 'くて',
|
||||
replacement: 'い',
|
||||
validChildClasses: [WordClass.iAdjective],
|
||||
wordClass: WordClass.iAdjective,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'I adjective - conditional form',
|
||||
pattern: 'ければ',
|
||||
replacement: 'い',
|
||||
validChildClasses: [WordClass.iAdjective],
|
||||
wordClass: WordClass.iAdjective,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'I adjective - volitional form',
|
||||
pattern: 'かろう',
|
||||
replacement: 'い',
|
||||
validChildClasses: [WordClass.iAdjective],
|
||||
wordClass: WordClass.iAdjective,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'I adjective - continuative form',
|
||||
pattern: 'く',
|
||||
replacement: 'い',
|
||||
validChildClasses: [WordClass.iAdjective],
|
||||
wordClass: WordClass.iAdjective,
|
||||
),
|
||||
];
|
||||
77
lib/util/lemmatizer/rules/i_adjectives.dart
Normal file
77
lib/util/lemmatizer/rules/i_adjectives.dart
Normal file
@@ -0,0 +1,77 @@
|
||||
import 'package:jadb/util/lemmatizer/lemmatizer.dart';
|
||||
|
||||
final LemmatizationRule iAdjectiveBase = LemmatizationRule.simple(
|
||||
name: 'I adjective - base form',
|
||||
terminal: true,
|
||||
pattern: 'い',
|
||||
replacement: 'い',
|
||||
validChildClasses: {WordClass.iAdjective},
|
||||
wordClass: WordClass.iAdjective,
|
||||
);
|
||||
|
||||
final LemmatizationRule iAdjectiveNegative = LemmatizationRule.simple(
|
||||
name: 'I adjective - negative form',
|
||||
pattern: 'くない',
|
||||
replacement: 'い',
|
||||
validChildClasses: {WordClass.iAdjective},
|
||||
wordClass: WordClass.iAdjective,
|
||||
);
|
||||
|
||||
final LemmatizationRule iAdjectivePast = LemmatizationRule.simple(
|
||||
name: 'I adjective - past form',
|
||||
pattern: 'かった',
|
||||
replacement: 'い',
|
||||
validChildClasses: {WordClass.iAdjective},
|
||||
wordClass: WordClass.iAdjective,
|
||||
);
|
||||
|
||||
final LemmatizationRule iAdjectiveNegativePast = LemmatizationRule.simple(
|
||||
name: 'I adjective - negative past form',
|
||||
pattern: 'くなかった',
|
||||
replacement: 'い',
|
||||
validChildClasses: {WordClass.iAdjective},
|
||||
wordClass: WordClass.iAdjective,
|
||||
);
|
||||
|
||||
final LemmatizationRule iAdjectiveTe = LemmatizationRule.simple(
|
||||
name: 'I adjective - te-form',
|
||||
pattern: 'くて',
|
||||
replacement: 'い',
|
||||
validChildClasses: {WordClass.iAdjective},
|
||||
wordClass: WordClass.iAdjective,
|
||||
);
|
||||
|
||||
final LemmatizationRule iAdjectiveConditional = LemmatizationRule.simple(
|
||||
name: 'I adjective - conditional form',
|
||||
pattern: 'ければ',
|
||||
replacement: 'い',
|
||||
validChildClasses: {WordClass.iAdjective},
|
||||
wordClass: WordClass.iAdjective,
|
||||
);
|
||||
|
||||
final LemmatizationRule iAdjectiveVolitional = LemmatizationRule.simple(
|
||||
name: 'I adjective - volitional form',
|
||||
pattern: 'かろう',
|
||||
replacement: 'い',
|
||||
validChildClasses: {WordClass.iAdjective},
|
||||
wordClass: WordClass.iAdjective,
|
||||
);
|
||||
|
||||
final LemmatizationRule iAdjectiveContinuative = LemmatizationRule.simple(
|
||||
name: 'I adjective - continuative form',
|
||||
pattern: 'く',
|
||||
replacement: 'い',
|
||||
validChildClasses: {WordClass.iAdjective},
|
||||
wordClass: WordClass.iAdjective,
|
||||
);
|
||||
|
||||
final List<LemmatizationRule> iAdjectiveLemmatizationRules = List.unmodifiable([
|
||||
iAdjectiveBase,
|
||||
iAdjectiveNegative,
|
||||
iAdjectivePast,
|
||||
iAdjectiveNegativePast,
|
||||
iAdjectiveTe,
|
||||
iAdjectiveConditional,
|
||||
iAdjectiveVolitional,
|
||||
iAdjectiveContinuative,
|
||||
]);
|
||||
@@ -1,241 +0,0 @@
|
||||
import 'package:jadb/util/lemmatizer/lemmatizer.dart';
|
||||
import 'package:jadb/util/text_filtering.dart';
|
||||
|
||||
List<Pattern> lookBehinds = [
|
||||
kanjiRegex,
|
||||
'き',
|
||||
'ぎ',
|
||||
'し',
|
||||
'じ',
|
||||
'ち',
|
||||
'ぢ',
|
||||
'に',
|
||||
'ひ',
|
||||
'び',
|
||||
'び',
|
||||
'み',
|
||||
'り',
|
||||
'け',
|
||||
'げ',
|
||||
'せ',
|
||||
'ぜ',
|
||||
'て',
|
||||
'で',
|
||||
'ね',
|
||||
'へ',
|
||||
'べ',
|
||||
'め',
|
||||
'れ',
|
||||
];
|
||||
|
||||
List<LemmatizationRule> ichidanVerbLemmatizationRules = [
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - base form',
|
||||
terminal: true,
|
||||
pattern: 'る',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative form',
|
||||
pattern: 'ない',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - past form',
|
||||
pattern: 'た',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - te-form',
|
||||
pattern: 'て',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - te-form with いる',
|
||||
pattern: 'ている',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - te-form with いた',
|
||||
pattern: 'ていた',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - conditional form',
|
||||
pattern: 'れば',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - volitional form',
|
||||
pattern: 'よう',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - potential form',
|
||||
pattern: 'られる',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - passive form',
|
||||
pattern: 'られる',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - causative form',
|
||||
pattern: 'させる',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - causative passive form',
|
||||
pattern: 'させられる',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - imperative form',
|
||||
pattern: 'れ',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative past form',
|
||||
pattern: 'なかった',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative te-form',
|
||||
pattern: 'なくて',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative conditional form',
|
||||
pattern: 'なければ',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative volitional form',
|
||||
pattern: 'なかろう',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative potential form',
|
||||
pattern: 'られない',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative passive form',
|
||||
pattern: 'られない',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative causative form',
|
||||
pattern: 'させない',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative causative passive form',
|
||||
pattern: 'させられない',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative imperative form',
|
||||
pattern: 'るな',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - desire form',
|
||||
pattern: 'たい',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative desire form',
|
||||
pattern: 'たくない',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - past desire form',
|
||||
pattern: 'たかった',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative past desire form',
|
||||
pattern: 'たくなかった',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: lookBehinds,
|
||||
validChildClasses: [WordClass.ichidanVerb],
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
),
|
||||
];
|
||||
331
lib/util/lemmatizer/rules/ichidan_verbs.dart
Normal file
331
lib/util/lemmatizer/rules/ichidan_verbs.dart
Normal file
@@ -0,0 +1,331 @@
|
||||
import 'package:jadb/util/lemmatizer/lemmatizer.dart';
|
||||
import 'package:jadb/util/text_filtering.dart';
|
||||
|
||||
final List<Pattern> _lookBehinds = [
|
||||
kanjiRegex,
|
||||
'き',
|
||||
'ぎ',
|
||||
'し',
|
||||
'じ',
|
||||
'ち',
|
||||
'ぢ',
|
||||
'に',
|
||||
'ひ',
|
||||
'び',
|
||||
'び',
|
||||
'み',
|
||||
'り',
|
||||
'け',
|
||||
'げ',
|
||||
'せ',
|
||||
'ぜ',
|
||||
'て',
|
||||
'で',
|
||||
'ね',
|
||||
'へ',
|
||||
'べ',
|
||||
'め',
|
||||
'れ',
|
||||
];
|
||||
|
||||
final LemmatizationRule ichidanVerbBase = LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - base form',
|
||||
terminal: true,
|
||||
pattern: 'る',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbNegative = LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative form',
|
||||
pattern: 'ない',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
final LemmatizationRule ichidanVerbPast = LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - past form',
|
||||
pattern: 'た',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbTe = LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - te-form',
|
||||
pattern: 'て',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbTeiru = LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - te-form with いる',
|
||||
pattern: 'ている',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbTeita = LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - te-form with いた',
|
||||
pattern: 'ていた',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbConditional = LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - conditional form',
|
||||
pattern: 'れば',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbVolitional = LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - volitional form',
|
||||
pattern: 'よう',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbPotential = LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - potential form',
|
||||
pattern: 'られる',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbPassive = LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - passive form',
|
||||
pattern: 'られる',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbCausative = LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - causative form',
|
||||
pattern: 'させる',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbCausativePassive = LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - causative passive form',
|
||||
pattern: 'させられる',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbImperative = LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - imperative form',
|
||||
pattern: 'れ',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbNegativePast = LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative past form',
|
||||
pattern: 'なかった',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbNegativeTe = LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative te-form',
|
||||
pattern: 'なくて',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbNegativeConditional =
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative conditional form',
|
||||
pattern: 'なければ',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbNegativeConditionalVariant1 =
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative conditional form (informal variant)',
|
||||
pattern: 'なきゃ',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbNegativeConditionalVariant2 =
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative conditional form (informal variant)',
|
||||
pattern: 'なくちゃ',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbNegativeConditionalVariant3 =
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative conditional form (informal variant)',
|
||||
pattern: 'ないと',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbNegativeVolitional =
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative volitional form',
|
||||
pattern: 'なかろう',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbNegativePotential = LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative potential form',
|
||||
pattern: 'られない',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbNegativePassive = LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative passive form',
|
||||
pattern: 'られない',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbNegativeCausative = LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative causative form',
|
||||
pattern: 'させない',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbNegativeCausativePassive =
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative causative passive form',
|
||||
pattern: 'させられない',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbNegativeImperative =
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative imperative form',
|
||||
pattern: 'るな',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbDesire = LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - desire form',
|
||||
pattern: 'たい',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbNegativeDesire = LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative desire form',
|
||||
pattern: 'たくない',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbPastDesire = LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - past desire form',
|
||||
pattern: 'たかった',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final LemmatizationRule ichidanVerbNegativePastDesire =
|
||||
LemmatizationRule.simple(
|
||||
name: 'Ichidan verb - negative past desire form',
|
||||
pattern: 'たくなかった',
|
||||
replacement: 'る',
|
||||
lookAheadBehind: _lookBehinds,
|
||||
validChildClasses: {WordClass.ichidanVerb},
|
||||
wordClass: WordClass.ichidanVerb,
|
||||
);
|
||||
|
||||
final List<LemmatizationRule> ichidanVerbLemmatizationRules =
|
||||
List.unmodifiable([
|
||||
ichidanVerbBase,
|
||||
ichidanVerbNegative,
|
||||
ichidanVerbPast,
|
||||
ichidanVerbTe,
|
||||
ichidanVerbTeiru,
|
||||
ichidanVerbTeita,
|
||||
ichidanVerbConditional,
|
||||
ichidanVerbVolitional,
|
||||
ichidanVerbPotential,
|
||||
ichidanVerbPassive,
|
||||
ichidanVerbCausative,
|
||||
ichidanVerbCausativePassive,
|
||||
ichidanVerbImperative,
|
||||
ichidanVerbNegativePast,
|
||||
ichidanVerbNegativeTe,
|
||||
ichidanVerbNegativeConditional,
|
||||
ichidanVerbNegativeConditionalVariant1,
|
||||
ichidanVerbNegativeConditionalVariant2,
|
||||
ichidanVerbNegativeConditionalVariant3,
|
||||
ichidanVerbNegativeVolitional,
|
||||
ichidanVerbNegativePotential,
|
||||
ichidanVerbNegativePassive,
|
||||
ichidanVerbNegativeCausative,
|
||||
ichidanVerbNegativeCausativePassive,
|
||||
ichidanVerbNegativeImperative,
|
||||
ichidanVerbDesire,
|
||||
ichidanVerbNegativeDesire,
|
||||
ichidanVerbPastDesire,
|
||||
ichidanVerbNegativePastDesire,
|
||||
]);
|
||||
@@ -1,9 +1,9 @@
|
||||
// Source: https://github.com/Kimtaro/ve/blob/master/lib/providers/japanese_transliterators.rb
|
||||
|
||||
const hiragana_syllabic_n = 'ん';
|
||||
const hiragana_small_tsu = 'っ';
|
||||
const hiraganaSyllabicN = 'ん';
|
||||
const hiraganaSmallTsu = 'っ';
|
||||
|
||||
const Map<String, String> hiragana_to_latin = {
|
||||
const Map<String, String> hiraganaToLatin = {
|
||||
'あ': 'a',
|
||||
'い': 'i',
|
||||
'う': 'u',
|
||||
@@ -209,7 +209,7 @@ const Map<String, String> hiragana_to_latin = {
|
||||
'ゟ': 'yori',
|
||||
};
|
||||
|
||||
const Map<String, String> latin_to_hiragana = {
|
||||
const Map<String, String> latinToHiragana = {
|
||||
'a': 'あ',
|
||||
'i': 'い',
|
||||
'u': 'う',
|
||||
@@ -481,12 +481,13 @@ const Map<String, String> latin_to_hiragana = {
|
||||
'#~': '〜',
|
||||
};
|
||||
|
||||
bool _smallTsu(String forConversion) => forConversion == hiragana_small_tsu;
|
||||
bool _smallTsu(String forConversion) => forConversion == hiraganaSmallTsu;
|
||||
bool _nFollowedByYuYeYo(String forConversion, String kana) =>
|
||||
forConversion == hiragana_syllabic_n &&
|
||||
forConversion == hiraganaSyllabicN &&
|
||||
kana.length > 1 &&
|
||||
'やゆよ'.contains(kana.substring(1, 2));
|
||||
|
||||
/// Transliterates a string of hiragana characters to Latin script (romaji).
|
||||
String transliterateHiraganaToLatin(String hiragana) {
|
||||
String kana = hiragana;
|
||||
String romaji = '';
|
||||
@@ -505,7 +506,7 @@ String transliterateHiraganaToLatin(String hiragana) {
|
||||
} else if (_nFollowedByYuYeYo(forConversion, kana)) {
|
||||
mora = "n'";
|
||||
}
|
||||
mora ??= hiragana_to_latin[forConversion];
|
||||
mora ??= hiraganaToLatin[forConversion];
|
||||
|
||||
if (mora != null) {
|
||||
if (geminate) {
|
||||
@@ -524,15 +525,61 @@ String transliterateHiraganaToLatin(String hiragana) {
|
||||
return romaji;
|
||||
}
|
||||
|
||||
/// Returns a list of pairs of indices into the input and output strings,
|
||||
/// indicating which characters in the input string correspond to which characters in the output string.
|
||||
List<(int, int)> transliterateHiraganaToLatinSpan(String hiragana) {
|
||||
String kana = hiragana;
|
||||
String romaji = '';
|
||||
final List<(int, int)> spans = [];
|
||||
bool geminate = false;
|
||||
int kanaIndex = 0;
|
||||
|
||||
while (kana.isNotEmpty) {
|
||||
final lengths = [if (kana.length > 1) 2, 1];
|
||||
for (final length in lengths) {
|
||||
final String forConversion = kana.substring(0, length);
|
||||
String? mora;
|
||||
|
||||
if (_smallTsu(forConversion)) {
|
||||
geminate = true;
|
||||
kana = kana.replaceRange(0, length, '');
|
||||
break;
|
||||
} else if (_nFollowedByYuYeYo(forConversion, kana)) {
|
||||
mora = "n'";
|
||||
}
|
||||
mora ??= hiraganaToLatin[forConversion];
|
||||
|
||||
if (mora != null) {
|
||||
if (geminate) {
|
||||
geminate = false;
|
||||
romaji += mora.substring(0, 1);
|
||||
}
|
||||
spans.add((kanaIndex, romaji.length));
|
||||
romaji += mora;
|
||||
kana = kana.replaceRange(0, length, '');
|
||||
kanaIndex += length;
|
||||
break;
|
||||
} else if (length == 1) {
|
||||
spans.add((kanaIndex, romaji.length));
|
||||
romaji += forConversion;
|
||||
kana = kana.replaceRange(0, length, '');
|
||||
kanaIndex += length;
|
||||
}
|
||||
}
|
||||
}
|
||||
return spans;
|
||||
}
|
||||
|
||||
bool _doubleNFollowedByAIUEO(String forConversion) =>
|
||||
RegExp(r'^nn[aiueo]$').hasMatch(forConversion);
|
||||
bool _hasTableMatch(String forConversion) =>
|
||||
latin_to_hiragana[forConversion] != null;
|
||||
latinToHiragana[forConversion] != null;
|
||||
bool _hasDoubleConsonant(String forConversion, int length) =>
|
||||
forConversion == 'tch' ||
|
||||
(length == 2 &&
|
||||
RegExp(r'^([kgsztdnbpmyrlwchf])\1$').hasMatch(forConversion));
|
||||
|
||||
/// Transliterates a string of Latin script (romaji) to hiragana characters.
|
||||
String transliterateLatinToHiragana(String latin) {
|
||||
String romaji = latin
|
||||
.toLowerCase()
|
||||
@@ -549,12 +596,12 @@ String transliterateLatinToHiragana(String latin) {
|
||||
final String forConversion = romaji.substring(0, length);
|
||||
|
||||
if (_doubleNFollowedByAIUEO(forConversion)) {
|
||||
mora = hiragana_syllabic_n;
|
||||
mora = hiraganaSyllabicN;
|
||||
forRemoval = 1;
|
||||
} else if (_hasTableMatch(forConversion)) {
|
||||
mora = latin_to_hiragana[forConversion];
|
||||
mora = latinToHiragana[forConversion];
|
||||
} else if (_hasDoubleConsonant(forConversion, length)) {
|
||||
mora = hiragana_small_tsu;
|
||||
mora = hiraganaSmallTsu;
|
||||
forRemoval = 1;
|
||||
}
|
||||
|
||||
@@ -572,6 +619,53 @@ String transliterateLatinToHiragana(String latin) {
|
||||
return kana;
|
||||
}
|
||||
|
||||
/// Returns a list of pairs of indices into the input and output strings,
|
||||
/// indicating which characters in the input string correspond to which characters in the output string.
|
||||
List<(int, int)> transliterateLatinToHiraganaSpan(String latin) {
|
||||
String romaji = latin
|
||||
.toLowerCase()
|
||||
.replaceAll('mb', 'nb')
|
||||
.replaceAll('mp', 'np');
|
||||
String kana = '';
|
||||
final List<(int, int)> spans = [];
|
||||
int latinIndex = 0;
|
||||
|
||||
while (romaji.isNotEmpty) {
|
||||
final lengths = [if (romaji.length > 2) 3, if (romaji.length > 1) 2, 1];
|
||||
|
||||
for (final length in lengths) {
|
||||
String? mora;
|
||||
int forRemoval = length;
|
||||
final String forConversion = romaji.substring(0, length);
|
||||
|
||||
if (_doubleNFollowedByAIUEO(forConversion)) {
|
||||
mora = hiraganaSyllabicN;
|
||||
forRemoval = 1;
|
||||
} else if (_hasTableMatch(forConversion)) {
|
||||
mora = latinToHiragana[forConversion];
|
||||
} else if (_hasDoubleConsonant(forConversion, length)) {
|
||||
mora = hiraganaSmallTsu;
|
||||
forRemoval = 1;
|
||||
}
|
||||
|
||||
if (mora != null) {
|
||||
spans.add((latinIndex, kana.length));
|
||||
kana += mora;
|
||||
romaji = romaji.replaceRange(0, forRemoval, '');
|
||||
latinIndex += forRemoval;
|
||||
break;
|
||||
} else if (length == 1) {
|
||||
spans.add((latinIndex, kana.length));
|
||||
kana += forConversion;
|
||||
romaji = romaji.replaceRange(0, 1, '');
|
||||
latinIndex += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return spans;
|
||||
}
|
||||
|
||||
String _transposeCodepointsInRange(
|
||||
String text,
|
||||
int distance,
|
||||
@@ -583,15 +677,19 @@ String _transposeCodepointsInRange(
|
||||
),
|
||||
);
|
||||
|
||||
/// Transliterates a string of kana characters (hiragana or katakana) to Latin script (romaji).
|
||||
String transliterateKanaToLatin(String kana) =>
|
||||
transliterateHiraganaToLatin(transliterateKatakanaToHiragana(kana));
|
||||
|
||||
/// Transliterates a string of Latin script (romaji) to katakana characters.
|
||||
String transliterateLatinToKatakana(String latin) =>
|
||||
transliterateHiraganaToKatakana(transliterateLatinToHiragana(latin));
|
||||
|
||||
/// Transliterates a string of katakana characters to hiragana characters.
|
||||
String transliterateKatakanaToHiragana(String katakana) =>
|
||||
_transposeCodepointsInRange(katakana, -96, 12449, 12534);
|
||||
|
||||
/// Transliterates a string of hiragana characters to katakana characters.
|
||||
String transliterateHiraganaToKatakana(String hiragana) =>
|
||||
_transposeCodepointsInRange(hiragana, 96, 12353, 12438);
|
||||
|
||||
|
||||
@@ -1,3 +1,16 @@
|
||||
CREATE TABLE "JMdict_Version" (
|
||||
"version" VARCHAR(10) PRIMARY KEY NOT NULL,
|
||||
"date" DATE NOT NULL,
|
||||
"hash" VARCHAR(64) NOT NULL
|
||||
) WITHOUT ROWID;
|
||||
|
||||
CREATE TRIGGER "JMdict_Version_SingleRow"
|
||||
BEFORE INSERT ON "JMdict_Version"
|
||||
WHEN (SELECT COUNT(*) FROM "JMdict_Version") >= 1
|
||||
BEGIN
|
||||
SELECT RAISE(FAIL, 'Only one row allowed in JMdict_Version');
|
||||
END;
|
||||
|
||||
CREATE TABLE "JMdict_InfoDialect" (
|
||||
"id" VARCHAR(4) PRIMARY KEY NOT NULL,
|
||||
"description" TEXT NOT NULL
|
||||
@@ -40,19 +53,18 @@ CREATE TABLE "JMdict_Entry" (
|
||||
|
||||
CREATE TABLE "JMdict_KanjiElement" (
|
||||
"elementId" INTEGER PRIMARY KEY,
|
||||
"entryId" INTEGER NOT NULL REFERENCES "JMdict_Entry"("entryId"),
|
||||
"orderNum" INTEGER NOT NULL,
|
||||
"entryId" INTEGER NOT NULL GENERATED ALWAYS AS ("elementId" / 100) STORED,
|
||||
"orderNum" INTEGER NOT NULL GENERATED ALWAYS AS ("elementId" % 100) VIRTUAL,
|
||||
"reading" TEXT NOT NULL,
|
||||
"news" INTEGER CHECK ("news" BETWEEN 1 AND 2),
|
||||
"ichi" INTEGER CHECK ("ichi" BETWEEN 1 AND 2),
|
||||
"spec" INTEGER CHECK ("spec" BETWEEN 1 AND 2),
|
||||
"gai" INTEGER CHECK ("gai" BETWEEN 1 AND 2),
|
||||
"nf" INTEGER CHECK ("nf" BETWEEN 1 AND 48),
|
||||
UNIQUE("entryId", "reading"),
|
||||
UNIQUE("entryId", "orderNum")
|
||||
FOREIGN KEY ("entryId") REFERENCES "JMdict_Entry"("entryId"),
|
||||
UNIQUE("entryId", "reading")
|
||||
) WITHOUT ROWID;
|
||||
|
||||
CREATE INDEX "JMdict_KanjiElement_byEntryId_byOrderNum" ON "JMdict_KanjiElement"("entryId", "orderNum");
|
||||
CREATE INDEX "JMdict_KanjiElement_byReading" ON "JMdict_KanjiElement"("reading");
|
||||
|
||||
CREATE TABLE "JMdict_KanjiElementInfo" (
|
||||
@@ -65,8 +77,8 @@ CREATE TABLE "JMdict_KanjiElementInfo" (
|
||||
|
||||
CREATE TABLE "JMdict_ReadingElement" (
|
||||
"elementId" INTEGER PRIMARY KEY,
|
||||
"entryId" INTEGER NOT NULL REFERENCES "JMdict_Entry"("entryId"),
|
||||
"orderNum" INTEGER NOT NULL,
|
||||
"entryId" INTEGER NOT NULL GENERATED ALWAYS AS (("elementId" / 100) % 10000000) STORED,
|
||||
"orderNum" INTEGER NOT NULL GENERATED ALWAYS AS ("elementId" % 100) VIRTUAL,
|
||||
"reading" TEXT NOT NULL,
|
||||
"readingDoesNotMatchKanji" BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
"news" INTEGER CHECK ("news" BETWEEN 1 AND 2),
|
||||
@@ -74,11 +86,10 @@ CREATE TABLE "JMdict_ReadingElement" (
|
||||
"spec" INTEGER CHECK ("spec" BETWEEN 1 AND 2),
|
||||
"gai" INTEGER CHECK ("gai" BETWEEN 1 AND 2),
|
||||
"nf" INTEGER CHECK ("nf" BETWEEN 1 AND 48),
|
||||
UNIQUE("entryId", "reading"),
|
||||
UNIQUE("entryId", "orderNum")
|
||||
FOREIGN KEY ("entryId") REFERENCES "JMdict_Entry"("entryId"),
|
||||
UNIQUE("entryId", "reading")
|
||||
) WITHOUT ROWID;
|
||||
|
||||
CREATE INDEX "JMdict_ReadingElement_byEntryId_byOrderNum" ON "JMdict_ReadingElement"("entryId", "orderNum");
|
||||
CREATE INDEX "JMdict_ReadingElement_byReading" ON "JMdict_ReadingElement"("reading");
|
||||
|
||||
CREATE TABLE "JMdict_ReadingElementRestriction" (
|
||||
@@ -97,27 +108,22 @@ CREATE TABLE "JMdict_ReadingElementInfo" (
|
||||
|
||||
CREATE TABLE "JMdict_Sense" (
|
||||
"senseId" INTEGER PRIMARY KEY,
|
||||
"entryId" INTEGER NOT NULL REFERENCES "JMdict_Entry"("entryId"),
|
||||
"orderNum" INTEGER NOT NULL,
|
||||
"entryId" INTEGER NOT NULL GENERATED ALWAYS AS ("senseId" / 100) STORED,
|
||||
"orderNum" INTEGER NOT NULL GENERATED ALWAYS AS ("senseId" % 100) VIRTUAL,
|
||||
FOREIGN KEY ("entryId") REFERENCES "JMdict_Entry"("entryId"),
|
||||
UNIQUE("entryId", "orderNum")
|
||||
);
|
||||
|
||||
CREATE INDEX "JMdict_Sense_byEntryId_byOrderNum" ON "JMdict_Sense"("entryId", "orderNum");
|
||||
|
||||
CREATE TABLE "JMdict_SenseRestrictedToKanji" (
|
||||
"entryId" INTEGER NOT NULL,
|
||||
"senseId" INTEGER NOT NULL REFERENCES "JMdict_Sense"("senseId"),
|
||||
"kanji" TEXT NOT NULL,
|
||||
FOREIGN KEY ("entryId", "kanji") REFERENCES "JMdict_KanjiElement"("entryId", "reading"),
|
||||
PRIMARY KEY ("entryId", "senseId", "kanji")
|
||||
"kanjiElementId" INTEGER NOT NULL REFERENCES "JMdict_KanjiElement"("elementId"),
|
||||
PRIMARY KEY ("senseId", "kanjiElementId")
|
||||
) WITHOUT ROWID;
|
||||
|
||||
CREATE TABLE "JMdict_SenseRestrictedToReading" (
|
||||
"entryId" INTEGER NOT NULL,
|
||||
"senseId" INTEGER NOT NULL REFERENCES "JMdict_Sense"("senseId"),
|
||||
"reading" TEXT NOT NULL,
|
||||
FOREIGN KEY ("entryId", "reading") REFERENCES "JMdict_ReadingElement"("entryId", "reading"),
|
||||
PRIMARY KEY ("entryId", "senseId", "reading")
|
||||
"readingElementId" INTEGER NOT NULL REFERENCES "JMdict_ReadingElement"("elementId"),
|
||||
PRIMARY KEY ("senseId", "readingElementId")
|
||||
) WITHOUT ROWID;
|
||||
|
||||
-- In order to add xrefs, you will need to have added the entry to xref to.
|
||||
@@ -139,9 +145,18 @@ CREATE TABLE "JMdict_SenseSeeAlso" (
|
||||
-- For some entries, the cross reference is ambiguous. This means that while the ingestion
|
||||
-- has determined some xrefEntryId, it is not guaranteed to be the correct one.
|
||||
"ambiguous" BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
|
||||
"seeAlsoSenseKey" INTEGER GENERATED ALWAYS AS (
|
||||
CASE
|
||||
WHEN "seeAlsoSense" IS NOT NULL THEN ("xrefEntryId" * 100) + "seeAlsoSense"
|
||||
ELSE NULL
|
||||
END
|
||||
) VIRTUAL,
|
||||
|
||||
FOREIGN KEY ("xrefEntryId", "seeAlsoKanji") REFERENCES "JMdict_KanjiElement"("entryId", "reading"),
|
||||
FOREIGN KEY ("xrefEntryId", "seeAlsoReading") REFERENCES "JMdict_ReadingElement"("entryId", "reading"),
|
||||
FOREIGN KEY ("xrefEntryId", "seeAlsoSense") REFERENCES "JMdict_Sense"("entryId", "orderNum"),
|
||||
FOREIGN KEY ("seeAlsoSenseKey") REFERENCES "JMdict_Sense"("senseId"),
|
||||
|
||||
UNIQUE("senseId", "xrefEntryId", "seeAlsoReading", "seeAlsoKanji", "seeAlsoSense")
|
||||
);
|
||||
|
||||
@@ -154,9 +169,18 @@ CREATE TABLE "JMdict_SenseAntonym" (
|
||||
-- For some entries, the cross reference is ambiguous. This means that while the ingestion
|
||||
-- has determined some xrefEntryId, it is not guaranteed to be the correct one.
|
||||
"ambiguous" BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
|
||||
"antonymSenseKey" INTEGER GENERATED ALWAYS AS (
|
||||
CASE
|
||||
WHEN "antonymSense" IS NOT NULL THEN ("xrefEntryId" * 100) + "antonymSense"
|
||||
ELSE NULL
|
||||
END
|
||||
) VIRTUAL,
|
||||
|
||||
FOREIGN KEY ("xrefEntryId", "antonymKanji") REFERENCES "JMdict_KanjiElement"("entryId", "reading"),
|
||||
FOREIGN KEY ("xrefEntryId", "antonymReading") REFERENCES "JMdict_ReadingElement"("entryId", "reading"),
|
||||
FOREIGN KEY ("xrefEntryId", "antonymSense") REFERENCES "JMdict_Sense"("entryId", "orderNum"),
|
||||
FOREIGN KEY ("antonymSenseKey") REFERENCES "JMdict_Sense"("senseId"),
|
||||
|
||||
UNIQUE("senseId", "xrefEntryId", "antonymReading", "antonymKanji", "antonymSense")
|
||||
);
|
||||
|
||||
|
||||
@@ -1,3 +1,16 @@
|
||||
CREATE TABLE "JMdict_JLPT_Version" (
|
||||
"version" VARCHAR(10) PRIMARY KEY NOT NULL,
|
||||
"date" DATE NOT NULL,
|
||||
"hash" VARCHAR(64) NOT NULL
|
||||
) WITHOUT ROWID;
|
||||
|
||||
CREATE TRIGGER "JMdict_JLPT_Version_SingleRow"
|
||||
BEFORE INSERT ON "JMdict_JLPT_Version"
|
||||
WHEN (SELECT COUNT(*) FROM "JMdict_JLPT_Version") >= 1
|
||||
BEGIN
|
||||
SELECT RAISE(FAIL, 'Only one row allowed in JMdict_JLPT_Version');
|
||||
END;
|
||||
|
||||
CREATE TABLE "JMdict_JLPTTag" (
|
||||
"entryId" INTEGER NOT NULL,
|
||||
"jlptLevel" CHAR(2) NOT NULL CHECK ("jlptLevel" in ('N5', 'N4', 'N3', 'N2', 'N1')),
|
||||
|
||||
@@ -1,26 +1,28 @@
|
||||
CREATE TABLE "JMdict_EntryScore" (
|
||||
"type" CHAR(1) NOT NULL CHECK ("type" IN ('r', 'k')),
|
||||
"entryId" INTEGER NOT NULL REFERENCES "JMdict_Entry"("entryId"),
|
||||
"elementId" INTEGER NOT NULL,
|
||||
"elementId" INTEGER PRIMARY KEY,
|
||||
"score" INTEGER NOT NULL DEFAULT 0,
|
||||
"common" BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
PRIMARY KEY ("type", "elementId")
|
||||
|
||||
"entryId" INTEGER NOT NULL GENERATED ALWAYS AS (("elementId" / 100) % 10000000) STORED,
|
||||
"type" CHAR(1) NOT NULL GENERATED ALWAYS AS (CASE
|
||||
WHEN "elementId" / 1000000000 = 0 THEN 'k'
|
||||
ELSE 'r'
|
||||
END) VIRTUAL,
|
||||
|
||||
FOREIGN KEY ("entryId") REFERENCES "JMdict_Entry"("entryId")
|
||||
) WITHOUT ROWID;
|
||||
|
||||
CREATE INDEX "JMdict_EntryScore_byElementId_byScore" ON "JMdict_EntryScore"("elementId", "score");
|
||||
CREATE INDEX "JMdict_EntryScore_byScore" ON "JMdict_EntryScore"("score");
|
||||
CREATE INDEX "JMdict_EntryScore_byCommon" ON "JMdict_EntryScore"("common");
|
||||
|
||||
CREATE INDEX "JMdict_EntryScore_byType_byElementId_byScore" ON "JMdict_EntryScore"("type", "elementId", "score");
|
||||
CREATE INDEX "JMdict_EntryScore_byType_byScore" ON "JMdict_EntryScore"("type", "score");
|
||||
CREATE INDEX "JMdict_EntryScore_byType_byCommon" ON "JMdict_EntryScore"("type", "common");
|
||||
CREATE INDEX "JMdict_EntryScore_byElementId_byCommon" ON "JMdict_EntryScore"("elementId", "common");
|
||||
CREATE INDEX "JMdict_EntryScore_byCommon" ON "JMdict_EntryScore"("common");
|
||||
|
||||
-- NOTE: these views are deduplicated in order not to perform an unnecessary
|
||||
-- UNION on every trigger
|
||||
|
||||
CREATE VIEW "JMdict_EntryScoreView_Reading" AS
|
||||
SELECT
|
||||
'r' AS "type",
|
||||
"JMdict_ReadingElement"."entryId",
|
||||
"JMdict_ReadingElement"."elementId",
|
||||
(
|
||||
@@ -44,7 +46,7 @@ SELECT
|
||||
+ (("spec" IS 2) * 5)
|
||||
+ (("gai" IS 1) * 10)
|
||||
+ (("gai" IS 2) * 5)
|
||||
+ (("orderNum" IS 1) * 20)
|
||||
+ (("orderNum" IS 0) * 20)
|
||||
- (substr(COALESCE("JMdict_JLPTTag"."jlptLevel", 'N0'), 2) * -5)
|
||||
AS "score"
|
||||
FROM "JMdict_ReadingElement"
|
||||
@@ -52,7 +54,6 @@ LEFT JOIN "JMdict_JLPTTag" USING ("entryId");
|
||||
|
||||
CREATE VIEW "JMdict_EntryScoreView_Kanji" AS
|
||||
SELECT
|
||||
'k' AS "type",
|
||||
"JMdict_KanjiElement"."entryId",
|
||||
"JMdict_KanjiElement"."elementId",
|
||||
(
|
||||
@@ -76,7 +77,7 @@ SELECT
|
||||
+ (("spec" IS 2) * 5)
|
||||
+ (("gai" IS 1) * 10)
|
||||
+ (("gai" IS 2) * 5)
|
||||
+ (("orderNum" IS 1) * 20)
|
||||
+ (("orderNum" IS 0) * 20)
|
||||
- (substr(COALESCE("JMdict_JLPTTag"."jlptLevel", 'N0'), 2) * -5)
|
||||
AS "score"
|
||||
FROM "JMdict_KanjiElement"
|
||||
@@ -96,19 +97,17 @@ CREATE TRIGGER "JMdict_EntryScore_Insert_JMdict_ReadingElement"
|
||||
AFTER INSERT ON "JMdict_ReadingElement"
|
||||
BEGIN
|
||||
INSERT INTO "JMdict_EntryScore" (
|
||||
"type",
|
||||
"entryId",
|
||||
"elementId",
|
||||
"score",
|
||||
"common"
|
||||
)
|
||||
SELECT "type", "entryId", "elementId", "score", "common"
|
||||
SELECT "elementId", "score", "common"
|
||||
FROM "JMdict_EntryScoreView_Reading"
|
||||
WHERE "elementId" = NEW."elementId";
|
||||
END;
|
||||
|
||||
CREATE TRIGGER "JMdict_EntryScore_Update_JMdict_ReadingElement"
|
||||
AFTER UPDATE OF "news", "ichi", "spec", "gai", "nf", "orderNum"
|
||||
AFTER UPDATE OF "news", "ichi", "spec", "gai", "nf", "elementId"
|
||||
ON "JMdict_ReadingElement"
|
||||
BEGIN
|
||||
UPDATE "JMdict_EntryScore"
|
||||
@@ -123,8 +122,7 @@ CREATE TRIGGER "JMdict_EntryScore_Delete_JMdict_ReadingElement"
|
||||
AFTER DELETE ON "JMdict_ReadingElement"
|
||||
BEGIN
|
||||
DELETE FROM "JMdict_EntryScore"
|
||||
WHERE "type" = 'r'
|
||||
AND "elementId" = OLD."elementId";
|
||||
WHERE "elementId" = OLD."elementId";
|
||||
END;
|
||||
|
||||
--- JMdict_KanjiElement triggers
|
||||
@@ -133,19 +131,17 @@ CREATE TRIGGER "JMdict_EntryScore_Insert_JMdict_KanjiElement"
|
||||
AFTER INSERT ON "JMdict_KanjiElement"
|
||||
BEGIN
|
||||
INSERT INTO "JMdict_EntryScore" (
|
||||
"type",
|
||||
"entryId",
|
||||
"elementId",
|
||||
"score",
|
||||
"common"
|
||||
)
|
||||
SELECT "type", "entryId", "elementId", "score", "common"
|
||||
SELECT "elementId", "score", "common"
|
||||
FROM "JMdict_EntryScoreView_Kanji"
|
||||
WHERE "elementId" = NEW."elementId";
|
||||
END;
|
||||
|
||||
CREATE TRIGGER "JMdict_EntryScore_Update_JMdict_KanjiElement"
|
||||
AFTER UPDATE OF "news", "ichi", "spec", "gai", "nf", "orderNum"
|
||||
AFTER UPDATE OF "news", "ichi", "spec", "gai", "nf", "elementId"
|
||||
ON "JMdict_KanjiElement"
|
||||
BEGIN
|
||||
UPDATE "JMdict_EntryScore"
|
||||
@@ -160,8 +156,7 @@ CREATE TRIGGER "JMdict_EntryScore_Delete_JMdict_KanjiElement"
|
||||
AFTER DELETE ON "JMdict_KanjiElement"
|
||||
BEGIN
|
||||
DELETE FROM "JMdict_EntryScore"
|
||||
WHERE "type" = 'k'
|
||||
AND "elementId" = OLD."elementId";
|
||||
WHERE "elementId" = OLD."elementId";
|
||||
END;
|
||||
|
||||
--- JMdict_JLPTTag triggers
|
||||
|
||||
@@ -1,3 +1,16 @@
|
||||
CREATE TABLE "RADKFILE_Version" (
|
||||
"version" VARCHAR(10) PRIMARY KEY NOT NULL,
|
||||
"date" DATE NOT NULL,
|
||||
"hash" VARCHAR(64) NOT NULL
|
||||
) WITHOUT ROWID;
|
||||
|
||||
CREATE TRIGGER "RADKFILE_Version_SingleRow"
|
||||
BEFORE INSERT ON "RADKFILE_Version"
|
||||
WHEN (SELECT COUNT(*) FROM "RADKFILE_Version") >= 1
|
||||
BEGIN
|
||||
SELECT RAISE(FAIL, 'Only one row allowed in RADKFILE_Version');
|
||||
END;
|
||||
|
||||
CREATE TABLE "RADKFILE" (
|
||||
"kanji" CHAR(1) NOT NULL,
|
||||
"radical" CHAR(1) NOT NULL,
|
||||
|
||||
@@ -1,3 +1,16 @@
|
||||
CREATE TABLE "KANJIDIC_Version" (
|
||||
"version" VARCHAR(10) PRIMARY KEY NOT NULL,
|
||||
"date" DATE NOT NULL,
|
||||
"hash" VARCHAR(64) NOT NULL
|
||||
) WITHOUT ROWID;
|
||||
|
||||
CREATE TRIGGER "KANJIDIC_Version_SingleRow"
|
||||
BEFORE INSERT ON "KANJIDIC_Version"
|
||||
WHEN (SELECT COUNT(*) FROM "KANJIDIC_Version") >= 1
|
||||
BEGIN
|
||||
SELECT RAISE(FAIL, 'Only one row allowed in KANJIDIC_Version');
|
||||
END;
|
||||
|
||||
CREATE TABLE "KANJIDIC_Character" (
|
||||
"literal" CHAR(1) NOT NULL PRIMARY KEY,
|
||||
"grade" INTEGER CHECK ("grade" BETWEEN 1 AND 10),
|
||||
@@ -6,6 +19,21 @@ CREATE TABLE "KANJIDIC_Character" (
|
||||
"jlpt" INTEGER
|
||||
) WITHOUT ROWID;
|
||||
|
||||
CREATE TABLE "KANJIDIC_Grade" (
|
||||
"kanji" CHAR(1) NOT NULL PRIMARY KEY REFERENCES "KANJIDIC_Character"("literal"),
|
||||
"grade" INTEGER NOT NULL CHECK ("grade" BETWEEN 1 AND 10)
|
||||
) WITHOUT ROWID;
|
||||
|
||||
CREATE TABLE "KANJIDIC_Frequency" (
|
||||
"kanji" CHAR(1) NOT NULL PRIMARY KEY REFERENCES "KANJIDIC_Character"("literal"),
|
||||
"frequency" INTEGER NOT NULL
|
||||
) WITHOUT ROWID;
|
||||
|
||||
CREATE TABLE "KANJIDIC_JLPT" (
|
||||
"kanji" CHAR(1) NOT NULL PRIMARY KEY REFERENCES "KANJIDIC_Character"("literal"),
|
||||
"jlpt" INTEGER NOT NULL CHECK ("jlpt" BETWEEN 1 AND 5)
|
||||
) WITHOUT ROWID;
|
||||
|
||||
CREATE TABLE "KANJIDIC_Codepoint" (
|
||||
"kanji" CHAR(1) NOT NULL REFERENCES "KANJIDIC_Character"("literal"),
|
||||
"type" VARCHAR(6) NOT NULL CHECK ("type" IN ('jis208', 'jis212', 'jis213', 'ucs')),
|
||||
|
||||
@@ -32,9 +32,9 @@ SELECT
|
||||
THEN "JMdict_ReadingElement"."reading"
|
||||
ELSE NULL
|
||||
END AS "furigana",
|
||||
COALESCE("JMdict_KanjiElement"."orderNum", 1)
|
||||
COALESCE("JMdict_KanjiElement"."orderNum", 0)
|
||||
+ "JMdict_ReadingElement"."orderNum"
|
||||
= 2
|
||||
= 0
|
||||
AS "isFirst",
|
||||
"JMdict_KanjiElement"."orderNum" AS "kanjiOrderNum",
|
||||
"JMdict_ReadingElement"."orderNum" AS "readingOrderNum"
|
||||
@@ -65,9 +65,7 @@ JOIN "JMdict_KanjiElement"
|
||||
ON "JMdict_KanjiElementFTS"."entryId" = "JMdict_KanjiElement"."entryId"
|
||||
AND "JMdict_KanjiElementFTS"."reading" LIKE '%' || "JMdict_KanjiElement"."reading"
|
||||
JOIN "JMdict_EntryScore"
|
||||
ON "JMdict_EntryScore"."type" = 'k'
|
||||
AND "JMdict_KanjiElement"."entryId" = "JMdict_EntryScore"."entryId"
|
||||
AND "JMdict_KanjiElement"."reading" = "JMdict_EntryScore"."reading"
|
||||
ON "JMdict_EntryScore"."elementId" = "JMdict_KanjiElement"."elementId"
|
||||
WHERE "JMdict_EntryScore"."common" = 1;
|
||||
|
||||
|
||||
|
||||
45
migrations/0011_KanjiVG.sql
Normal file
45
migrations/0011_KanjiVG.sql
Normal file
@@ -0,0 +1,45 @@
|
||||
CREATE TABLE "KanjiVG_Version" (
|
||||
"version" VARCHAR(10) PRIMARY KEY NOT NULL,
|
||||
"date" DATE NOT NULL,
|
||||
"hash" VARCHAR(64) NOT NULL
|
||||
) WITHOUT ROWID;
|
||||
|
||||
CREATE TRIGGER "KanjiVG_Version_SingleRow"
|
||||
BEFORE INSERT ON "KanjiVG_Version"
|
||||
WHEN (SELECT COUNT(*) FROM "KanjiVG_Version") >= 1
|
||||
BEGIN
|
||||
SELECT RAISE(FAIL, 'Only one row allowed in KanjiVG_Version');
|
||||
END;
|
||||
|
||||
CREATE TABLE "KanjiVG_Entry" (
|
||||
"character" CHAR(1) PRIMARY KEY NOT NULL
|
||||
) WITHOUT ROWID;
|
||||
|
||||
CREATE TABLE "KanjiVG_StrokeNumber" (
|
||||
"character" CHAR(1) NOT NULL REFERENCES "KanjiVG_Entry"("character"),
|
||||
"strokeNum" INTEGER NOT NULL,
|
||||
"x" REAL NOT NULL,
|
||||
"y" REAL NOT NULL,
|
||||
PRIMARY KEY ("character", "strokeNum")
|
||||
) WITHOUT ROWID;
|
||||
|
||||
CREATE TABLE "KanjiVG_Path" (
|
||||
"character" CHAR(1) NOT NULL REFERENCES "KanjiVG_Entry"("character"),
|
||||
"pathId" TEXT NOT NULL,
|
||||
"type" VARCHAR(10) NOT NULL,
|
||||
"svgPath" TEXT NOT NULL,
|
||||
PRIMARY KEY ("character", "pathId")
|
||||
) WITHOUT ROWID;
|
||||
|
||||
CREATE TABLE "KanjiVG_PathGroup" (
|
||||
"character" CHAR(1) NOT NULL REFERENCES "KanjiVG_Entry"("character"),
|
||||
"groupId" TEXT NOT NULL,
|
||||
"parentGroupId" TEXT REFERENCES "KanjiVG_PathGroup"("groupId"),
|
||||
"element" TEXT,
|
||||
"original" TEXT,
|
||||
"position" VARCHAR(10),
|
||||
"radical" TEXT,
|
||||
"part" INTEGER,
|
||||
PRIMARY KEY ("character", "groupId"),
|
||||
CHECK ("position" IN ('bottom', 'kamae', 'kamaec', 'left', 'middle', 'nyo', 'nyoc', 'right', 'tare', 'tarec', 'top') OR "position" IS NULL)
|
||||
) WITHOUT ROWID;
|
||||
@@ -6,6 +6,7 @@
|
||||
jmdict,
|
||||
radkfile,
|
||||
kanjidic2,
|
||||
tanos-jlpt,
|
||||
sqlite,
|
||||
wal ? false,
|
||||
}:
|
||||
@@ -21,10 +22,11 @@ stdenvNoCC.mkDerivation {
|
||||
buildPhase = ''
|
||||
runHook preBuild
|
||||
|
||||
mkdir -p data/tmp
|
||||
ln -s "${jmdict}"/* data/tmp
|
||||
ln -s "${radkfile}"/* data/tmp
|
||||
ln -s "${kanjidic2}"/* data/tmp
|
||||
mkdir -p data
|
||||
ln -s '${jmdict}'/* data/
|
||||
ln -s '${radkfile}'/* data/
|
||||
ln -s '${kanjidic2}'/* data/
|
||||
ln -s '${tanos-jlpt}' data/tanos-jlpt
|
||||
|
||||
for migration in migrations/*.sql; do
|
||||
sqlite3 jadb.sqlite < "$migration"
|
||||
|
||||
@@ -1,46 +0,0 @@
|
||||
{
|
||||
stdenvNoCC,
|
||||
jmdict-src,
|
||||
jmdict-with-examples-src,
|
||||
xmlformat,
|
||||
gzip,
|
||||
edrdgMetadata,
|
||||
}:
|
||||
stdenvNoCC.mkDerivation {
|
||||
name = "jmdict";
|
||||
|
||||
dontUnpack = true;
|
||||
srcs = [
|
||||
jmdict-src
|
||||
jmdict-with-examples-src
|
||||
];
|
||||
|
||||
nativeBuildInputs = [
|
||||
gzip
|
||||
xmlformat
|
||||
];
|
||||
|
||||
buildPhase = ''
|
||||
runHook preBuild
|
||||
|
||||
gzip -dkc "${jmdict-src}" > JMdict.xml
|
||||
gzip -dkc "${jmdict-with-examples-src}" > JMdict_with_examples.xml
|
||||
xmlformat -i JMdict.xml
|
||||
xmlformat -i JMdict_with_examples.xml
|
||||
|
||||
runHook postBuild
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
runHook preInstall
|
||||
|
||||
install -Dt "$out" JMdict.xml JMdict_with_examples.xml
|
||||
|
||||
runHook postInstall
|
||||
'';
|
||||
|
||||
meta = edrdgMetadata // {
|
||||
description = "A Japanese-Multilingual Dictionary providing lexical data for japanese words";
|
||||
homepage = "https://www.edrdg.org/jmdict/j_jmdict.html";
|
||||
};
|
||||
}
|
||||
@@ -1,40 +0,0 @@
|
||||
{
|
||||
stdenvNoCC,
|
||||
kanjidic2-src,
|
||||
xmlformat,
|
||||
gzip,
|
||||
edrdgMetadata,
|
||||
}:
|
||||
stdenvNoCC.mkDerivation {
|
||||
name = "kanjidic2";
|
||||
|
||||
src = kanjidic2-src;
|
||||
dontUnpack = true;
|
||||
|
||||
nativeBuildInputs = [
|
||||
gzip
|
||||
xmlformat
|
||||
];
|
||||
|
||||
buildPhase = ''
|
||||
runHook preBuild
|
||||
|
||||
gzip -dkc "${kanjidic2-src}" > kanjidic2.xml
|
||||
xmlformat -i kanjidic2.xml
|
||||
|
||||
runHook postBuild
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
runHook preInstall
|
||||
|
||||
install -Dt "$out" kanjidic2.xml
|
||||
|
||||
runHook postInstall
|
||||
'';
|
||||
|
||||
meta = edrdgMetadata // {
|
||||
description = "A consolidated XML-format kanji database";
|
||||
homepage = "https://www.edrdg.org/kanjidic/kanjd2index_legacy.html";
|
||||
};
|
||||
}
|
||||
@@ -1,40 +0,0 @@
|
||||
{
|
||||
stdenv,
|
||||
radkfile-src,
|
||||
gzip,
|
||||
iconv,
|
||||
edrdgMetadata,
|
||||
}:
|
||||
stdenv.mkDerivation {
|
||||
name = "radkfile";
|
||||
|
||||
src = radkfile-src;
|
||||
dontUnpack = true;
|
||||
|
||||
nativeBuildInputs = [
|
||||
gzip
|
||||
iconv
|
||||
];
|
||||
|
||||
buildPhase = ''
|
||||
runHook preBuild
|
||||
|
||||
gzip -dkc "$src" > radkfile
|
||||
iconv -f EUC-JP -t UTF-8 -o radkfile_utf8 radkfile
|
||||
|
||||
runHook postBuild
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
runHook preInstall
|
||||
|
||||
install -Dt "$out" radkfile_utf8
|
||||
|
||||
runHook postInstall
|
||||
'';
|
||||
|
||||
meta = edrdgMetadata // {
|
||||
description = "A file providing searchable decompositions of kanji characters";
|
||||
homepage = "https://www.edrdg.org/krad/kradinf.html";
|
||||
};
|
||||
}
|
||||
56
pubspec.lock
56
pubspec.lock
@@ -5,18 +5,18 @@ packages:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: _fe_analyzer_shared
|
||||
sha256: "3b19a47f6ea7c2632760777c78174f47f6aec1e05f0cd611380d4593b8af1dbc"
|
||||
sha256: "8d718c5c58904f9937290fd5dbf2d6a0e02456867706bfb6cd7b81d394e738d5"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "96.0.0"
|
||||
version: "98.0.0"
|
||||
analyzer:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: analyzer
|
||||
sha256: "0c516bc4ad36a1a75759e54d5047cb9d15cded4459df01aa35a0b5ec7db2c2a0"
|
||||
sha256: "6141ad5d092d1e1d13929c0504658bbeccc1703505830d7c26e859908f5efc88"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "10.2.0"
|
||||
version: "12.0.0"
|
||||
args:
|
||||
dependency: "direct main"
|
||||
description:
|
||||
@@ -29,10 +29,18 @@ packages:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: async
|
||||
sha256: "758e6d74e971c3e5aceb4110bfd6698efc7f501675bcfe0c775459a8140750eb"
|
||||
sha256: e2eb0491ba5ddb6177742d2da23904574082139b07c1e33b8503b9f46f3e1a37
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "2.13.0"
|
||||
version: "2.13.1"
|
||||
benchmark_harness:
|
||||
dependency: "direct dev"
|
||||
description:
|
||||
name: benchmark_harness
|
||||
sha256: a2d3c4c83cac0126bf38e41eaf7bd9ed4f6635f1ee1a0cbc6f79fa9736c62cbd
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "2.4.0"
|
||||
boolean_selector:
|
||||
dependency: transitive
|
||||
description:
|
||||
@@ -93,10 +101,10 @@ packages:
|
||||
dependency: "direct main"
|
||||
description:
|
||||
name: csv
|
||||
sha256: bef2950f7a753eb82f894a2eabc3072e73cf21c17096296a5a992797e50b1d0d
|
||||
sha256: "2e0a52fb729f2faacd19c9c0c954ff450bba37aa8ab999410309e2342e7013a2"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "7.1.0"
|
||||
version: "8.0.0"
|
||||
equatable:
|
||||
dependency: "direct main"
|
||||
description:
|
||||
@@ -141,10 +149,10 @@ packages:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: hooks
|
||||
sha256: "7a08a0d684cb3b8fb604b78455d5d352f502b68079f7b80b831c62220ab0a4f6"
|
||||
sha256: e79ed1e8e1929bc6ecb6ec85f0cb519c887aa5b423705ded0d0f2d9226def388
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "1.0.1"
|
||||
version: "1.0.2"
|
||||
http_multi_server:
|
||||
dependency: transitive
|
||||
description:
|
||||
@@ -189,18 +197,18 @@ packages:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: matcher
|
||||
sha256: "12956d0ad8390bbcc63ca2e1469c0619946ccb52809807067a7020d57e647aa6"
|
||||
sha256: dc0b7dc7651697ea4ff3e69ef44b0407ea32c487a39fff6a4004fa585e901861
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "0.12.18"
|
||||
version: "0.12.19"
|
||||
meta:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: meta
|
||||
sha256: "9f29b9bcc8ee287b1a31e0d01be0eae99a930dbffdaecf04b3f3d82a969f296f"
|
||||
sha256: df0c643f44ad098eb37988027a8e2b2b5a031fd3977f06bbfd3a76637e8df739
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "1.18.1"
|
||||
version: "1.18.2"
|
||||
mime:
|
||||
dependency: transitive
|
||||
description:
|
||||
@@ -213,10 +221,10 @@ packages:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: native_toolchain_c
|
||||
sha256: "89e83885ba09da5fdf2cdacc8002a712ca238c28b7f717910b34bcd27b0d03ac"
|
||||
sha256: "6ba77bb18063eebe9de401f5e6437e95e1438af0a87a3a39084fbd37c90df572"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "0.17.4"
|
||||
version: "0.17.6"
|
||||
node_preamble:
|
||||
dependency: transitive
|
||||
description:
|
||||
@@ -341,10 +349,10 @@ packages:
|
||||
dependency: "direct main"
|
||||
description:
|
||||
name: sqlite3
|
||||
sha256: b7cf6b37667f6a921281797d2499ffc60fb878b161058d422064f0ddc78f6aa6
|
||||
sha256: caa693ad15a587a2b4fde093b728131a1827903872171089dedb16f7665d3a91
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "3.1.6"
|
||||
version: "3.2.0"
|
||||
stack_trace:
|
||||
dependency: transitive
|
||||
description:
|
||||
@@ -389,26 +397,26 @@ packages:
|
||||
dependency: "direct dev"
|
||||
description:
|
||||
name: test
|
||||
sha256: "54c516bbb7cee2754d327ad4fca637f78abfc3cbcc5ace83b3eda117e42cd71a"
|
||||
sha256: "8d9ceddbab833f180fbefed08afa76d7c03513dfdba87ffcec2718b02bbcbf20"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "1.29.0"
|
||||
version: "1.31.0"
|
||||
test_api:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: test_api
|
||||
sha256: "93167629bfc610f71560ab9312acdda4959de4df6fac7492c89ff0d3886f6636"
|
||||
sha256: "949a932224383300f01be9221c39180316445ecb8e7547f70a41a35bf421fb9e"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "0.7.9"
|
||||
version: "0.7.11"
|
||||
test_core:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: test_core
|
||||
sha256: "394f07d21f0f2255ec9e3989f21e54d3c7dc0e6e9dbce160e5a9c1a6be0e2943"
|
||||
sha256: "1991d4cfe85d5043241acac92962c3977c8d2f2add1ee73130c7b286417d1d34"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "0.6.15"
|
||||
version: "0.6.17"
|
||||
typed_data:
|
||||
dependency: transitive
|
||||
description:
|
||||
|
||||
@@ -9,7 +9,7 @@ environment:
|
||||
dependencies:
|
||||
args: ^2.7.0
|
||||
collection: ^1.19.0
|
||||
csv: ^7.1.0
|
||||
csv: ^8.0.0
|
||||
equatable: ^2.0.0
|
||||
path: ^1.9.1
|
||||
sqflite_common: ^2.5.0
|
||||
@@ -18,6 +18,7 @@ dependencies:
|
||||
xml: ^6.5.0
|
||||
|
||||
dev_dependencies:
|
||||
benchmark_harness: ^2.4.0
|
||||
lints: ^6.0.0
|
||||
test: ^1.25.15
|
||||
|
||||
|
||||
21
test/const_data/kanji_grades.dart
Normal file
21
test/const_data/kanji_grades.dart
Normal file
@@ -0,0 +1,21 @@
|
||||
import 'package:collection/collection.dart';
|
||||
import 'package:jadb/const_data/kanji_grades.dart';
|
||||
import 'package:test/test.dart';
|
||||
|
||||
void main() {
|
||||
test('All constant kanji in jouyouKanjiByGrades are 2136 in total', () {
|
||||
expect(jouyouKanjiByGrades.values.flattenedToSet.length, 2136);
|
||||
});
|
||||
|
||||
// test('All constant kanji in jouyouKanjiByGrades are present in KANJIDIC2', () {
|
||||
|
||||
// });
|
||||
|
||||
// test('All constant kanji in jouyouKanjiByGrades have matching grade as in KANJIDIC2', () {
|
||||
|
||||
// });
|
||||
|
||||
// test('All constant kanji in jouyouKanjiByGradesAndStrokeCount have matching stroke count as in KANJIDIC2', () {
|
||||
|
||||
// });
|
||||
}
|
||||
17
test/const_data/radicals_test.dart
Normal file
17
test/const_data/radicals_test.dart
Normal file
@@ -0,0 +1,17 @@
|
||||
import 'package:collection/collection.dart';
|
||||
import 'package:jadb/const_data/radicals.dart';
|
||||
import 'package:test/test.dart';
|
||||
|
||||
void main() {
|
||||
test('All constant radicals are 253 in total', () {
|
||||
expect(radicals.values.flattenedToSet.length, 253);
|
||||
});
|
||||
|
||||
// test('All constant radicals have at least 1 associated kanji in KANJIDIC2', () {
|
||||
|
||||
// });
|
||||
|
||||
// test('All constant radicals match the stroke order listed in KANJIDIC2', () {
|
||||
|
||||
// });
|
||||
}
|
||||
@@ -1,9 +0,0 @@
|
||||
import 'package:collection/collection.dart';
|
||||
import 'package:jadb/const_data/kanji_grades.dart';
|
||||
import 'package:test/test.dart';
|
||||
|
||||
void main() {
|
||||
test('Assert 2136 kanji in jouyou set', () {
|
||||
expect(JOUYOU_KANJI_BY_GRADES.values.flattenedToSet.length, 2136);
|
||||
});
|
||||
}
|
||||
@@ -1,21 +1,20 @@
|
||||
import 'dart:ffi';
|
||||
import 'dart:io';
|
||||
|
||||
import 'package:jadb/models/create_empty_db.dart';
|
||||
import 'package:jadb/search.dart';
|
||||
import 'package:sqflite_common_ffi/sqflite_ffi.dart';
|
||||
// import 'package:sqlite3/open.dart';
|
||||
import 'package:test/test.dart';
|
||||
|
||||
Future<DatabaseExecutor> setup_inmemory_database() async {
|
||||
final dbConnection = await createDatabaseFactoryFfi().openDatabase(':memory:');
|
||||
Future<DatabaseExecutor> setupInMemoryDatabase() async {
|
||||
final dbConnection = await createDatabaseFactoryFfi().openDatabase(
|
||||
':memory:',
|
||||
);
|
||||
|
||||
return dbConnection;
|
||||
}
|
||||
|
||||
void main() {
|
||||
test('Create empty db', () async {
|
||||
final db = await setup_inmemory_database();
|
||||
final db = await setupInMemoryDatabase();
|
||||
|
||||
await createEmptyDb(db);
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ import 'setup_database_connection.dart';
|
||||
|
||||
void main() {
|
||||
test('Filter kanji', () async {
|
||||
final connection = await setup_database_connection();
|
||||
final connection = await setupDatabaseConnection();
|
||||
|
||||
final result = await connection.filterKanji([
|
||||
'a',
|
||||
@@ -26,4 +26,27 @@ void main() {
|
||||
|
||||
expect(result.join(), '漢字地字');
|
||||
});
|
||||
|
||||
test('Filter kanji - deduplicate', () async {
|
||||
final connection = await setupDatabaseConnection();
|
||||
|
||||
final result = await connection.filterKanji([
|
||||
'a',
|
||||
'b',
|
||||
'c',
|
||||
'漢',
|
||||
'字',
|
||||
'地',
|
||||
'字',
|
||||
'か',
|
||||
'な',
|
||||
'.',
|
||||
'!',
|
||||
'@',
|
||||
';',
|
||||
'々',
|
||||
], deduplicate: true);
|
||||
|
||||
expect(result.join(), '漢字地');
|
||||
});
|
||||
}
|
||||
|
||||
@@ -6,16 +6,16 @@ import 'setup_database_connection.dart';
|
||||
|
||||
void main() {
|
||||
test('Search a kanji', () async {
|
||||
final connection = await setup_database_connection();
|
||||
final connection = await setupDatabaseConnection();
|
||||
|
||||
final result = await connection.jadbSearchKanji('漢');
|
||||
expect(result, isNotNull);
|
||||
});
|
||||
|
||||
group('Search all jouyou kanji', () {
|
||||
JOUYOU_KANJI_BY_GRADES.forEach((grade, characters) {
|
||||
jouyouKanjiByGrades.forEach((grade, characters) {
|
||||
test('Search all kanji in grade $grade', () async {
|
||||
final connection = await setup_database_connection();
|
||||
final connection = await setupDatabaseConnection();
|
||||
|
||||
for (final character in characters) {
|
||||
final result = await connection.jadbSearchKanji(character);
|
||||
|
||||
@@ -3,7 +3,7 @@ import 'dart:io';
|
||||
import 'package:jadb/_data_ingestion/open_local_db.dart';
|
||||
import 'package:sqflite_common/sqlite_api.dart';
|
||||
|
||||
Future<Database> setup_database_connection() async {
|
||||
Future<Database> setupDatabaseConnection() async {
|
||||
final libSqlitePath = Platform.environment['LIBSQLITE_PATH'];
|
||||
final jadbPath = Platform.environment['JADB_PATH'];
|
||||
|
||||
|
||||
@@ -5,43 +5,43 @@ import 'setup_database_connection.dart';
|
||||
|
||||
void main() {
|
||||
test('Search a word - english - auto', () async {
|
||||
final connection = await setup_database_connection();
|
||||
final connection = await setupDatabaseConnection();
|
||||
final result = await connection.jadbSearchWord('kana');
|
||||
expect(result, isNotNull);
|
||||
});
|
||||
|
||||
test('Get word search count - english - auto', () async {
|
||||
final connection = await setup_database_connection();
|
||||
final connection = await setupDatabaseConnection();
|
||||
final result = await connection.jadbSearchWordCount('kana');
|
||||
expect(result, isNotNull);
|
||||
});
|
||||
|
||||
test('Search a word - japanese kana - auto', () async {
|
||||
final connection = await setup_database_connection();
|
||||
final connection = await setupDatabaseConnection();
|
||||
final result = await connection.jadbSearchWord('かな');
|
||||
expect(result, isNotNull);
|
||||
});
|
||||
|
||||
test('Get word search count - japanese kana - auto', () async {
|
||||
final connection = await setup_database_connection();
|
||||
final connection = await setupDatabaseConnection();
|
||||
final result = await connection.jadbSearchWordCount('かな');
|
||||
expect(result, isNotNull);
|
||||
});
|
||||
|
||||
test('Search a word - japanese kanji - auto', () async {
|
||||
final connection = await setup_database_connection();
|
||||
final connection = await setupDatabaseConnection();
|
||||
final result = await connection.jadbSearchWord('仮名');
|
||||
expect(result, isNotNull);
|
||||
});
|
||||
|
||||
test('Get word search count - japanese kanji - auto', () async {
|
||||
final connection = await setup_database_connection();
|
||||
final connection = await setupDatabaseConnection();
|
||||
final result = await connection.jadbSearchWordCount('仮名');
|
||||
expect(result, isNotNull);
|
||||
});
|
||||
|
||||
test('Get a word by id', () async {
|
||||
final connection = await setup_database_connection();
|
||||
final connection = await setupDatabaseConnection();
|
||||
final result = await connection.jadbGetWordById(1577090);
|
||||
expect(result, isNotNull);
|
||||
});
|
||||
@@ -49,7 +49,7 @@ void main() {
|
||||
test(
|
||||
'Serialize all words',
|
||||
() async {
|
||||
final connection = await setup_database_connection();
|
||||
final connection = await setupDatabaseConnection();
|
||||
|
||||
// Test serializing all words
|
||||
for (final letter in 'aiueoksthnmyrw'.split('')) {
|
||||
|
||||
51
test/util/lemmatizer/lemmatizer_test.dart
Normal file
51
test/util/lemmatizer/lemmatizer_test.dart
Normal file
@@ -0,0 +1,51 @@
|
||||
import 'package:jadb/util/lemmatizer/lemmatizer.dart';
|
||||
import 'package:jadb/util/lemmatizer/rules/godan_verbs.dart';
|
||||
import 'package:jadb/util/lemmatizer/rules/ichidan_verbs.dart';
|
||||
import 'package:test/test.dart';
|
||||
|
||||
const List<String> ichidanVerbs = [
|
||||
'食べる',
|
||||
'食べた',
|
||||
'食べさせられた',
|
||||
'食べたい',
|
||||
'食べたくない',
|
||||
'食べたくなかった',
|
||||
];
|
||||
const List<String> godanVerbs = [
|
||||
'泳ぐ',
|
||||
'泳いだ',
|
||||
'泳げる',
|
||||
// '泳げれた',
|
||||
];
|
||||
|
||||
bool findRuleRecursively(Lemmatized result, LemmatizationRule expectedRule) {
|
||||
if (result.rule == expectedRule) {
|
||||
return true;
|
||||
}
|
||||
|
||||
for (final c in result.children) {
|
||||
if (findRuleRecursively(c, expectedRule)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
void main() {
|
||||
group('Lemmatize Ichidan Verbs', () {
|
||||
for (final v in ichidanVerbs) {
|
||||
test('Lemmatize Ichidan Verb $v', () {
|
||||
expect(findRuleRecursively(lemmatize(v), ichidanVerbBase), true);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
group('Lemmatize Godan Verbs', () {
|
||||
for (final v in godanVerbs) {
|
||||
test('Lemmatize Godan Verb $v', () {
|
||||
expect(findRuleRecursively(lemmatize(v), godanVerbBase), true);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
14
test/util/lemmatizer/rules/godan_verbs_test.dart
Normal file
14
test/util/lemmatizer/rules/godan_verbs_test.dart
Normal file
@@ -0,0 +1,14 @@
|
||||
import 'package:jadb/util/lemmatizer/rules/godan_verbs.dart';
|
||||
import 'package:test/test.dart';
|
||||
|
||||
void main() {
|
||||
test('Test Godan Verb Base Rule', () {
|
||||
expect(godanVerbBase.matches('泳ぐ'), true);
|
||||
expect(godanVerbBase.apply('泳ぐ'), ['泳ぐ']);
|
||||
});
|
||||
|
||||
test('Test Godan Verb Negative Rule', () {
|
||||
expect(godanVerbNegative.matches('泳がない'), true);
|
||||
expect(godanVerbNegative.apply('泳がない'), ['泳ぐ']);
|
||||
});
|
||||
}
|
||||
15
test/util/lemmatizer/rules/i_adjectives_test.dart
Normal file
15
test/util/lemmatizer/rules/i_adjectives_test.dart
Normal file
@@ -0,0 +1,15 @@
|
||||
import 'package:jadb/util/lemmatizer/rules/i_adjectives.dart';
|
||||
import 'package:test/test.dart';
|
||||
|
||||
void main() {
|
||||
test('Test i-adjective Base Rule', () {
|
||||
expect(iAdjectiveBase.matches('怪しい'), true);
|
||||
expect(iAdjectiveBase.apply('怪しい'), ['怪しい']);
|
||||
});
|
||||
|
||||
|
||||
test('Test i-adjective Negative Rule', () {
|
||||
expect(iAdjectiveNegative.matches('怪しくない'), true);
|
||||
expect(iAdjectiveNegative.apply('怪しくない'), ['怪しい']);
|
||||
});
|
||||
}
|
||||
14
test/util/lemmatizer/rules/ichidan_verbs_test.dart
Normal file
14
test/util/lemmatizer/rules/ichidan_verbs_test.dart
Normal file
@@ -0,0 +1,14 @@
|
||||
import 'package:jadb/util/lemmatizer/rules/ichidan_verbs.dart';
|
||||
import 'package:test/test.dart';
|
||||
|
||||
void main() {
|
||||
test('Test Ichidan Verb Base Rule', () {
|
||||
expect(ichidanVerbBase.matches('食べる'), true);
|
||||
expect(ichidanVerbBase.apply('食べる'), ['食べる']);
|
||||
});
|
||||
|
||||
test('Test Ichidan Verb Negative Rule', () {
|
||||
expect(ichidanVerbNegative.matches('食べない'), true);
|
||||
expect(ichidanVerbNegative.apply('食べない'), ['食べる']);
|
||||
});
|
||||
}
|
||||
15
test/util/lemmatizer/rules_test.dart
Normal file
15
test/util/lemmatizer/rules_test.dart
Normal file
@@ -0,0 +1,15 @@
|
||||
import 'package:jadb/util/lemmatizer/lemmatizer.dart';
|
||||
import 'package:jadb/util/lemmatizer/rules.dart';
|
||||
import 'package:test/test.dart';
|
||||
|
||||
void main() {
|
||||
test('Assert lemmatizerRulesByWordClass is correct', () {
|
||||
for (final entry in lemmatizationRulesByWordClass.entries) {
|
||||
final WordClass wordClass = entry.key;
|
||||
final List<LemmatizationRule> rules = entry.value;
|
||||
for (final LemmatizationRule rule in rules) {
|
||||
expect(wordClass, rule.wordClass);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -37,6 +37,35 @@ void main() {
|
||||
});
|
||||
});
|
||||
|
||||
group('Romaji -> Hiragana Spans', () {
|
||||
void Function() expectSpans(String input, List<String> expected) => () {
|
||||
final result = transliterateLatinToHiraganaSpan(input);
|
||||
final trans = transliterateLatinToHiragana(input);
|
||||
for (int i = 0; i < result.length; i++) {
|
||||
expect(
|
||||
trans.substring(
|
||||
result[i].$2,
|
||||
i == result.length - 1 ? trans.length : result[i + 1].$2,
|
||||
),
|
||||
expected[i],
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
test('Basic test', expectSpans('katamari', ['か', 'た', 'ま', 'り']));
|
||||
test(
|
||||
'Basic test with diacritics',
|
||||
expectSpans('gadamari', ['が', 'だ', 'ま', 'り']),
|
||||
);
|
||||
test('wi and we', expectSpans('wiwe', ['うぃ', 'うぇ']));
|
||||
test('nb = mb', expectSpans('kanpai', ['か', 'ん', 'ぱ', 'い']));
|
||||
test('nb = mb', expectSpans('kampai', ['か', 'ん', 'ぱ', 'い']));
|
||||
test('Double n', expectSpans('konnichiha', ['こ', 'ん', 'に', 'ち', 'は']));
|
||||
|
||||
// TODO: fix the implementation
|
||||
// test('Double consonant', expectSpans('kappa', ['か', 'っぱ']));
|
||||
});
|
||||
|
||||
group('Hiragana -> Romaji', () {
|
||||
test('Basic test', () {
|
||||
final result = transliterateHiraganaToLatin('かたまり');
|
||||
@@ -63,4 +92,31 @@ void main() {
|
||||
expect(result, 'kappa');
|
||||
});
|
||||
});
|
||||
|
||||
group('Hiragana -> Romaji Spans', () {
|
||||
void Function() expectSpans(String input, List<String> expected) => () {
|
||||
final result = transliterateHiraganaToLatinSpan(input);
|
||||
final trans = transliterateHiraganaToLatin(input);
|
||||
for (int i = 0; i < result.length; i++) {
|
||||
expect(
|
||||
trans.substring(
|
||||
result[i].$2,
|
||||
i == result.length - 1 ? trans.length : result[i + 1].$2,
|
||||
),
|
||||
expected[i],
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
test('Basic test', expectSpans('かたまり', ['ka', 'ta', 'ma', 'ri']));
|
||||
test(
|
||||
'Basic test with diacritics',
|
||||
expectSpans('がだまり', ['ga', 'da', 'ma', 'ri']),
|
||||
);
|
||||
test('wi and we', expectSpans('うぃうぇ', ['whi', 'whe']));
|
||||
test('Double n', expectSpans('こんにちは', ['ko', 'n', 'ni', 'chi', 'ha']));
|
||||
|
||||
// TODO: fix the implementation
|
||||
// test('Double consonant', expectSpans('かっぱ', ['ka', 'ppa']));
|
||||
});
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user