initial

src/main.rs (new file, 238 lines)

@@ -0,0 +1,238 @@
use reqwest_middleware::{ClientBuilder, ClientWithMiddleware};
use reqwest_retry::{policies::ExponentialBackoff, RetryTransientMiddleware};
use response_deserializer::{ChapterImages, SearchResult};

mod response_deserializer;
mod select;
mod util;

use response_deserializer::{Chapter, Id};
use select::Entry;

const BASE: &str = "https://api.mangadex.org";
type Client = ClientWithMiddleware;

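// Interactive MangaDex downloader: prompt for a search query, list the matching
// manga with cover previews, let the user pick chapters or volumes, then download
// the selected pages into the `images/` directory.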
#[tokio::main]
async fn main() {
    let input = util::get_input("Enter search query: ");
    let retry_policy = ExponentialBackoff::builder().build_with_max_retries(3);
    let client = ClientBuilder::new(
        reqwest::Client::builder()
            .user_agent("Chrome/127")
            .build()
            .unwrap(),
    )
    .with(RetryTransientMiddleware::new_with_policy(retry_policy))
    .build();
    let filters = [
        // ("publicationDemographic[]", "seinen"),
        // ("status[]", "completed"),
        // ("contentRating[]", "suggestive"),
    ];
    let results = search(&client, &input, &filters).await;
    let mut entries = vec![];
    for result in results.data.iter() {
        let mut entry = Entry::new(result.attributes.title.en.clone());
        if let Some(year) = result.attributes.year {
            entry.add_info("year", year);
        }
        let id = result.id.to_string();
        entry.add_info("id", &id);
        entry.add_info("status", result.attributes.status.to_string());
        entry.add_info(
            "content rating",
            result.attributes.content_rating.to_string(),
        );
        if let Some(chapters) = result.attributes.last_chapter {
            entry.add_info("chapters", chapters);
        }
        if let Some(volumes) = result.attributes.last_volume {
            entry.add_info("volumes", volumes);
        }
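        // NOTE: this assumes the cover_art relationship (requested via
        // `includes[]=cover_art` in `search`) sits at index 2 of `relationships`;
        // the MangaDex API does not guarantee that ordering.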
        if let Some(cover_data) = &result.relationships[2].attributes {
            let data = client
                .get(format!(
                    "https://uploads.mangadex.org/covers/{id}/{}",
                    &cover_data.file_name
                ))
                .send()
                .await
                .unwrap()
                .bytes()
                .await
                .unwrap();
            let result = util::convert_to_sixel(&data);

            entry.add_image(result);
        }
        entries.push(entry);
    }
    let choice = select::select(&entries).unwrap();
    let choice_id = &results.data[choice as usize].id;
    let bonus = loop {
        match util::get_input("Read bonus chapters? [y/n] : ").as_str() {
            "y" | "yes" => break true,
            "n" | "no" => break false,
            _ => continue,
        }
    };
    let mut chapters = match get_chapters(&client, choice_id).await {
        Ok(v) => v,
        Err(e) => {
            eprintln!("ERROR: {:#?}", e);
            std::process::exit(1);
        }
    };
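    // Sort chapters numerically; entries with no parseable chapter number fall
    // back to -1.0 so they sort ahead of everything else.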
    chapters.sort_by(|a, b| {
        a.attributes
            .chapter
            .unwrap_or(-1.)
            .partial_cmp(&b.attributes.chapter.unwrap_or(-1.))
            .unwrap()
    });

    let selection_type = loop {
        match util::get_input("Select by volume or chapter? [v/c] : ").as_str() {
            "v" | "volume" => break util::SelectionType::Volume(util::choose_volumes()),
            "c" | "chapter" => break util::SelectionType::Chapter(util::choose_chapters()),
            _ => {
                eprintln!("Invalid input");
                continue;
            }
        }
    };
    let selected_chapters =
        util::get_chapters_from_selection(util::Selection::new(selection_type, bonus), &chapters);

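    // Each selected chapter needs a request to /at-home/server/{chapter_id}, which
    // returns the image-server base URL plus the hash and file names of its pages.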
    let mut chapter_json_futures = vec![];
    for chapter in &selected_chapters {
        let chapter_id = &chapter.id;
        let client = &client;
        let future = async move {
            client
                .get(format!("{BASE}/at-home/server/{}", chapter_id))
                .send()
                .await
                .unwrap()
                .text()
                .await
                .unwrap()
        };
        chapter_json_futures.push(future);
    }

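    // Resolve all the /at-home/server requests concurrently and parse each response.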
    let chapters_image_data: Vec<ChapterImages> = futures::future::join_all(chapter_json_futures)
        .await
        .iter()
        .map(|m| response_deserializer::deserialize_chapter_images(m))
        .collect();

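    // Download the selected chapters concurrently; each future resolves to the
    // decoded page images for one chapter.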
    let mut chapter_futures = vec![];
    for (i, image_data) in chapters_image_data.iter().enumerate() {
        chapter_futures.push(download_chapter_images(
            &client,
            image_data,
            selected_chapters[i],
        ));
    }
    let chapters = futures::future::join_all(chapter_futures).await;

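    // Write every page to the `images/` directory with zero-padded chapter and
    // page indices so the files sort in reading order; the directory must already exist.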
    for (i, chapter) in chapters.iter().enumerate() {
        match chapter {
            Ok(chapter) => {
                for (j, image) in chapter.iter().enumerate() {
                    image
                        .save(format!("images/chapter{:0>3}_image_{:0>3}.png", i, j))
                        .unwrap();
                }
            }
            Err(e) => {
                panic!("{}", e);
            }
        }
    }
}

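/// Fetches every page of `chapter` from the image server described by `image_data`
/// and decodes the raw bytes into `image::DynamicImage`s. Page downloads for a
/// single chapter run concurrently via `join_all`.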
async fn download_chapter_images(
    client: &Client,
    image_data: &ChapterImages,
    chapter: &Chapter,
) -> Result<Vec<image::DynamicImage>, reqwest_middleware::Error> {
    let mut data_futures = vec![];
    for (i, file_name) in image_data.chapter.data.iter().enumerate() {
        let base_url: &str = image_data.base_url.as_str();
        let hash: &str = image_data.chapter.hash.as_str();
        let future = async move {
            let data = client
                .get(format!("{base_url}/data/{hash}/{file_name}"))
                .send()
                .await
                .unwrap()
                .bytes()
                .await
                .unwrap();
            println!(
                "Downloaded volume: {:?}, chapter: {:?}, title: {}, [{}/{}]",
                chapter.attributes.volume,
                chapter.attributes.chapter,
                chapter.attributes.title,
                i + 1,
                chapter.attributes.pages
            );
            data
        };
        data_futures.push(future);
    }
    Ok(futures::future::join_all(data_futures)
        .await
        .iter()
        .map(|m| image::load_from_memory(m).unwrap())
        .collect())
}

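/// Fetches the English-translated chapter feed for the manga `id`, following the
/// API's limit/offset pagination until every chapter has been collected.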
async fn get_chapters(client: &Client, id: &Id) -> Result<Vec<Chapter>, reqwest_middleware::Error> {
    let limit = 100;
    let limit = limit.to_string();
    let params = [("limit", limit.as_str()), ("translatedLanguage[]", "en")];
    let url = format!("{BASE}/manga/{id}/feed");
    let json = client.get(url).query(&params).send().await?.text().await?;
    let mut result = response_deserializer::deserialize_chapter_feed(&json);

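    // The feed returns at most `limit` chapters per request, so keep requesting
    // with an increasing offset until `total` chapters have been received.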
    let mut total_chapters_received = result.limit;
    while total_chapters_received < result.total {
        let offset = total_chapters_received.to_string();
        let params = [
            ("limit", limit.as_str()),
            ("translatedLanguage[]", "en"),
            ("offset", offset.as_str()),
        ];
        let url = format!("{BASE}/manga/{id}/feed");
        let json = client.get(url).query(&params).send().await?.text().await?;
        let mut new_result = response_deserializer::deserialize_chapter_feed(&json);
        result.data.append(&mut new_result.data);
        total_chapters_received += result.limit;
    }
    assert_eq!(result.data.len(), result.total as usize);
    Ok(result.data)
}

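/// Searches /manga for `query`, merging in any extra `filters`, and requests
/// `includes[]=cover_art` so each result carries its cover-art relationship.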
async fn search(client: &Client, query: &str, filters: &[(&str, &str)]) -> SearchResult {
    let limit = 10;
    let limit = limit.to_string();
    let params = [
        ("title", query),
        ("limit", limit.as_str()),
        ("includes[]", "cover_art"),
    ];
    let json = client
        .get(format!("{BASE}/manga"))
        .query(&params)
        .query(filters)
        .send()
        .await
        .unwrap()
        .text()
        .await
        .unwrap();
    response_deserializer::deserializer(&json)
}