2024-08-10 03:28:47 +02:00
commit 692cb43e9f
6 changed files with 1592 additions and 0 deletions

5
.gitignore vendored Normal file

@@ -0,0 +1,5 @@
/target
Cargo.lock
/images
*.jpg
*.png

17
Cargo.toml Normal file

@@ -0,0 +1,17 @@
[package]
name = "manga-cli"
version = "0.1.0"
edition = "2021"
[dependencies]
chrono = "0.4.38"
crossterm = "0.28.1"
futures = "0.3.30"
icy_sixel = "0.1.2"
image = { version = "0.25.2", default-features = false, features = ["jpeg", "png"] }
reqwest = "0.12.5"
reqwest-middleware = "0.3.2"
reqwest-retry = "0.6.0"
serde = { version = "1.0.204", features = ["derive"] }
serde_json = "1.0.121"
tokio = { version = "1.39.2", default-features = false, features = ["macros", "rt-multi-thread"] }

238
src/main.rs Normal file

@@ -0,0 +1,238 @@
use reqwest_middleware::{ClientBuilder, ClientWithMiddleware};
use reqwest_retry::{policies::ExponentialBackoff, RetryTransientMiddleware};
use response_deserializer::{ChapterImages, SearchResult};
mod response_deserializer;
mod select;
mod util;
use response_deserializer::{Chapter, Id};
use select::Entry;
const BASE: &str = "https://api.mangadex.org";
type Client = ClientWithMiddleware;
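// Overall flow: prompt for a search query, let the user pick a manga in a
// terminal selector, choose volumes/chapters, then download every page and
// save it under images/ as PNG files.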
#[tokio::main]
async fn main() {
let input = util::get_input("Enter search query: ");
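// HTTP client wrapped in retry middleware: transient failures are retried up to
// 3 times with exponential backoff.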
let retry_policy = ExponentialBackoff::builder().build_with_max_retries(3);
let client = ClientBuilder::new(
reqwest::Client::builder()
.user_agent("Chrome/127")
.build()
.unwrap(),
)
.with(RetryTransientMiddleware::new_with_policy(retry_policy))
.build();
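// Optional search filters; each entry becomes an extra query parameter on the
// /manga search request. All are currently disabled, so the search is unfiltered.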
let filters = [
// ("publicationDemographic[]", "seinen"),
//("status[]", "completed"),
// ("contentRating[]", "suggestive"),
];
let results = search(&client, &input, &filters).await;
let mut entries = vec![];
for result in results.data.iter() {
let mut entry = Entry::new(result.attributes.title.en.clone());
if let Some(year) = result.attributes.year {
entry.add_info("year", year);
}
let id = result.id.to_string();
entry.add_info("id", &id);
entry.add_info("status", result.attributes.status.to_string());
entry.add_info(
"content rating",
result.attributes.content_rating.to_string(),
);
if let Some(chapters) = result.attributes.last_chapter {
entry.add_info("chapters", chapters);
}
if let Some(volumes) = result.attributes.last_volume {
entry.add_info("volumes", volumes);
}
// The cover_art relationship is not guaranteed to sit at a fixed index, so find it by type.
let cover_art = result
.relationships
.iter()
.find(|r| matches!(r.data_type, response_deserializer::DataType::CoverArt))
.and_then(|r| r.attributes.as_ref());
if let Some(cover_data) = cover_art {
let data = client
.get(format!(
"https://uploads.mangadex.org/covers/{id}/{}",
&cover_data.file_name
))
.send()
.await
.unwrap()
.bytes()
.await
.unwrap();
let result = util::convert_to_sixel(&data);
entry.add_image(result)
}
entries.push(entry);
}
let choice = select::select(&entries).unwrap();
let choice_id = &results.data[choice as usize].id;
let bonus = loop {
match util::get_input("Read bonus chapters? [y/n] : ").as_str() {
"y" | "yes" => break true,
"n" | "no" => break false,
_ => continue,
}
};
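// Fetch the manga's full chapter feed and sort it by chapter number; chapters
// without a number are treated as -1 so they sort first.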
let mut chapters = match get_chapters(&client, choice_id).await {
Ok(v) => v,
Err(e) => {
eprintln!("ERROR: {:#?}", e);
std::process::exit(1);
}
};
chapters.sort_by(|a, b| {
a.attributes
.chapter
.unwrap_or(-1.)
.partial_cmp(&b.attributes.chapter.unwrap_or(-1.))
.unwrap()
});
let selection_type = loop {
match util::get_input("Select by volume or chapter? [v/c] : ").as_str() {
"v" | "volume" => break util::SelectionType::Volume(util::choose_volumes()),
"c" | "chapter" => break util::SelectionType::Chapter(util::choose_chapters()),
_ => {
eprintln!("Invalid input");
continue;
}
}
};
let selected_chapters =
util::get_chapters_from_selection(util::Selection::new(selection_type, bonus), &chapters);
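// Fetch the at-home server info (image host, chapter hash, page file names) for
// every selected chapter concurrently.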
let mut chapter_json_futures = vec![];
for chapter in &selected_chapters {
let chapter_id = &chapter.id;
let client = &client;
let future = async move {
client
.get(format!("{BASE}/at-home/server/{}", chapter_id))
.send()
.await
.unwrap()
.text()
.await
.unwrap()
};
chapter_json_futures.push(future);
}
let chapters_image_data: Vec<ChapterImages> = futures::future::join_all(chapter_json_futures)
.await
.iter()
.map(|m| response_deserializer::deserialize_chapter_images(m))
.collect();
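// Download the pages of all selected chapters concurrently.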
let mut chapter_futures = vec![];
for (i, image_data) in chapters_image_data.iter().enumerate() {
chapter_futures.push(download_chapter_images(
&client,
image_data,
selected_chapters[i],
));
}
let chapters = futures::future::join_all(chapter_futures).await;
// Create the (git-ignored) output directory before saving, so image.save does not fail.
std::fs::create_dir_all("images").unwrap();
for (i, chapter) in chapters.iter().enumerate() {
match chapter {
Ok(chapter) => {
for (j, image) in chapter.iter().enumerate() {
image
.save(format!("images/chapter{:0>3}_image_{:0>3}.png", i, j))
.unwrap();
}
}
Err(e) => {
panic!("{}", e);
}
}
}
}
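/// Downloads every page of a single chapter concurrently and decodes each one into
/// a `DynamicImage`.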
async fn download_chapter_images(
client: &Client,
image_data: &ChapterImages,
chapter: &Chapter,
) -> Result<Vec<image::DynamicImage>, reqwest_middleware::Error> {
let mut data_futures = vec![];
for (i, file_name) in image_data.chapter.data.iter().enumerate() {
let base_url: &str = image_data.base_url.as_str();
let hash: &str = image_data.chapter.hash.as_str();
let future = async move {
let data = client
.clone()
.get(format!("{base_url}/data/{hash}/{file_name}"))
.send()
.await
.unwrap()
.bytes()
.await
.unwrap();
println!(
"Downloaded volume: {:?}, chapter: {:?}, title: {}, [{}/{}]",
chapter.attributes.volume,
chapter.attributes.chapter,
chapter.attributes.title,
i + 1,
chapter.attributes.pages
);
data
};
data_futures.push(future);
}
Ok(futures::future::join_all(data_futures)
.await
.iter()
.map(|m| image::load_from_memory(m).unwrap())
.collect())
}
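/// Fetches the manga's English chapter feed, following pagination until all
/// chapters have been received.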
async fn get_chapters(client: &Client, id: &Id) -> Result<Vec<Chapter>, reqwest_middleware::Error> {
let limit = 100;
let limit = limit.to_string();
let params = [("limit", limit.as_str()), ("translatedLanguage[]", "en")];
let url = format!("{BASE}/manga/{id}/feed");
let json = client.get(url).query(&params).send().await?.text().await?;
let mut result = response_deserializer::deserialize_chapter_feed(&json);
let mut total_chapters_received = result.limit;
while total_chapters_received < result.total {
let offset = total_chapters_received.to_string();
let params = [
("limit", limit.as_str()),
("translatedLanguage[]", "en"),
("offset", offset.as_str()),
];
let url = format!("{BASE}/manga/{id}/feed");
let json = client.get(url).query(&params).send().await?.text().await?;
let mut new_result = response_deserializer::deserialize_chapter_feed(&json);
result.data.append(&mut new_result.data);
total_chapters_received += result.limit;
}
assert_eq!(result.data.len(), result.total as usize);
Ok(result.data)
}
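/// Searches MangaDex for manga matching `query`, including cover_art relationships
/// so thumbnails can be rendered in the selector.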
async fn search(client: &Client, query: &str, filters: &[(&str, &str)]) -> SearchResult {
let limit = 10;
let params = [
("title", query),
("limit", &limit.to_string()),
("includes[]", "cover_art"),
];
let json = client
.get(format!("{BASE}/manga"))
.query(&params)
.query(filters)
.send()
.await
.unwrap()
.text()
.await
.unwrap();
response_deserializer::deserializer(&json)
}

913
src/response_deserializer.rs Normal file

@@ -0,0 +1,913 @@
// TODO: Remove this
#![allow(unused)]
use chrono::{DateTime, FixedOffset};
use serde::Deserialize;
use std::fmt::Display;
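// Deserialization happens in two stages: private serde structs (`*Response`,
// `*Content`) mirror the raw JSON, and conversion functions turn them into the
// strongly typed public structs below (enums for language/status/rating, parsed
// dates, numeric volume and chapter numbers).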
#[derive(Debug, Clone)]
pub struct Id(String);
#[derive(Debug)]
pub enum ResponseResult {
Ok,
}
#[derive(Debug)]
pub enum Language {
Turkish,
Korean,
SpanishLatinAmerican,
Hungarian,
BrazilianPortugese,
English,
Japanese,
JapaneseRomaji,
Italian,
Russian,
Indonesian,
Bulgarian,
Hebrew,
Spanish,
Esperanto,
Polish,
}
#[derive(Debug)]
pub enum Status {
Completed,
Ongoing,
Hiatus,
Cancelled,
}
#[derive(Debug)]
pub enum ContentRating {
Safe,
Suggestive,
Erotica,
Pornographic,
}
#[derive(Debug)]
pub enum PublicationDemographic {
Shounen,
Shoujo,
Seinen,
Josei,
}
#[derive(Debug)]
pub enum State {
Published,
}
#[derive(Debug)]
pub enum Response {
Collection,
}
#[derive(Debug)]
pub enum DataType {
Manga,
Chapter,
CoverArt,
Author,
Artist,
ScanlationGroup,
Tag,
User,
CustomList,
Creator,
}
#[derive(Debug)]
pub struct SearchResult {
pub result: ResponseResult,
pub response: Response,
pub data: Vec<Manga>,
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct ChapterImagesContent {
result: String,
base_url: String,
chapter: ChapterImageDataContent,
}
#[derive(Debug, Deserialize)]
struct ChapterImageDataContent {
hash: String,
data: Vec<String>,
}
pub struct ChapterImageData {
pub hash: String,
pub data: Vec<String>,
}
pub struct ChapterImages {
pub result: ResponseResult,
pub base_url: String,
pub chapter: ChapterImageData,
}
pub struct ChapterFeed {
pub result: ResponseResult,
pub response: Response,
pub data: Vec<Chapter>,
pub limit: u32,
pub offset: u32,
pub total: u32,
}
#[derive(Deserialize, Debug)]
struct ChapterFeedResponse {
result: String,
response: String,
data: Vec<ChapterContent>,
limit: u32,
offset: u32,
total: u32,
}
#[derive(Debug)]
pub struct MangaAttributes {
pub title: Titles,
pub alt_titles: Vec<AltTitles>,
pub description: Description,
pub is_locked: bool,
pub links: Option<Links>,
pub original_language: Language,
pub last_volume: Option<u32>,
pub last_chapter: Option<u32>,
pub publication_demographic: Option<PublicationDemographic>,
pub status: Status,
pub year: Option<u32>,
pub content_rating: ContentRating,
pub tags: Vec<Tag>,
pub state: State,
pub chapter_numbers_reset_on_new_volume: bool,
pub created_at: DateTime<FixedOffset>,
pub updated_at: DateTime<FixedOffset>,
pub version: u32,
pub available_translated_languages: Vec<Option<Language>>,
pub latest_uploaded_chapter: Option<Id>,
}
#[derive(Deserialize, Debug)]
struct ChapterContent {
id: String,
#[serde(rename = "type")]
type_name: String,
attributes: ChapterAttributesContent,
relationships: Vec<ContentRelationShip>,
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct ChapterAttributesContent {
volume: Option<String>,
chapter: Option<String>,
title: String,
translated_language: String,
external_url: Option<String>,
publish_at: String,
readable_at: String,
created_at: String,
updated_at: String,
pages: u32,
version: u32,
}
pub struct Chapter {
pub id: Id,
pub data_type: DataType,
pub attributes: ChapterAttributes,
pub relationships: Vec<ChapterRelationShip>,
}
pub struct ChapterAttributes {
pub volume: Option<u32>,
pub chapter: Option<f32>,
pub title: String,
pub translated_language: Language,
pub external_url: Option<String>,
pub published_at: DateTime<FixedOffset>,
pub created_at: DateTime<FixedOffset>,
pub updated_at: DateTime<FixedOffset>,
pub pages: u32,
pub version: u32,
}
#[derive(Debug)]
pub struct Manga {
pub id: Id,
pub data_type: DataType,
pub attributes: MangaAttributes,
pub relationships: Vec<RelationShip>,
}
#[derive(Deserialize, Debug)]
struct SearchResponse {
result: String,
response: String,
data: Vec<ContentData>,
limit: u32,
offset: u32,
total: u32,
}
#[derive(Deserialize, Debug)]
struct ContentData {
id: String,
#[serde(rename = "type")]
type_name: String,
attributes: ContentAttributes,
relationships: Vec<ContentRelationShip>,
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct ContentAttributes {
title: Titles,
alt_titles: Vec<AltTitles>,
description: Description,
is_locked: bool,
links: Option<Links>,
original_language: String,
last_volume: Option<String>,
last_chapter: Option<String>,
publication_demographic: Option<String>,
status: String,
year: Option<u32>,
content_rating: String,
tags: Vec<ContentTag>,
state: String,
chapter_numbers_reset_on_new_volume: bool,
created_at: String,
updated_at: String,
version: u32,
available_translated_languages: Vec<Option<String>>,
latest_uploaded_chapter: Option<String>,
}
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
struct ContentCoverAttributes {
description: String,
volume: Option<String>,
file_name: String,
locale: String,
created_at: String,
updated_at: String,
version: u32,
}
#[derive(Debug)]
pub struct CoverAttributes {
pub description: String,
pub volume: Option<u32>,
pub file_name: Id,
pub locale: Language,
pub created_at: DateTime<FixedOffset>,
pub updated_at: DateTime<FixedOffset>,
pub version: u32,
}
#[derive(Deserialize, Debug, Clone)]
pub struct ContentTag {
id: String,
#[serde(rename = "type")]
type_name: String,
attributes: TagAttributes,
relationships: Vec<ContentRelationShip>,
}
#[derive(Debug)]
pub struct Tag {
pub id: Id,
pub data_type: DataType,
pub attributes: TagAttributes,
pub relationships: Vec<RelationShip>,
}
#[derive(Deserialize, Debug, Clone)]
struct ContentRelationShip {
id: String,
#[serde(rename = "type")]
type_name: String,
related: Option<String>,
attributes: Option<ContentCoverAttributes>,
}
#[derive(Debug)]
pub struct ChapterRelationShip {
id: Id,
data_type: DataType,
}
#[derive(Debug)] // TODO: Typo: Relationship
pub struct RelationShip {
pub id: Id,
pub data_type: DataType,
pub related: Option<String>,
pub attributes: Option<CoverAttributes>,
}
#[derive(Deserialize, Debug, Clone)]
pub struct TagAttributes {
pub name: TagName,
pub description: Description,
pub group: String,
pub version: u32,
}
#[derive(Deserialize, Debug, Clone)]
pub struct TagName {
pub en: String,
}
#[derive(Deserialize, Debug, Default, Clone)]
pub struct Links {
al: Option<String>,
ap: Option<String>,
bw: Option<String>,
kt: Option<String>,
mu: Option<String>,
amz: Option<String>,
cdj: Option<String>,
ebj: Option<String>,
mal: Option<String>,
engtl: Option<String>,
}
#[derive(Deserialize, Debug, Clone)]
pub struct Description {
en: Option<String>,
ru: Option<String>,
}
#[derive(Deserialize, Debug, Clone)]
pub struct AltTitles {
en: Option<String>,
ja: Option<String>,
ru: Option<String>,
}
#[derive(Deserialize, Debug, Clone)]
pub struct Titles {
pub en: String,
}
#[derive(Debug)]
enum ResponseConversionError {
AttributeError(AttributeConversionError),
Response(String),
Result(String),
ContentType(String),
}
impl TryInto<State> for &str {
type Error = ();
fn try_into(self) -> Result<State, ()> {
Ok(match self {
"published" => State::Published,
_ => return Err(()),
})
}
}
impl TryInto<ContentRating> for &str {
type Error = ();
fn try_into(self) -> Result<ContentRating, ()> {
Ok(match self {
"safe" => ContentRating::Safe,
"suggestive" => ContentRating::Suggestive,
"erotica" => ContentRating::Erotica,
"pornographic" => ContentRating::Pornographic,
_ => return Err(()),
})
}
}
impl TryInto<Language> for &str {
type Error = ();
fn try_into(self) -> Result<Language, ()> {
Ok(match self {
"ja" => Language::Japanese,
"ja-ro" => Language::JapaneseRomaji,
"en" => Language::English,
"ru" => Language::Russian,
"pt-br" => Language::BrazilianPortugese,
"tr" => Language::Turkish,
"it" => Language::Italian,
"es-la" => Language::SpanishLatinAmerican,
"hu" => Language::Hungarian,
"bg" => Language::Bulgarian,
"id" => Language::Indonesian,
"he" => Language::Hebrew,
"es" => Language::Spanish,
"eo" => Language::Esperanto,
"pl" => Language::Polish,
"ko" => Language::Korean,
_ => return Err(()),
})
}
}
impl TryInto<PublicationDemographic> for &str {
type Error = ();
fn try_into(self) -> Result<PublicationDemographic, ()> {
Ok(match self {
"shounen" => PublicationDemographic::Shounen,
"josei" => PublicationDemographic::Josei,
"shoujo" => PublicationDemographic::Shoujo,
"seinen" => PublicationDemographic::Seinen,
_ => return Err(()),
})
}
}
impl TryInto<DataType> for &str {
type Error = ();
fn try_into(self) -> Result<DataType, ()> {
Ok(match self {
"manga" => DataType::Manga,
"chapter" => DataType::Chapter,
"cover_art" => DataType::CoverArt,
"author" => DataType::Author,
"artist" => DataType::Artist,
"scanlation_group" => DataType::ScanlationGroup,
"tag" => DataType::Tag,
"user" => DataType::User,
"custom_list" => DataType::CustomList,
"creator" => DataType::Creator,
_ => return Err(()),
})
}
}
impl TryInto<Status> for &str {
type Error = ();
fn try_into(self) -> Result<Status, ()> {
Ok(match self {
"ongoing" => Status::Ongoing,
"completed" => Status::Completed,
"hiatus" => Status::Hiatus,
"cancelled" => Status::Cancelled,
_ => return Err(()),
})
}
}
impl TryInto<ResponseResult> for &str {
type Error = ();
fn try_into(self) -> Result<ResponseResult, ()> {
match self {
"ok" => Ok(ResponseResult::Ok),
_ => Err(()),
}
}
}
impl TryInto<Response> for &str {
type Error = ();
fn try_into(self) -> Result<Response, ()> {
match self {
"collection" => Ok(Response::Collection),
_ => Err(()),
}
}
}
fn convert_response_to_result(
search_response: SearchResponse,
) -> Result<SearchResult, ResponseConversionError> {
let response = (search_response.response.as_str())
.try_into()
.map_err(|_| ResponseConversionError::Response(search_response.response.clone()))?;
let result: ResponseResult = (search_response.result.as_str())
.try_into()
.map_err(|_| ResponseConversionError::Result(search_response.result.clone()))?;
let data: Vec<Manga> = search_response
.data
.iter()
.map(|m| {
Ok(Manga {
id: Id(m.id.clone()),
data_type: (m.type_name.as_str()).try_into().map_err(|_| {
ResponseConversionError::AttributeError(AttributeConversionError::DataType(
m.type_name.clone(),
))
})?,
attributes: convert_attributes(&m.attributes)
.map_err(ResponseConversionError::AttributeError)?,
relationships: m
.relationships
.iter()
.map(|m| {
Ok(RelationShip {
id: Id(m.id.clone()),
data_type: (m.type_name.as_str()).try_into().map_err(|_| {
AttributeConversionError::DataType(m.type_name.clone())
})?,
attributes: {
if let Some(attributes) = &m.attributes {
Some(CoverAttributes {
created_at: DateTime::parse_from_rfc3339(
&attributes.created_at,
)
.map_err(|_| {
AttributeConversionError::CreatedAtDateTime(
attributes.created_at.clone(),
)
})?,
updated_at: DateTime::parse_from_rfc3339(
&attributes.updated_at,
)
.map_err(|_| {
AttributeConversionError::UpdatedAtDateTime(
attributes.updated_at.clone(),
)
})?,
// TODO: Something should probably be done here
description: String::new(),
file_name: Id(attributes.file_name.clone()),
locale: (attributes.locale.as_str()).try_into().map_err(
|_| {
AttributeConversionError::Locale(
attributes.locale.clone(),
)
},
)?,
version: attributes.version,
volume: match &attributes.volume {
Some(v) => v.parse().ok(),
None => None,
},
})
} else {
None
}
},
related: m.related.clone(),
})
})
.collect::<Result<Vec<RelationShip>, AttributeConversionError>>()
.map_err(ResponseConversionError::AttributeError)?,
})
})
.collect::<Result<Vec<Manga>, ResponseConversionError>>()?;
Ok(SearchResult {
response,
result,
data,
})
}
#[derive(Debug)]
enum AttributeConversionError {
Language(String),
Locale(String),
LastVolume(String),
LastChapter(String),
CreatedAtDateTime(String),
UpdatedAtDateTime(String),
State(String),
ContentRating(String),
Status(String),
PublicationDemographic(String),
DataType(String),
}
impl Display for Id {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.fmt(f)
}
}
impl Display for Status {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Ongoing => "ongoing".fmt(f),
Self::Completed => "completed".fmt(f),
Self::Cancelled => "cancelled".fmt(f),
Self::Hiatus => "hiatus".fmt(f),
}
}
}
impl Display for ContentRating {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Safe => "safe".fmt(f),
Self::Suggestive => "suggestive".fmt(f),
Self::Erotica => "erotica".fmt(f),
Self::Pornographic => "pornographic".fmt(f),
}
}
}
fn convert_attributes(
attributes: &ContentAttributes,
) -> Result<MangaAttributes, AttributeConversionError> {
Ok(MangaAttributes {
title: attributes.title.clone(),
alt_titles: attributes.alt_titles.clone(),
description: attributes.description.clone(),
is_locked: attributes.is_locked,
links: attributes.links.clone(),
original_language: (attributes.original_language.as_str())
.try_into()
.map_err(|_| {
AttributeConversionError::Language(attributes.original_language.clone())
})?,
last_volume: match attributes.last_volume.clone() {
Some(s) => match s.parse() {
Ok(v) => Some(v),
Err(_) => {
if s.is_empty() {
None
} else {
return Err(AttributeConversionError::LastVolume(s));
}
}
},
None => None,
},
last_chapter: match attributes.last_chapter.clone() {
Some(n) => match n.parse() {
Ok(v) => Some(v),
Err(_) => {
if n.is_empty() {
None
} else {
return Err(AttributeConversionError::LastChapter(n));
}
}
},
None => None,
},
publication_demographic: match attributes.publication_demographic.clone() {
Some(s) => Some(
(s.as_str())
.try_into()
.map_err(|_| AttributeConversionError::PublicationDemographic(s.clone()))?,
),
None => None,
},
status: (attributes.status.as_str())
.try_into()
.map_err(|_| AttributeConversionError::Status(attributes.status.clone()))?,
year: attributes.year,
content_rating: attributes.content_rating.as_str().try_into().map_err(|_| {
AttributeConversionError::ContentRating(attributes.content_rating.clone())
})?,
tags: attributes
.tags
.clone()
.iter()
.map(|m| {
Ok(Tag {
data_type: (m.type_name.as_str())
.try_into()
.map_err(|_| AttributeConversionError::DataType(m.type_name.clone()))?,
id: Id(m.id.clone()),
relationships: m
.relationships
.iter()
.map(|m| {
Ok(RelationShip {
id: Id(m.id.clone()),
data_type: (m.type_name.as_str()).try_into().map_err(|_| {
AttributeConversionError::DataType(m.type_name.clone())
})?,
// TODO: Do this
attributes: None,
related: m.related.clone(),
})
})
.collect::<Result<Vec<RelationShip>, AttributeConversionError>>()?,
attributes: TagAttributes {
name: m.attributes.name.clone(),
group: m.attributes.group.clone(),
version: m.attributes.version,
description: Description {
en: m.attributes.description.en.clone(),
ru: m.attributes.description.ru.clone(),
},
},
})
})
.collect::<Result<Vec<Tag>, AttributeConversionError>>()?,
state: (attributes.state.as_str())
.try_into()
.map_err(|_| AttributeConversionError::State(attributes.state.clone()))?,
chapter_numbers_reset_on_new_volume: attributes.chapter_numbers_reset_on_new_volume,
created_at: DateTime::parse_from_rfc3339(&attributes.created_at).map_err(|_| {
AttributeConversionError::CreatedAtDateTime(attributes.created_at.clone())
})?,
updated_at: DateTime::parse_from_rfc3339(&attributes.updated_at).map_err(|_| {
AttributeConversionError::UpdatedAtDateTime(attributes.updated_at.clone())
})?,
version: attributes.version,
available_translated_languages: attributes
.available_translated_languages
.iter()
.map(|m| {
Ok(match m {
Some(s) => Some(
(s.as_str())
.try_into()
.map_err(|_| AttributeConversionError::Language(s.clone()))?,
),
None => None,
})
})
.collect::<Result<Vec<Option<Language>>, AttributeConversionError>>()?,
latest_uploaded_chapter: attributes
.latest_uploaded_chapter
.as_ref()
.map(|m| Id(m.clone())),
})
}
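/// Parses a chapter feed response; on a JSON error the raw body is written to
/// out.json and the process exits.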
pub fn deserialize_chapter_feed(json: &str) -> ChapterFeed {
let chapter_feed_response: ChapterFeedResponse = match serde_json::from_str(json) {
Ok(v) => v,
Err(e) => {
std::fs::write("out.json", json).unwrap();
eprintln!("ERROR: {:#?}", e);
std::process::exit(1);
}
};
convert_chapter_feed(chapter_feed_response).unwrap()
}
pub fn deserializer(json: &str) -> SearchResult {
let search_response: SearchResponse = match serde_json::from_str(json) {
Ok(v) => v,
Err(e) => {
std::fs::write("out.json", json).unwrap();
eprintln!("ERROR: {:#?}", e);
std::process::exit(1);
}
};
let search_result = convert_response_to_result(search_response);
match search_result {
Ok(v) => v,
Err(e) => {
eprintln!("ERROR: Failed to convert search response: {:#?}", e);
std::process::exit(1);
}
}
}
#[derive(Debug)]
enum ChapterFeedConversionError {
Result(String),
Response(String),
Chapter(ChapterConversionError),
}
#[derive(Debug)]
enum ChapterConversionError {
DataType(String),
Id(String),
RelationShip(ChapterRelationShipError),
Attributes(ChapterAttributeConversionError),
}
#[derive(Debug)]
enum ChapterRelationShipError {
TypeData(String),
Id(String),
}
fn convert_chapter_feed(
feed: ChapterFeedResponse,
) -> Result<ChapterFeed, ChapterFeedConversionError> {
Ok(ChapterFeed {
result: (feed.result.as_str())
.try_into()
.map_err(|_| ChapterFeedConversionError::Result(feed.result.clone()))?,
response: (feed.response.as_str())
.try_into()
.map_err(|_| ChapterFeedConversionError::Response(feed.response.clone()))?,
data: feed
.data
.iter()
.map(|m| {
Ok(Chapter {
data_type: (m.type_name.as_str())
.try_into()
.map_err(|_| ChapterConversionError::DataType(m.type_name.clone()))?,
id: Id(m.id.clone()),
attributes: convert_chapter_attributes(&m.attributes)
.map_err(ChapterConversionError::Attributes)?,
relationships: m
.relationships
.iter()
.map(|m| {
Ok(ChapterRelationShip {
data_type: (m.type_name.as_str()).try_into().map_err(|_| {
ChapterRelationShipError::TypeData(m.type_name.clone())
})?,
id: Id(m.id.clone()),
})
})
.collect::<Result<Vec<ChapterRelationShip>, ChapterRelationShipError>>()
.map_err(ChapterConversionError::RelationShip)?,
})
})
.collect::<Result<Vec<Chapter>, ChapterConversionError>>()
.map_err(ChapterFeedConversionError::Chapter)?,
limit: feed.limit,
offset: feed.offset,
total: feed.total,
})
}
#[derive(Debug)]
enum ChapterAttributeConversionError {
Volume(String),
Chapter(String),
CreatedAt(String),
UpdatedAt(String),
PublishedAt(String),
TranslatedLanguage(String),
}
fn convert_chapter_attributes(
attributes: &ChapterAttributesContent,
) -> Result<ChapterAttributes, ChapterAttributeConversionError> {
Ok(ChapterAttributes {
volume: match &attributes.volume {
Some(v) => match v.parse() {
Ok(n) => Some(n),
Err(_) => return Err(ChapterAttributeConversionError::Volume(v.to_owned())),
},
None => None,
},
chapter: match &attributes.chapter {
Some(v) => match v.parse() {
Ok(v) => Some(v),
Err(_) => return Err(ChapterAttributeConversionError::Chapter(v.to_owned())),
},
None => None,
},
created_at: DateTime::parse_from_rfc3339(&attributes.created_at).map_err(|_| {
ChapterAttributeConversionError::CreatedAt(attributes.created_at.clone())
})?,
published_at: DateTime::parse_from_rfc3339(&attributes.publish_at).map_err(|_| {
ChapterAttributeConversionError::PublishedAt(attributes.publish_at.clone())
})?,
updated_at: DateTime::parse_from_rfc3339(&attributes.updated_at).map_err(|_| {
ChapterAttributeConversionError::UpdatedAt(attributes.updated_at.clone())
})?,
external_url: attributes.external_url.clone(),
title: attributes.title.clone(),
pages: attributes.pages,
translated_language: (attributes.translated_language.as_str())
.try_into()
.map_err(|_| {
ChapterAttributeConversionError::TranslatedLanguage(
attributes.translated_language.clone(),
)
})?,
version: attributes.version,
})
}
#[derive(Debug)]
enum ChapterImageError {
Result(String),
}
fn convert_chapter_images(data: ChapterImagesContent) -> Result<ChapterImages, ChapterImageError> {
Ok(ChapterImages {
result: (data.result.as_str())
.try_into()
.map_err(|_| ChapterImageError::Result(data.result.clone()))?,
base_url: data.base_url,
chapter: ChapterImageData {
hash: data.chapter.hash,
data: data.chapter.data,
},
})
}
pub fn deserialize_chapter_images(json: &str) -> ChapterImages {
let chapter_images: ChapterImagesContent = match serde_json::from_str(json) {
Ok(v) => v,
Err(e) => {
std::fs::write("out.json", json).unwrap();
eprintln!("ERROR: {:#?}", e);
std::process::exit(1);
}
};
convert_chapter_images(chapter_images).unwrap()
}

164
src/select.rs Normal file

@@ -0,0 +1,164 @@
use crossterm::{
cursor::{Hide, MoveTo, Show},
event,
event::{Event, KeyCode},
terminal,
terminal::{Clear, ClearType},
QueueableCommand,
};
use std::fmt::Display;
use std::time::Duration;
use std::{
io,
io::{Stdout, Write},
};
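// Minimal full-screen selector: `j`/`k` move, Enter confirms, `q` quits. Entry
// titles are listed in the left column, the selected entry's info in the middle
// column, and its cover image (as sixel data) on the right.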
const CURRENT: char = '>';
const NON_CURRENT: char = ' ';
enum Action {
MoveDown,
MoveUp,
Select,
}
#[derive(Default)]
pub struct Entry {
title: String,
info: Vec<(String, String)>,
image: Option<String>,
}
impl Entry {
pub fn new(title: String) -> Self {
Self {
title,
..Default::default()
}
}
// Making the Entry fields private and adding this method makes it so that data is only added,
// not removed.
pub fn add_info<T: Display>(&mut self, key: &str, value: T) {
self.info.push((key.to_owned(), value.to_string()));
}
pub fn add_image(&mut self, sixel_data: String) {
self.image = Some(sixel_data);
}
}
fn get_input() -> Option<Action> {
match event::poll(Duration::MAX) {
Ok(true) => {
let event = event::read();
match event {
Ok(Event::Key(k)) => Some(match k.code {
KeyCode::Char('j') => Action::MoveDown,
KeyCode::Char('k') => Action::MoveUp,
KeyCode::Enter => Action::Select,
KeyCode::Char('q') => exit(),
_ => return None,
}),
Err(e) => {
eprintln!("ERROR: {:#?}", e);
std::process::exit(1);
}
_ => None,
}
}
Ok(false) => None,
Err(e) => {
eprintln!("ERROR: {:#?}", e);
std::process::exit(1);
}
}
}
fn exit() -> ! {
io::stdout().queue(Show).unwrap().flush().unwrap();
terminal::disable_raw_mode().unwrap();
std::process::exit(1);
}
// pub fn multi_select(entries: &[Entry]) -> Result<Vec<u16>, std::io::Error> {
// }
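/// Runs the interactive selector and returns the index of the chosen entry.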
pub fn select(entries: &[Entry]) -> Result<u16, std::io::Error> {
let (width, height) = terminal::size()?;
let mut stdout = io::stdout();
stdout.queue(Hide)?;
let mut selected: u16 = 0;
let offset = width / 3;
let mut should_render = true;
loop {
if should_render {
render(&mut stdout, entries, selected, offset)?;
should_render = false;
}
terminal::enable_raw_mode()?;
let input = get_input();
terminal::disable_raw_mode()?;
if let Some(m) = input {
match m {
Action::MoveDown => {
// Stop at the last entry (or last screen row); saturating_sub avoids an underflow when there is a single result.
if (selected as usize) < entries.len().saturating_sub(1) && selected < height - 1 {
selected += 1;
should_render = true;
}
}
Action::MoveUp => {
if selected < 1 {
selected = 0;
} else {
selected -= 1;
should_render = true;
}
}
Action::Select => {
stdout
.queue(MoveTo(0, 0))?
.queue(Clear(ClearType::All))?
.queue(Show)?
.flush()?;
return Ok(selected);
}
}
stdout.queue(MoveTo(0, selected))?;
}
stdout.flush()?;
}
}
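// Redraws the whole screen: entry titles on the left, the selected entry's info at
// `offset`, and its sixel cover image at `offset * 2`.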
fn render(
stdout: &mut Stdout,
entries: &[Entry],
selected: u16,
offset: u16,
) -> Result<(), io::Error> {
stdout.queue(MoveTo(0, 0))?.queue(Clear(ClearType::All))?;
for (i, entry) in entries.iter().enumerate() {
stdout
.queue(MoveTo(0, i as u16))?
.write_all(if i == selected as usize {
&[CURRENT as u8]
} else {
&[NON_CURRENT as u8]
})?;
stdout.write_all(entry.title.as_bytes())?;
}
if let Some(sixel_data) = &entries[selected as usize].image {
stdout
.queue(MoveTo(offset * 2, 0))?
.write_all(sixel_data.as_bytes())?;
}
for (i, line) in entries[selected as usize].info.iter().enumerate() {
stdout
.queue(MoveTo(offset, i as u16))?
.write_all(format!("{}: {}", line.0, line.1).as_bytes())?;
}
stdout.queue(MoveTo(0, selected))?.flush()?;
Ok(())
}

255
src/util.rs Normal file

@@ -0,0 +1,255 @@
use crate::Chapter;
use icy_sixel::{DiffusionMethod, MethodForLargest, MethodForRep, PixelFormat, Quality};
use std::{io, io::Write};
pub struct Selection {
pub selection_type: SelectionType,
pub bonus: bool, // Allows including or excluding bonus chapters and volumes
}
impl Selection {
pub fn new(selection_type: SelectionType, bonus: bool) -> Self {
Self {
selection_type,
bonus,
}
}
}
pub enum SelectionType {
Volume(VolumeSelection),
Chapter(ChapterSelection),
}
pub enum VolumeSelection {
All,
Range(u32, u32),
List(Vec<u32>),
Single(u32),
}
pub enum ChapterSelection {
Range(f32, f32),
List(Vec<f32>),
All,
Single(f32),
}
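// With `bonus` disabled, a chapter is kept only if it belongs to a numbered volume
// (> 0) and has a whole chapter number; extras like chapter 10.5 are skipped.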
fn filter_bonus(bonus: bool, volume: Option<u32>, chapter: Option<f32>) -> bool {
if bonus {
return true;
}
if let Some(volume) = volume {
if volume > 0 {
if let Some(chapter) = chapter {
return chapter.round() == chapter;
}
}
}
false
}
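/// Narrows the chapter list down to the user's volume/chapter selection, dropping
/// bonus chapters unless they were requested.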
pub fn get_chapters_from_selection(selection: Selection, chapters: &[Chapter]) -> Vec<&Chapter> {
let bonus = selection.bonus;
match selection.selection_type {
SelectionType::Volume(v) => match v {
VolumeSelection::All => chapters
.iter()
.filter(|m| filter_bonus(bonus, m.attributes.volume, m.attributes.chapter))
.collect(),
VolumeSelection::Single(a) => chapters
.iter()
.filter(|m| {
if let Some(n) = m.attributes.volume {
return a == n
&& filter_bonus(bonus, m.attributes.volume, m.attributes.chapter);
}
false
})
.collect(),
VolumeSelection::List(list) => chapters
.iter()
.filter(|m| {
if let Some(v) = m.attributes.volume {
return list.contains(&v)
&& filter_bonus(bonus, m.attributes.volume, m.attributes.chapter);
}
false
})
.collect(),
VolumeSelection::Range(a, b) => chapters
.iter()
.filter(|m| {
if let Some(v) = m.attributes.volume {
return v >= a
&& v <= b
&& filter_bonus(bonus, m.attributes.volume, m.attributes.chapter);
}
false
})
.collect(),
},
SelectionType::Chapter(c) => match c {
ChapterSelection::All => chapters
.iter()
.filter(|m| filter_bonus(bonus, m.attributes.volume, m.attributes.chapter))
.collect(),
ChapterSelection::Single(a) => chapters
.iter()
.filter(|m| {
if let Some(c) = m.attributes.chapter {
return a == c
&& filter_bonus(bonus, m.attributes.volume, m.attributes.chapter);
}
false
})
.collect(),
ChapterSelection::List(list) => chapters
.iter()
.filter(|m| {
if let Some(n) = m.attributes.chapter {
return list.contains(&n)
&& filter_bonus(bonus, m.attributes.volume, m.attributes.chapter);
}
false
})
.collect(),
ChapterSelection::Range(a, b) => chapters
.iter()
.filter(|m| {
if let Some(c) = m.attributes.chapter {
return c >= a
&& c <= b
&& filter_bonus(bonus, m.attributes.volume, m.attributes.chapter);
}
false
})
.collect(),
},
}
}
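/// Prompts for a volume selection: `a..b` or `a:b` for an inclusive range, a
/// comma-separated list, `all`, or a single number. Re-prompts on invalid input.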
pub fn choose_volumes() -> VolumeSelection {
let input = get_input("Choose volumes: ");
if let Some(x) = input.find("..") {
match (input[0..x].parse(), input[x + 2..].parse::<u32>()) {
(Ok(a), Ok(b)) => {
if a > b {
eprintln!("Invalid range: a > b");
choose_volumes()
} else {
VolumeSelection::Range(a, b)
}
}
_ => {
eprintln!("Invalid range");
choose_volumes()
}
}
} else if let Some(x) = input.find(":") {
match (input[0..x].parse(), input[x + 1..].parse::<u32>()) {
(Ok(a), Ok(b)) => VolumeSelection::Range(a, b),
_ => {
eprintln!("Invalid range");
choose_volumes()
}
}
} else if input.contains(",") {
match input
.split(",")
.map(|m| m.parse::<u32>())
.collect::<Result<Vec<u32>, _>>()
{
Ok(v) => VolumeSelection::List(v),
Err(e) => {
eprintln!("Invalid number in list: {:#?}", e);
choose_volumes()
}
}
} else if input.as_str() == "all" {
VolumeSelection::All
} else {
if let Ok(n) = input.parse() {
return VolumeSelection::Single(n);
}
eprintln!("Invalid input");
choose_volumes()
}
}
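/// Prompts for a chapter selection: `a..b` for an inclusive range, a comma-separated
/// list, `all`, or a single (possibly fractional) chapter number.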
pub fn choose_chapters() -> ChapterSelection {
let input = get_input("Choose chapters: ");
if let Some(x) = input.find("..") {
match (input[0..x].parse(), input[x + 2..].parse()) {
// Inclusive range
(Ok(a), Ok(b)) => {
if a > b {
eprintln!("Invalid range: a > b");
choose_chapters()
} else {
ChapterSelection::Range(a, b)
}
}
_ => {
eprintln!("Invalid range");
choose_chapters()
}
}
} else if input.contains(",") {
// Parse the whole list up front; a single bad entry re-prompts instead of being silently replaced.
match input
.split(",")
.map(|m| m.parse::<f32>())
.collect::<Result<Vec<f32>, _>>()
{
Ok(list) => ChapterSelection::List(list),
Err(e) => {
eprintln!("Invalid number in list: {:#?}", e);
choose_chapters()
}
}
} else if input.as_str() == "all" {
ChapterSelection::All
} else {
if let Ok(n) = input.parse() {
return ChapterSelection::Single(n);
}
eprintln!("Invalid input");
choose_chapters()
}
}
pub fn get_input(msg: &str) -> String {
print!("{}", msg);
io::stdout().flush().expect("failed to flush stdout");
let mut input = String::new();
io::stdin()
.read_line(&mut input)
.expect("Failed to read line");
input.trim().to_string()
}
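/// Decodes an image from raw bytes and re-encodes it as a sixel escape sequence for
/// in-terminal display.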
pub fn convert_to_sixel(data: &[u8]) -> String {
let a = image::load_from_memory(data).unwrap();
// to_rgb8 converts any pixel format (e.g. RGBA PNGs) instead of panicking on non-RGB8 images.
let a = a.to_rgb8();
let mut pixels = vec![];
a.pixels().for_each(|m| {
pixels.push(m.0[0]);
pixels.push(m.0[1]);
pixels.push(m.0[2]);
});
icy_sixel::sixel_string(
&pixels,
a.width() as i32,
a.height() as i32,
PixelFormat::RGB888,
DiffusionMethod::Auto,
MethodForLargest::Auto,
MethodForRep::Auto,
Quality::HIGH,
)
.unwrap()
}
// pub fn convert_to_/