slightly improve performance by replacing iterators with owning for loops

2025-06-04 00:08:33 +02:00
parent 6d4ffa209a
commit 2f381d540b
5 changed files with 313 additions and 288 deletions
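The change is mechanical throughout: the conversion functions now take their input by value and walk it with a for loop (plus an immediately invoked closure where the ? operator is still wanted), instead of iterating by reference and cloning every field. A minimal standalone sketch of the before/after shape, using made-up types rather than the project's:

// Illustrative types only; not the crate's real response/model structs.
struct Raw { name: String }
struct Parsed { name: String }
struct ParseError(String);

// Before: iterate by reference, so every field has to be cloned.
fn convert_by_ref(raw: &[Raw]) -> Result<Vec<Parsed>, ParseError> {
    raw.iter()
        .map(|r| {
            if r.name.is_empty() {
                return Err(ParseError(r.name.clone()));
            }
            Ok(Parsed { name: r.name.clone() })
        })
        .collect()
}

// After: take ownership and loop, so fields are moved instead of cloned.
// The inner closure keeps the ? shortcut usable inside the loop body.
fn convert_owned(raw: Vec<Raw>) -> Result<Vec<Parsed>, ParseError> {
    let mut out = Vec::with_capacity(raw.len());
    for r in raw {
        let parsed = (|| {
            if r.name.is_empty() {
                return Err(ParseError(r.name));
            }
            Ok(Parsed { name: r.name })
        })()?;
        out.push(parsed);
    }
    Ok(out)
}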

.gitignore (vendored)

@@ -5,3 +5,4 @@ Cargo.lock
*.jpg
*.png
*.cbz
+*.json
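The new entry keeps the JSON dumps that the deserializer now writes on parse errors (out.json, chapter_feed.json, search_result.json) out of version control.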

src/main.rs

@@ -1,14 +1,17 @@
+#![feature(test)]
use reqwest_middleware::{ClientBuilder, ClientWithMiddleware};
use reqwest_retry::{policies::ExponentialBackoff, RetryTransientMiddleware};
use response_deserializer::{ChapterImages, SearchResult};
use std::fs::File;
use std::io::Write;
use std::path::Path;
+use std::pin::Pin;
use zip::write::{SimpleFileOptions, ZipWriter};
use zip::CompressionMethod;
mod response_deserializer;
mod select;
+mod test;
mod util;
use response_deserializer::{Chapter, Id};
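`#![feature(test)]` is a crate-level attribute, so it has to sit at the top of the crate root (main.rs) even though the benchmark itself lives in src/test.rs; it also means the crate now only builds on the nightly toolchain.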
@@ -256,7 +259,7 @@ async fn main() {
let zip_file_path = format!("{} - Volume {:0>3}.cbz", title.en, volume);
let zip_file_path = Path::new(&zip_file_path);
-let zip_file = File::create(&zip_file_path).unwrap();
+let zip_file = File::create(zip_file_path).unwrap();
let mut zip = ZipWriter::new(zip_file);
let options =
@@ -281,7 +284,7 @@ async fn main() {
let zip_file_path = format!("{} - Chapter {:0>3}.cbz", title.en, chapter);
let zip_file_path = Path::new(&zip_file_path);
-let zip_file = File::create(&zip_file_path).unwrap();
+let zip_file = File::create(zip_file_path).unwrap();
let mut zip = ZipWriter::new(zip_file);
let options =
@@ -403,7 +406,8 @@ async fn select_manga_from_search(
util::CoverSize::W512 => ".512.jpg",
};
-let mut entry_futures = Vec::new();
+use std::future::Future;
+let mut entry_futures: Vec<Pin<Box<dyn Future<Output = Entry>>>> = Vec::new();
for result in results.data.iter() {
let mut entry = Entry::new(result.attributes.title.en.clone());
if let Some(year) = result.attributes.year {
@@ -425,6 +429,7 @@ async fn select_manga_from_search(
if let Some(cover_data) = &result.relationships[2].attributes {
// The lib used for converting to sixel is abysmally slow for larger images, this
// should be in a future to allow for multithreaded work
+if config.cover != Some(false) {
let future = async move {
let image_url = format!(
"https://uploads.mangadex.org/covers/{id}/{}{cover_ex}",
@@ -443,7 +448,10 @@ async fn select_manga_from_search(
entry.set_image(result);
entry
};
-entry_futures.push(future);
+entry_futures.push(Box::pin(future));
+} else {
+entry_futures.push(Box::pin(async move { entry }));
+}
}
}
let entries = futures::future::join_all(entry_futures).await;
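The type change above is what lets the two branches coexist: the cover-download future and the bare async move { entry } block are different anonymous types, so both are boxed behind dyn Future before being collected and awaited together. A rough sketch of the same trick (stand-in Entry type; assumes the futures crate, which the file already uses for join_all):

use std::future::Future;
use std::pin::Pin;

struct Entry(String);

async fn build_entries(with_cover: bool) -> Vec<Entry> {
    // Boxing erases the concrete async-block types so both fit in one Vec.
    let mut entry_futures: Vec<Pin<Box<dyn Future<Output = Entry>>>> = Vec::new();
    for i in 0..3 {
        let entry = Entry(format!("entry {i}"));
        if with_cover {
            entry_futures.push(Box::pin(async move {
                // ...download and attach the cover image here...
                entry
            }));
        } else {
            entry_futures.push(Box::pin(async move { entry }));
        }
    }
    futures::future::join_all(entry_futures).await
}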

src/response_deserializer.rs

@@ -405,7 +405,7 @@ struct ChapterContent {
#[serde(rename = "type")]
type_name: String,
attributes: ChapterAttributesContent,
-relationships: Vec<ContentRelationShip>,
+relationships: Vec<ContentRelationship>,
}
#[derive(Deserialize, Debug)]
@@ -428,7 +428,7 @@ pub struct Chapter {
pub id: Id,
pub data_type: DataType,
pub attributes: ChapterAttributes,
-pub relationships: Vec<ChapterRelationShip>,
+pub relationships: Vec<ChapterRelationship>,
}
#[derive(Debug)]
@@ -450,7 +450,7 @@ pub struct Manga {
pub id: Id,
pub data_type: DataType,
pub attributes: MangaAttributes,
-pub relationships: Vec<RelationShip>,
+pub relationships: Vec<Relationship>,
}
#[derive(Deserialize, Debug)]
@@ -476,7 +476,7 @@ struct ContentData {
#[serde(rename = "type")]
type_name: String,
attributes: ContentAttributes,
-relationships: Vec<ContentRelationShip>,
+relationships: Vec<ContentRelationship>,
}
#[derive(Deserialize, Debug)]
@@ -504,7 +504,7 @@ struct ContentAttributes {
latest_uploaded_chapter: Option<String>,
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct ContentCoverAttributes {
description: String,
@@ -527,13 +527,13 @@ pub struct CoverAttributes {
pub version: u32,
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Deserialize, Debug)]
pub struct ContentTag {
id: String,
#[serde(rename = "type")]
type_name: String,
attributes: TagAttributes,
-relationships: Vec<ContentRelationShip>,
+relationships: Vec<ContentRelationship>,
}
#[derive(Debug)]
@@ -541,11 +541,11 @@ pub struct Tag {
pub id: Id,
pub data_type: DataType,
pub attributes: TagAttributes,
-pub relationships: Vec<RelationShip>,
+pub relationships: Vec<Relationship>,
}
-#[derive(Deserialize, Debug, Clone)]
-struct ContentRelationShip {
+#[derive(Deserialize, Debug)]
+struct ContentRelationship {
id: String,
#[serde(rename = "type")]
type_name: String,
@@ -554,20 +554,20 @@ struct ContentRelationShip {
}
#[derive(Debug)]
-pub struct ChapterRelationShip {
+pub struct ChapterRelationship {
id: Id,
data_type: DataType,
}
-#[derive(Debug)] // TODO: Typo: Relationship
+#[derive(Debug)]
-pub struct RelationShip {
+pub struct Relationship {
pub id: Id,
pub data_type: DataType,
pub related: Option<String>,
pub attributes: Option<CoverAttributes>,
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Deserialize, Debug)]
pub struct TagAttributes {
pub name: TagName,
pub description: Description,
@@ -575,12 +575,12 @@ pub struct TagAttributes {
pub version: u32,
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Deserialize, Debug)]
pub struct TagName {
pub en: String,
}
-#[derive(Deserialize, Debug, Default, Clone)]
+#[derive(Deserialize, Debug, Default)]
pub struct Links {
al: Option<String>,
ap: Option<String>,
@@ -594,20 +594,20 @@ pub struct Links {
engtl: Option<String>,
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Deserialize, Debug)]
pub struct Description {
en: Option<String>,
ru: Option<String>,
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Deserialize, Debug)]
pub struct AltTitles {
en: Option<String>,
ja: Option<String>,
ru: Option<String>,
}
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Deserialize, Debug)]
pub struct Titles {
pub en: String,
}
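Dropping Clone from these response-side structs is possible because the converters below now consume them by value; a field can be moved out of an owned struct, whereas anything reached through a reference has to be cloned. A tiny illustration with hypothetical types:

struct ApiTitle { en: String }
struct Title { en: String }

// Behind a reference the String must be cloned.
fn from_ref(t: &ApiTitle) -> Title {
    Title { en: t.en.clone() }
}

// With ownership the String is simply moved; no Clone derive, no allocation.
fn from_owned(t: ApiTitle) -> Title {
    Title { en: t.en }
}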
@@ -955,84 +955,28 @@ fn convert_response_to_result(
) -> Result<SearchResult, ResponseConversionError> {
let response = (search_response.response.as_str())
.try_into()
-.map_err(|_| ResponseConversionError::Result(search_response.response.clone()))?;
+.map_err(|_| ResponseConversionError::Result(search_response.response))?;
let result: ResponseResult = (search_response.result.as_str())
.try_into()
-.map_err(|_| ResponseConversionError::Result({ search_response.result.clone() }))?;
+.map_err(|_| ResponseConversionError::Result(search_response.result))?;
-let data: Vec<Manga> = search_response
-.data
-.iter()
-.map(|m| {
-Ok(Manga {
-id: Id(m.id.clone()),
-data_type: (m.type_name.as_str()).try_into().map_err(|_| {
-ResponseConversionError::AttributeError(AttributeConversionError::DataType(
-m.type_name.clone(),
-))
-})?,
-attributes: convert_attributes(&m.attributes)
-.map_err(ResponseConversionError::AttributeError)?,
-relationships: m
-.relationships
-.iter()
-.map(|m| {
-Ok(RelationShip {
-id: Id(m.id.clone()),
-data_type: (m.type_name.as_str()).try_into().map_err(|_| {
-AttributeConversionError::DataType(m.type_name.clone())
-})?,
-attributes: {
-if let Some(attributes) = &m.attributes {
-Some(CoverAttributes {
-created_at: DateTime::parse_from_rfc3339(
-&attributes.created_at,
-)
-.map_err(|_| {
-AttributeConversionError::CreatedAtDateTime(
-attributes.created_at.clone(),
-)
-})?,
-updated_at: DateTime::parse_from_rfc3339(
-&attributes.created_at,
-)
-.map_err(|_| {
-AttributeConversionError::CreatedAtDateTime(
-attributes.created_at.clone(),
-)
-})?,
-// TODO: Something should probably be done here
-description: String::new(),
-file_name: Id(attributes.file_name.clone()),
-locale: (attributes.locale.as_str()).try_into().map_err(
-|_| {
-AttributeConversionError::Locale(
-attributes.locale.clone(),
-)
-},
-)?,
-version: attributes.version,
-volume: match &attributes.volume {
-Some(v) => v.parse().ok(),
-None => None,
-},
-})
-} else {
-None
-}
-},
-related: m.related.clone(),
-})
-})
-.collect::<Result<Vec<RelationShip>, AttributeConversionError>>()
-.map_err(ResponseConversionError::AttributeError)?,
-})
-})
-.collect::<Result<Vec<Manga>, ResponseConversionError>>()?;
+let mut data: Result<Vec<Manga>, ResponseConversionError> =
+Ok(Vec::with_capacity(search_response.data.len()));
+for m in search_response.data {
+if let Ok(ref mut d) = data {
+match convert_data_to_manga(m) {
+Ok(v) => d.push(v),
+Err(e) => {
+data = Err(e);
+break;
+}
+}
+}
+}
Ok(SearchResult {
response,
result,
-data,
+data: data?,
})
}
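As the comment in the chapter-feed converter below admits, the same ownership win is available without the manual accumulator: an owning iterator short-circuits on the first error just like the loop above, roughly `search_response.data.into_iter().map(convert_data_to_manga).collect()`. A hedged sketch with stand-in types:

struct RawEntry { id: String }
struct Converted { id: String }
struct ConversionError(String);

fn convert_one(raw: RawEntry) -> Result<Converted, ConversionError> {
    if raw.id.is_empty() {
        return Err(ConversionError(raw.id));
    }
    Ok(Converted { id: raw.id })
}

// into_iter() moves each element, and collect() stops at the first Err.
fn convert_all(raw: Vec<RawEntry>) -> Result<Vec<Converted>, ConversionError> {
    raw.into_iter().map(convert_one).collect()
}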
@@ -1080,20 +1024,18 @@ impl Display for ContentRating {
}
fn convert_attributes(
-attributes: &ContentAttributes,
+attributes: ContentAttributes,
) -> Result<MangaAttributes, AttributeConversionError> {
Ok(MangaAttributes {
-title: attributes.title.clone(),
+title: attributes.title,
-alt_titles: attributes.alt_titles.clone(),
+alt_titles: attributes.alt_titles,
-description: attributes.description.clone(),
+description: attributes.description,
is_locked: attributes.is_locked,
-links: attributes.links.clone(),
+links: attributes.links,
original_language: (attributes.original_language.as_str())
.try_into()
-.map_err(|_| {
-AttributeConversionError::Language(attributes.original_language.clone())
-})?,
+.map_err(|_| AttributeConversionError::Language(attributes.original_language))?,
-last_volume: match attributes.last_volume.clone() {
+last_volume: match attributes.last_volume {
Some(s) => match s.parse() {
Ok(v) => Some(v),
Err(_) => {
@@ -1106,7 +1048,7 @@ fn convert_attributes(
},
None => None,
},
-last_chapter: match attributes.last_chapter.clone() {
+last_chapter: match attributes.last_chapter {
Some(n) => match n.parse() {
Ok(v) => Some(v),
Err(_) => {
@@ -1119,83 +1061,101 @@ fn convert_attributes(
},
None => None,
},
-publication_demographic: match attributes.publication_demographic.clone() {
+publication_demographic: match attributes.publication_demographic {
Some(s) => Some(
(s.as_str())
.try_into()
-.map_err(|_| AttributeConversionError::PublicationDemographic(s.clone()))?,
+.map_err(|_| AttributeConversionError::PublicationDemographic(s))?,
),
None => None,
},
status: (attributes.status.as_str())
.try_into()
-.map_err(|_| AttributeConversionError::Status(attributes.status.clone()))?,
+.map_err(|_| AttributeConversionError::Status(attributes.status))?,
year: attributes.year,
-content_rating: attributes.content_rating.as_str().try_into().map_err(|_| {
-AttributeConversionError::ContentRating(attributes.content_rating.clone())
-})?,
-tags: attributes
-.tags
-.clone()
-.iter()
-.map(|m| {
-Ok(Tag {
+content_rating: attributes
+.content_rating
+.as_str()
+.try_into()
+.map_err(|_| AttributeConversionError::ContentRating(attributes.content_rating))?,
+tags: {
+let mut tags = Vec::with_capacity(attributes.tags.len());
+for m in attributes.tags {
+tags.push(({
+|| {
+Ok::<Tag, AttributeConversionError>(Tag {
data_type: (m.type_name.as_str())
.try_into()
-.map_err(|_| AttributeConversionError::DataType(m.type_name.clone()))?,
+.map_err(|_| AttributeConversionError::DataType(m.type_name))?,
-id: Id(m.id.clone()),
+id: Id(m.id),
-relationships: m
-.relationships
-.iter()
-.map(|m| {
-Ok(RelationShip {
-id: Id(m.id.clone()),
-data_type: (m.type_name.as_str()).try_into().map_err(|_| {
-AttributeConversionError::DataType(m.type_name.clone())
+relationships: {
+let mut relationships = Vec::with_capacity(m.relationships.len());
+for m in m.relationships {
+relationships.push(({
+|| {
+Ok::<Relationship, AttributeConversionError>(
+Relationship {
+id: Id(m.id),
+data_type: (m.type_name.as_str())
+.try_into()
+.map_err(|_| {
+AttributeConversionError::DataType(
+m.type_name,
+)
})?,
// TODO: Do this
attributes: None,
-related: m.related.clone(),
-})
-})
-.collect::<Result<Vec<RelationShip>, AttributeConversionError>>()?,
+related: m.related,
+},
+)
+}
+})()?);
+}
+relationships
+},
attributes: TagAttributes {
-name: m.attributes.name.clone(),
-group: m.attributes.group.clone(),
+name: m.attributes.name,
+group: m.attributes.group,
version: m.attributes.version,
description: Description {
-en: m.attributes.description.en.clone(),
-ru: m.attributes.description.ru.clone(),
+en: m.attributes.description.en,
+ru: m.attributes.description.ru,
},
},
})
-})
-.collect::<Result<Vec<Tag>, AttributeConversionError>>()?,
+}
+})()?);
+}
+tags
+},
state: (attributes.state.as_str())
.try_into()
-.map_err(|_| AttributeConversionError::State(attributes.state.clone()))?,
+.map_err(|_| AttributeConversionError::State(attributes.state))?,
chapter_numbers_reset_on_new_volume: attributes.chapter_numbers_reset_on_new_volume,
created_at: DateTime::parse_from_rfc3339(&attributes.created_at).map_err(|_| {
AttributeConversionError::CreatedAtDateTime(attributes.created_at.clone())
})?,
-updated_at: DateTime::parse_from_rfc3339(&attributes.updated_at).map_err(|_| {
-AttributeConversionError::UpdatedAtDateTime(attributes.created_at.clone())
-})?,
+updated_at: DateTime::parse_from_rfc3339(&attributes.updated_at)
+.map_err(|_| AttributeConversionError::UpdatedAtDateTime(attributes.created_at))?,
version: attributes.version,
-available_translated_languages: attributes
-.available_translated_languages
-.iter()
-.map(|m| {
-Ok(match m {
+available_translated_languages: {
+let mut av = Vec::with_capacity(attributes.available_translated_languages.len());
+for m in attributes.available_translated_languages {
+av.push(({
+|| {
+Ok::<Option<Language>, AttributeConversionError>(match m {
Some(s) => Some(
(s.as_str())
.try_into()
-.map_err(|_| AttributeConversionError::Language(s.clone()))?,
+.map_err(|_| AttributeConversionError::Language(s))?,
),
None => None,
})
-})
-.collect::<Result<Vec<Option<Language>>, AttributeConversionError>>()?,
+}
+})()?);
+}
+av
+},
latest_uploaded_chapter: attributes
.latest_uploaded_chapter
.as_ref()
@@ -1203,35 +1163,32 @@ fn convert_attributes(
})
}
-// *****
pub fn deserialize_id_query(json: &str) -> IdQueryResult {
let id_query_response: IdQueryResponse = match serde_json::from_str(json) {
Ok(v) => v,
Err(e) => {
-std::fs::write("out.json", json).unwrap();
eprintln!("ERROR: {:#?}", e);
+std::fs::write("out.json", json).unwrap();
std::process::exit(1);
}
};
-convert_some_test(id_query_response).unwrap()
+convert_id_query(id_query_response).unwrap()
}
-fn convert_some_test(input: IdQueryResponse) -> Result<IdQueryResult, AttributeConversionError> {
+fn convert_id_query(response: IdQueryResponse) -> Result<IdQueryResult, AttributeConversionError> {
Ok(IdQueryResult {
-result: input.result.as_str().try_into().unwrap(),
+result: response.result.as_str().try_into().unwrap(),
-response: input.response.as_str().try_into().unwrap(),
+response: response.response.as_str().try_into().unwrap(),
-data: convert_data_to_manga(input.data).unwrap(),
+data: convert_data_to_manga(response.data).unwrap(),
})
}
-// *****
pub fn deserialize_chapter_feed(json: &str) -> ChapterFeed {
let chapter_feed_response: ChapterFeedResponse = match serde_json::from_str(json) {
Ok(v) => v,
Err(e) => {
-std::fs::write("out.json", json).unwrap();
eprintln!("ERROR: {:#?}", e);
+std::fs::write("chapter_feed.json", json).unwrap();
std::process::exit(1);
}
};
@@ -1242,8 +1199,8 @@ pub fn deserializer(json: &str) -> SearchResult {
let search_response: SearchResponse = match serde_json::from_str(json) {
Ok(v) => v,
Err(e) => {
-std::fs::write("out.json", json).unwrap();
eprintln!("ERROR: {:#?}", e);
+std::fs::write("search_result.json", json).unwrap();
std::process::exit(1);
}
};
@@ -1268,12 +1225,12 @@ enum ChapterFeedConversionError {
enum ChapterConversionError {
DataType(String),
Id(String),
-RelationShip(ChapterRelationShipError),
+Relationship(ChapterRelationshipError),
Attributes(ChapterAttributeConversionError),
}
#[derive(Debug)]
-enum ChapterRelationShipError {
+enum ChapterRelationshipError {
TypeData(String),
Id(String),
}
@@ -1281,41 +1238,59 @@ enum ChapterRelationShipError {
fn convert_chapter_feed(
feed: ChapterFeedResponse,
) -> Result<ChapterFeed, ChapterFeedConversionError> {
+// Now this is a bit of an abomination. It uses closures so that the ? syntax sugar can still
+// be used to return an error, and for loops instead of iterators, since the iterators here do
+// not take ownership and force clones. I think I should have just kept the iterators.
+let mut data: Vec<Chapter> = Vec::with_capacity(feed.data.len());
+for m in feed.data {
+let chapter: Chapter = ({
+|| {
+Ok::<Chapter, ChapterConversionError>(Chapter {
+data_type: (m.type_name.as_str())
+.try_into()
+.map_err(|_| ChapterConversionError::DataType(m.type_name))?,
+id: Id(m.id),
+attributes: convert_chapter_attributes(m.attributes)
+.map_err(ChapterConversionError::Attributes)?,
+relationships: {
+let mut relationships = Vec::with_capacity(m.relationships.len());
+for r in m.relationships {
+relationships.push(
+({
+|| {
+Ok({
+ChapterRelationship {
+data_type: (r.type_name.as_str())
+.try_into()
+.map_err(|_| {
+ChapterRelationshipError::TypeData(
+r.type_name,
+)
+})?,
+id: Id(r.id),
+}
+})
+}
+})()
+.map_err(ChapterConversionError::Relationship)?,
+);
+}
+relationships
+},
+})
+}
+})()
+.map_err(ChapterFeedConversionError::Chapter)?;
+data.push(chapter);
+}
Ok(ChapterFeed {
result: (feed.result.as_str())
.try_into()
-.map_err(|_| ChapterFeedConversionError::Result(feed.result.clone()))?,
+.map_err(|_| ChapterFeedConversionError::Result(feed.result))?,
response: (feed.response.as_str())
.try_into()
-.map_err(|_| ChapterFeedConversionError::Result(feed.response.clone()))?,
+.map_err(|_| ChapterFeedConversionError::Result(feed.response))?,
-data: feed
-.data
-.iter()
-.map(|m| {
-Ok(Chapter {
-data_type: (m.type_name.as_str())
-.try_into()
-.map_err(|_| ChapterConversionError::DataType(m.type_name.clone()))?,
-id: Id(m.id.clone()),
-attributes: convert_chapter_attributes(&m.attributes)
-.map_err(ChapterConversionError::Attributes)?,
-relationships: m
-.relationships
-.iter()
-.map(|m| {
-Ok(ChapterRelationShip {
-data_type: (m.type_name.as_str()).try_into().map_err(|_| {
-ChapterRelationShipError::TypeData(m.type_name.clone())
-})?,
-id: Id(m.id.clone()),
-})
-})
-.collect::<Result<Vec<ChapterRelationShip>, ChapterRelationShipError>>()
-.map_err(ChapterConversionError::RelationShip)?,
-})
-})
-.collect::<Result<Vec<Chapter>, ChapterConversionError>>()
-.map_err(ChapterFeedConversionError::Chapter)?,
+data,
limit: feed.limit,
offset: feed.offset,
total: feed.total,
@@ -1333,7 +1308,7 @@ enum ChapterAttributeConversionError {
}
fn convert_chapter_attributes(
-attributes: &ChapterAttributesContent,
+attributes: ChapterAttributesContent,
) -> Result<ChapterAttributes, ChapterAttributeConversionError> {
Ok(ChapterAttributes {
volume: match &attributes.volume {
@@ -1350,24 +1325,19 @@ fn convert_chapter_attributes(
},
None => None,
},
-created_at: DateTime::parse_from_rfc3339(&attributes.created_at).map_err(|_| {
-ChapterAttributeConversionError::CreatedAt(attributes.created_at.clone())
-})?,
-published_at: DateTime::parse_from_rfc3339(&attributes.publish_at).map_err(|_| {
-ChapterAttributeConversionError::CreatedAt(attributes.publish_at.clone())
-})?,
-updated_at: DateTime::parse_from_rfc3339(&attributes.updated_at).map_err(|_| {
-ChapterAttributeConversionError::CreatedAt(attributes.updated_at.clone())
-})?,
-external_url: attributes.external_url.clone(),
-title: attributes.title.clone(),
+created_at: DateTime::parse_from_rfc3339(&attributes.created_at)
+.map_err(|_| ChapterAttributeConversionError::CreatedAt(attributes.created_at))?,
+published_at: DateTime::parse_from_rfc3339(&attributes.publish_at)
+.map_err(|_| ChapterAttributeConversionError::CreatedAt(attributes.publish_at))?,
+updated_at: DateTime::parse_from_rfc3339(&attributes.updated_at)
+.map_err(|_| ChapterAttributeConversionError::CreatedAt(attributes.updated_at))?,
+external_url: attributes.external_url,
+title: attributes.title,
pages: attributes.pages,
translated_language: (attributes.translated_language.as_str())
.try_into()
.map_err(|_| {
-ChapterAttributeConversionError::TranslatedLanguage(
-attributes.translated_language.clone(),
-)
+ChapterAttributeConversionError::TranslatedLanguage(attributes.translated_language)
})?,
version: attributes.version,
})
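For reference, DateTime::parse_from_rfc3339 used throughout these converters is chrono's offset-preserving parser; a minimal sketch of the error-mapping pattern above, with a hypothetical error type:

use chrono::{DateTime, FixedOffset};

#[derive(Debug)]
struct BadTimestamp(String);

// Parse an RFC 3339 string, keeping the original text in the error.
fn parse_timestamp(raw: &str) -> Result<DateTime<FixedOffset>, BadTimestamp> {
    DateTime::parse_from_rfc3339(raw).map_err(|_| BadTimestamp(raw.to_string()))
}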
@@ -1377,7 +1347,7 @@ fn convert_chapter_images(data: ChapterImagesContent) -> Result<ChapterImages, C
Ok(ChapterImages {
result: (data.result.as_str())
.try_into()
-.map_err(|_| ChapterImageError::Result(data.result.clone()))?,
+.map_err(|_| ChapterImageError::Result(data.result))?,
base_url: data.base_url,
chapter: ChapterImageData {
hash: data.chapter.hash,
@@ -1405,33 +1375,39 @@ pub fn deserialize_chapter_images(json: &str) -> Result<ChapterImages, ChapterIm
fn convert_data_to_manga(m: ContentData) -> Result<Manga, ResponseConversionError> {
Ok(Manga {
-id: Id(m.id.clone()),
+id: Id(m.id),
data_type: (m.type_name.as_str()).try_into().map_err(|_| {
-ResponseConversionError::AttributeError(AttributeConversionError::DataType(
-m.type_name.clone(),
-))
+ResponseConversionError::AttributeError(AttributeConversionError::DataType(m.type_name))
})?,
-attributes: convert_attributes(&m.attributes)
+attributes: convert_attributes(m.attributes)
.map_err(ResponseConversionError::AttributeError)?,
-relationships: m
-.relationships
-.iter()
-.map(|m| {
-Ok(RelationShip {
-id: Id(m.id.clone()),
-data_type: (m.type_name.as_str())
+relationships: {
+let mut relationships = Vec::with_capacity(m.relationships.len());
+for m in m.relationships {
+relationships.push(
+({
+|| {
+Ok::<Relationship, AttributeConversionError>(Relationship {
+id: Id(m.id),
+data_type: m
+.type_name
+.as_str()
.try_into()
-.map_err(|_| AttributeConversionError::DataType(m.type_name.clone()))?,
+.map_err(|_| AttributeConversionError::DataType(m.type_name))?,
attributes: {
-if let Some(attributes) = &m.attributes {
+if let Some(attributes) = m.attributes {
Some(CoverAttributes {
-created_at: DateTime::parse_from_rfc3339(&attributes.created_at)
+created_at: DateTime::parse_from_rfc3339(
+&attributes.created_at,
+)
.map_err(|_| {
AttributeConversionError::CreatedAtDateTime(
attributes.created_at.clone(),
)
})?,
-updated_at: DateTime::parse_from_rfc3339(&attributes.created_at)
+updated_at: DateTime::parse_from_rfc3339(
+&attributes.created_at,
+)
.map_err(|_| {
AttributeConversionError::CreatedAtDateTime(
attributes.created_at.clone(),
@@ -1440,8 +1416,12 @@ fn convert_data_to_manga(m: ContentData) -> Result<Manga, ResponseConversionErro
// TODO: Something should probably be done here
description: String::new(),
file_name: Id(attributes.file_name.clone()),
-locale: (attributes.locale.as_str()).try_into().map_err(|_| {
-AttributeConversionError::Locale(attributes.locale.clone())
+locale: (attributes.locale.as_str())
+.try_into()
+.map_err(|_| {
+AttributeConversionError::Locale(
+attributes.locale.clone(),
+)
})?,
version: attributes.version,
volume: match &attributes.volume {
@@ -1455,8 +1435,12 @@ fn convert_data_to_manga(m: ContentData) -> Result<Manga, ResponseConversionErro
},
related: m.related.clone(),
})
-})
-.collect::<Result<Vec<RelationShip>, AttributeConversionError>>()
+}
+})()
.map_err(ResponseConversionError::AttributeError)?,
+);
+}
+relationships
+},
})
}

src/test.rs (new file)

@@ -0,0 +1,14 @@
extern crate test;
#[cfg(test)]
mod tests {
use super::*;
use crate::response_deserializer;
use test::Bencher;
#[bench]
fn loops(b: &mut Bencher) {
let search_result = std::fs::read_to_string("test_data/search_result.json").unwrap();
b.iter(|| response_deserializer::deserializer(&search_result));
}
}
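#[bench] and extern crate test are nightly-only, which is why #![feature(test)] was added to the crate root above; the benchmark also expects the test_data/search_result.json fixture to be present. With a nightly toolchain installed it runs with:

cargo +nightly bench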


@@ -51,6 +51,7 @@ pub struct Config {
pub selection_type: Option<ConfigSelectionType>,
pub selection_range: Option<String>,
pub search: Option<ConfigSearch>,
+pub cover: Option<bool>,
}
pub enum ConfigSearch {
@@ -308,6 +309,7 @@ impl Config {
result_limit: 5,
selection_type: None,
selection_range: None,
+cover: None,
}
}
}
@@ -429,6 +431,22 @@ pub fn args() -> Config {
}
};
}
"--cover" => {
config.cover = match args.next() {
Some(s) => Some(match s.as_str() {
"true" => true,
"false" => false,
s => {
eprintln!("Invalid value for cover size, valid values: [\"256\", \"512\", \"full\"], found: {s}");
std::process::exit(1);
}
}),
None => {
eprintln!("Missing value for cover size, valid values: [\"256\", \"512\", \"full\"]");
std::process::exit(1);
}
};
}
s => {
eprintln!("Found invalid argument: {s}");
std::process::exit(1);
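The --cover arm matches the two literals by hand; a shorter equivalent would let str::parse::<bool>() do the matching. A hypothetical helper, not the project's code:

fn parse_cover_flag(value: Option<String>) -> bool {
    match value.as_deref().map(str::parse::<bool>) {
        Some(Ok(v)) => v,
        // Covers both a missing value and anything other than "true"/"false".
        _ => {
            eprintln!("--cover expects \"true\" or \"false\"");
            std::process::exit(1);
        }
    }
}

With this, the arm could reduce to something like config.cover = Some(parse_cover_flag(args.next())).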