diff --git a/Cargo.lock b/Cargo.lock
index 81ed7a6..a1e7a0f 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1,6 +1,6 @@
 # This file is automatically @generated by Cargo.
 # It is not intended for manual editing.
-version = 4
+version = 3
 
 [[package]]
 name = "adler2"
@@ -396,7 +396,7 @@ checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"
 
 [[package]]
 name = "pbdbfixer"
-version = "1.0.0"
+version = "0.8.3"
 dependencies = [
  "quick-xml",
  "rusqlite",
diff --git a/Cargo.toml b/Cargo.toml
index 7ec5c0b..08f848d 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,8 +1,8 @@
 [package]
 name = "pbdbfixer"
-version = "1.0.0"
+version = "0.9.0"
 authors = ["Martin Brodbeck "]
-edition = "2024"
+edition = "2021"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
diff --git a/README.md b/README.md
index 564b3dc..1b3d891 100644
--- a/README.md
+++ b/README.md
@@ -25,6 +25,8 @@ This program has been tested on a PocketBook
 It might work with other PocketBook devices/software versions. Please tell me
 if it works for you (and do make a backup of the explorer-3.db file before trying!).
 
+**Please note:** As I do not currently own a PocketBook device, I am unfortunately unable to react to changes in the database structure. Perhaps you can send me your `explorer-3.db` so that I can take a look at the changes. Just get in touch with me.
+
 ## Installation and Usage
 ---
 **WARNING**:
diff --git a/src/database.rs b/src/database.rs
index e640771..45d9771 100644
--- a/src/database.rs
+++ b/src/database.rs
@@ -30,7 +30,7 @@ fn get_epubs_from_database(tx: &Transaction) -> Vec {
         r#"
     SELECT books.id, folders.name, files.filename, books.firstauthor,
            books.author, genres.name, first_author_letter, series
-    FROM books_impl books JOIN files, storages
+    FROM books_impl books JOIN files
         ON books.id = files.book_id
     JOIN folders
         ON folders.id = files.folder_id
@@ -38,7 +38,7 @@ fn get_epubs_from_database(tx: &Transaction) -> Vec {
         ON books.id = btg.bookid
     LEFT OUTER JOIN genres
         ON genres.id = btg.genreid
-    WHERE files.storageid IN (SELECT storages.id WHERE storages.type = 1) AND {}.ext = 'epub'
+    WHERE files.storageid = 1 AND {}.ext = 'epub'
     ORDER BY books.id"#,
         &books_or_files
     );
diff --git a/src/epub.rs b/src/epub.rs
index 3ee7e28..3574377 100644
--- a/src/epub.rs
+++ b/src/epub.rs
@@ -1,4 +1,5 @@
 use std::{
+    borrow::Borrow,
     collections::HashMap,
     fs::{self, File},
     io::Read,
@@ -60,17 +61,18 @@ fn get_rootfile(archive: &mut ZipArchive) -> String {
     loop {
         match reader.read_event_into(&mut buf) {
             Ok(Event::Start(ref e)) | Ok(Event::Empty(ref e))
-                if e.name().as_ref() == b"rootfile" =>
+                if e.local_name().into_inner() == b"rootfile" =>
             {
-                opf_filename = e
-                    .attributes()
-                    .filter(|attr| attr.as_ref().unwrap().key.as_ref() == b"full-path")
-                    .next()
-                    .unwrap()
-                    .unwrap()
-                    .unescape_value()
-                    .unwrap()
-                    .to_string();
+                opf_filename = String::from_utf8(
+                    e.attributes()
+                        .filter(|attr| attr.as_ref().unwrap().key.into_inner() == b"full-path")
+                        .next()
+                        .unwrap()
+                        .unwrap()
+                        .value
+                        .to_vec(),
+                )
+                .unwrap();
                 break;
             }
             Ok(Event::Eof) => break,
@@ -131,20 +133,20 @@ pub fn get_epub_metadata(filename: &str) -> Option {
     loop {
         match reader.read_event_into(&mut buf) {
             // See if we have EPUB3 or EPUB2
-            Ok(Event::Start(ref e)) if e.name().as_ref() == b"package" => {
+            Ok(Event::Start(ref e)) if e.local_name().into_inner() == b"package" => {
                 if e.attributes().any(|attr| {
-                    attr.as_ref().unwrap().key.as_ref() == b"version"
+                    attr.as_ref().unwrap().key.into_inner() == b"version"
                         && attr.as_ref().unwrap().value.starts_with(b"3")
                 }) {
                     is_epub3 = true;
                 }
             }
-            Ok(Event::Start(ref e)) if e.local_name().as_ref() == b"creator" => {
+            Ok(Event::Start(ref e)) if e.local_name().into_inner() == b"creator" => {
                 creator_found = true;
                 if is_epub3 {
                     if let Some(idval) = e
                         .attributes()
-                        .filter(|attr| attr.as_ref().unwrap().key.as_ref() == b"id")
+                        .filter(|attr| attr.as_ref().unwrap().key.into_inner() == b"id")
                         .next()
                     {
                         curr_id = "#".to_string()
@@ -164,7 +166,7 @@ pub fn get_epub_metadata(filename: &str) -> Option {
                             attr.as_ref()
                                 .unwrap()
                                 .key
-                                .as_ref()
+                                .into_inner()
                                 .ends_with(b"file-as")
                         })
                         .next()
@@ -173,13 +175,13 @@ pub fn get_epub_metadata(filename: &str) -> Option {
                         let entry = xml_authors.entry(curr_id.clone()).or_insert(XmlAut::new());
                         entry.sort = file_as_val
                             .unwrap()
-                            .unescape_value()
+                            .decode_and_unescape_value(*reader.decoder().borrow())
                             .unwrap_or_default()
                             .to_string();
                         entry.role = "aut".to_string();
                     } else if let Some(_role_val) = e
                         .attributes()
-                        .filter(|attr| attr.as_ref().unwrap().key.as_ref().ends_with(b"role"))
+                        .filter(|attr| attr.as_ref().unwrap().key.into_inner().ends_with(b"role"))
                         .next()
                     {
                         curr_id = "none".to_string() + xml_authors.len().to_string().as_str();
@@ -198,33 +200,33 @@ pub fn get_epub_metadata(filename: &str) -> Option {
 
                 creator_found = false;
             }
-            Ok(Event::Start(ref e)) if e.local_name().as_ref() == b"meta" && is_epub3 => {
+            Ok(Event::Start(ref e)) if e.local_name().into_inner() == b"meta" && is_epub3 => {
                 if let Some(refines) = e
                     .attributes()
-                    .filter(|attr| attr.as_ref().unwrap().key.as_ref() == b"refines")
+                    .filter(|attr| attr.as_ref().unwrap().key.into_inner() == b"refines")
                     .next()
                 {
                     if e.attributes().any(|attr| {
-                        attr.as_ref().unwrap().key.as_ref() == b"property"
+                        attr.as_ref().unwrap().key.into_inner() == b"property"
                             && attr.as_ref().unwrap().value.ends_with(b"file-as")
                     }) {
                         curr_id = String::from_utf8(refines.unwrap().value.to_vec()).unwrap();
                         file_as_found = true;
                     } else if e.attributes().any(|attr| {
-                        attr.as_ref().unwrap().key.as_ref() == b"property"
+                        attr.as_ref().unwrap().key.into_inner() == b"property"
                             && attr.as_ref().unwrap().value.ends_with(b"role")
                     }) {
                         curr_id = String::from_utf8(refines.unwrap().value.to_vec()).unwrap();
                         role_found = true;
                     } else if e.attributes().any(|attr| {
-                        attr.as_ref().unwrap().key.as_ref() == b"property"
+                        attr.as_ref().unwrap().key.into_inner() == b"property"
                             && attr.as_ref().unwrap().value.ends_with(b"group-position")
                     }) {
                         series_index_found = true;
                     }
                 }
                 if e.attributes().any(|attr| {
-                    attr.as_ref().unwrap().key.as_ref() == b"property"
+                    attr.as_ref().unwrap().key.into_inner() == b"property"
                         && attr
                             .as_ref()
                             .unwrap()
@@ -234,9 +236,9 @@ pub fn get_epub_metadata(filename: &str) -> Option {
                     series_found = true;
                 }
             }
-            Ok(Event::Empty(ref e)) if e.local_name().as_ref() == b"meta" && !is_epub3 => {
+            Ok(Event::Empty(ref e)) if e.local_name().into_inner() == b"meta" && !is_epub3 => {
                 if e.attributes().any(|attr| {
-                    attr.as_ref().unwrap().key.as_ref() == b"name"
+                    attr.as_ref().unwrap().key.into_inner() == b"name"
                         && attr
                             .as_ref()
                             .unwrap()
@@ -246,15 +248,15 @@ pub fn get_epub_metadata(filename: &str) -> Option {
                 }) {
                     epub_meta.series.name = e
                         .attributes()
-                        .filter(|attr| attr.as_ref().unwrap().key.as_ref() == b"content")
+                        .filter(|attr| attr.as_ref().unwrap().key.into_inner() == b"content")
                         .next()
                         .unwrap()
                         .unwrap()
-                        .unescape_value()
+                        .decode_and_unescape_value(*reader.decoder().borrow())
                         .unwrap_or_default()
                         .to_string();
                 } else if e.attributes().any(|attr| {
-                    attr.as_ref().unwrap().key.as_ref() == b"name"
+                    attr.as_ref().unwrap().key.into_inner() == b"name"
                         && attr
                             .as_ref()
                             .unwrap()
@@ -264,11 +266,11 @@ pub fn get_epub_metadata(filename: &str) -> Option {
                 }) {
                     let index_float = e
                         .attributes()
-                        .filter(|attr| attr.as_ref().unwrap().key.as_ref() == b"content")
+                        .filter(|attr| attr.as_ref().unwrap().key.into_inner() == b"content")
                         .next()
                         .unwrap()
                         .unwrap()
-                        .unescape_value()
+                        .decode_and_unescape_value(*reader.decoder().borrow())
                         .unwrap_or_default()
                         .parse::()
                         .unwrap_or_default();
@@ -300,10 +302,11 @@ pub fn get_epub_metadata(filename: &str) -> Option {
 
                 series_index_found = false;
             }
-            Ok(Event::Start(ref e)) if e.local_name().as_ref() == b"subject" => {
+            Ok(Event::Start(ref e)) if e.local_name().into_inner() == b"subject" => {
                 genre_found = true;
            }
             Ok(Event::Text(ref e)) if genre_found => {
+                //epub_meta.genre = e.unescape_and_decode(&reader).unwrap();
                 epub_meta.genre = e.unescape().unwrap().to_string();
                 genre_found = false;
             }