Compare commits


No commits in common. "a9620b3e347ffca930c78c52b248104f87588018" and "247591707db8f1f0bf895231ca3ee66103687086" have entirely different histories.

4 changed files with 61 additions and 74 deletions

Cargo.lock (generated)

@@ -148,7 +148,7 @@ dependencies = [
 [[package]]
 name = "pbdbfixer"
-version = "0.6.0"
+version = "0.5.0"
 dependencies = [
  "quick-xml",
  "rusqlite",

Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "pbdbfixer"
-version = "0.6.0"
+version = "0.5.0"
 authors = ["Martin Brodbeck <martin@brodbeck-online.de>"]
 edition = "2018"

src/epub.rs

@@ -96,16 +96,6 @@ pub fn get_epub_metadata(filename: &str) -> Option<EpubMetadata> {
         role: String,
     }
-    impl XmlAut {
-        fn new() -> Self {
-            XmlAut {
-                name: String::new(),
-                sort: String::new(),
-                role: String::new(),
-            }
-        }
-    }
     let mut xml_authors = HashMap::new();
     loop {
@@ -131,7 +121,14 @@ pub fn get_epub_metadata(filename: &str) -> Option<EpubMetadata> {
                             + String::from_utf8(idval.unwrap().value.to_vec())
                                 .unwrap()
                                 .as_str();
-                        xml_authors.insert(curr_id.clone(), XmlAut::new());
+                        xml_authors.insert(
+                            curr_id.clone(),
+                            XmlAut {
+                                name: "".to_string(),
+                                sort: "".to_string(),
+                                role: "".to_string(),
+                            },
+                        );
                     }
                 } else {
                     if let Some(file_as_val) = e
@@ -139,28 +136,45 @@ pub fn get_epub_metadata(filename: &str) -> Option<EpubMetadata> {
                         .filter(|attr| attr.as_ref().unwrap().key.ends_with(b"file-as"))
                         .next()
                     {
-                        curr_id = "none".to_string() + xml_authors.len().to_string().as_str();
-                        let entry = xml_authors.entry(curr_id.clone()).or_insert(XmlAut::new());
+                        let ns =
+                            String::from_utf8(file_as_val.as_ref().unwrap().key.to_vec()).unwrap();
+                        curr_id = "none".to_string() + ns.split(':').collect::<Vec<&str>>()[0];
+                        let entry = xml_authors.entry(curr_id.clone()).or_insert(XmlAut {
+                            name: "".to_string(),
+                            sort: "".to_string(),
+                            role: "".to_string(),
+                        });
                         entry.sort = file_as_val
                             .unwrap()
                             .unescape_and_decode_value(&reader)
                             .unwrap_or_default();
                         entry.role = "aut".to_string();
-                    } else if let Some(_role_val) = e
+                    }
+                    if let Some(role_val) = e
                         .attributes()
                         .filter(|attr| attr.as_ref().unwrap().key.ends_with(b"role"))
                         .next()
                     {
-                        curr_id = "none".to_string() + xml_authors.len().to_string().as_str();
+                        let ns =
+                            String::from_utf8(role_val.as_ref().unwrap().key.to_vec()).unwrap();
+                        curr_id = "none".to_string() + ns.split(':').collect::<Vec<&str>>()[0];
                     }
                 }
             }
             Ok(Event::Text(ref e)) if creator_found => {
                 if is_epub3 {
-                    let entry = xml_authors.entry(curr_id.clone()).or_insert(XmlAut::new());
+                    let entry = xml_authors.entry(curr_id.clone()).or_insert(XmlAut {
+                        name: "".to_string(),
+                        sort: "".to_string(),
+                        role: "".to_string(),
+                    });
                     entry.name = String::from_utf8(e.to_vec()).unwrap();
                 } else {
-                    let entry = xml_authors.entry(curr_id.clone()).or_insert(XmlAut::new());
+                    let entry = xml_authors.entry(curr_id.clone()).or_insert(XmlAut {
+                        name: "".to_string(),
+                        sort: "".to_string(),
+                        role: "".to_string(),
+                    });
                     entry.name = String::from_utf8(e.to_vec()).unwrap();
                     entry.role = "aut".to_string();
                 }
@@ -189,13 +203,21 @@ pub fn get_epub_metadata(filename: &str) -> Option<EpubMetadata> {
                 }
             }
             Ok(Event::Text(ref e)) if file_as_found && is_epub3 => {
-                let entry = xml_authors.entry(curr_id.clone()).or_insert(XmlAut::new());
+                let entry = xml_authors.entry(curr_id.clone()).or_insert(XmlAut {
+                    name: "".to_string(),
+                    sort: "".to_string(),
+                    role: "".to_string(),
+                });
                 entry.sort = String::from_utf8(e.to_vec()).unwrap();
                 file_as_found = false;
             }
             Ok(Event::Text(ref e)) if role_found && is_epub3 => {
-                let entry = xml_authors.entry(curr_id.clone()).or_insert(XmlAut::new());
+                let entry = xml_authors.entry(curr_id.clone()).or_insert(XmlAut {
+                    name: "".to_string(),
+                    sort: "".to_string(),
+                    role: "".to_string(),
+                });
                 entry.role = String::from_utf8(e.to_vec()).unwrap();
                 role_found = false;
@@ -212,8 +234,6 @@ pub fn get_epub_metadata(filename: &str) -> Option<EpubMetadata> {
         }
     }
-    //println!("Meta: {:?}", &xml_authors);
     epub_meta.authors = xml_authors
         .into_iter()
         .filter(|&(_, ref xml_author)| &xml_author.role == "aut" && &xml_author.name.len() > &0)
@@ -223,7 +243,5 @@ pub fn get_epub_metadata(filename: &str) -> Option<EpubMetadata> {
         })
         .collect();
-    //println!("Meta: {:?}", &epub_meta);
     Some(epub_meta)
 }
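A minimal, self-contained sketch of the entry()/or_insert pattern used on the new side of the hunks above, where XmlAut::new() is replaced by an inline struct literal. The XmlAut fields mirror the struct in this file; the map key and the main() wrapper are purely illustrative:

use std::collections::HashMap;

#[derive(Debug)]
struct XmlAut {
    name: String,
    sort: String,
    role: String,
}

fn main() {
    let mut xml_authors: HashMap<String, XmlAut> = HashMap::new();

    // or_insert only inserts the empty value when the key is vacant,
    // matching the inline struct literals used throughout the diff above.
    let entry = xml_authors
        .entry("#creator01".to_string())
        .or_insert(XmlAut {
            name: "".to_string(),
            sort: "".to_string(),
            role: "".to_string(),
        });
    entry.role = "aut".to_string();

    assert_eq!(xml_authors["#creator01"].role, "aut");
}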

src/main.rs

@@ -11,7 +11,6 @@ struct BookEntry {
     firstauthor: String,
     has_drm: bool,
     genre: String,
-    first_author_letter: String,
 }
 
 fn get_epubs_from_database(tx: &Transaction) -> Vec<BookEntry> {
@@ -19,9 +18,8 @@ fn get_epubs_from_database(tx: &Transaction) -> Vec<BookEntry> {
     let mut stmt = tx
         .prepare(
-            r#"
-            SELECT books.id, folders.name, files.filename, books.firstauthor,
-                   books.author, genres.name, first_author_letter
+            r"
+            SELECT books.id, folders.name, files.filename, books.firstauthor, books.author, genres.name
             FROM books_impl books JOIN files
             ON books.id = files.book_id
             JOIN folders
@@ -31,7 +29,7 @@ fn get_epubs_from_database(tx: &Transaction) -> Vec<BookEntry> {
             LEFT OUTER JOIN genres
             ON genres.id = btg.genreid
             WHERE files.storageid = 1 AND books.ext = 'epub'
-            ORDER BY books.id"#,
+            ORDER BY books.id",
         )
         .unwrap();
@@ -49,7 +47,6 @@ fn get_epubs_from_database(tx: &Transaction) -> Vec<BookEntry> {
             _ => false,
         };
         let genre: String = row.get(5).unwrap_or_default();
-        let first_author_letter = row.get(6).unwrap_or_default();
         let entry = BookEntry {
             id: book_id,
@@ -58,7 +55,6 @@ fn get_epubs_from_database(tx: &Transaction) -> Vec<BookEntry> {
             author,
             has_drm,
             genre,
-            first_author_letter,
         };
         book_entries.push(entry);
@@ -70,7 +66,7 @@ fn get_epubs_from_database(tx: &Transaction) -> Vec<BookEntry> {
 fn remove_ghost_books_from_db(tx: &Transaction) -> usize {
     let mut stmt = tx
         .prepare(
-            r#"
+            r"
             DELETE FROM books_impl
             WHERE id IN (
                 SELECT books.id
@@ -78,34 +74,34 @@ fn remove_ghost_books_from_db(tx: &Transaction) -> usize {
                 LEFT OUTER JOIN files
                 ON books.id = files.book_id
                 WHERE files.filename is NULL
-            )"#,
+            )",
         )
         .unwrap();
     let num = stmt.execute(NO_PARAMS).unwrap();
     tx.execute(
-        r#"DELETE FROM books_settings WHERE bookid NOT IN ( SELECT id FROM books_impl )"#,
+        r"DELETE FROM books_settings WHERE bookid NOT IN ( SELECT id FROM books_impl )",
         NO_PARAMS,
     )
     .unwrap();
     tx.execute(
-        r#"DELETE FROM books_uids WHERE book_id NOT IN ( SELECT id FROM books_impl )"#,
+        r"DELETE FROM books_uids WHERE book_id NOT IN ( SELECT id FROM books_impl )",
         NO_PARAMS,
     )
     .unwrap();
     tx.execute(
-        r#"DELETE FROM bookshelfs_books WHERE bookid NOT IN ( SELECT id FROM books_impl )"#,
+        r"DELETE FROM bookshelfs_books WHERE bookid NOT IN ( SELECT id FROM books_impl )",
         NO_PARAMS,
     )
     .unwrap();
     tx.execute(
-        r#"DELETE FROM booktogenre WHERE bookid NOT IN ( SELECT id FROM books_impl )"#,
+        r"DELETE FROM booktogenre WHERE bookid NOT IN ( SELECT id FROM books_impl )",
         NO_PARAMS,
     )
     .unwrap();
     tx.execute(
-        r#"DELETE FROM social WHERE bookid NOT IN ( SELECT id FROM books_impl )"#,
+        r"DELETE FROM social WHERE bookid NOT IN ( SELECT id FROM books_impl )",
         NO_PARAMS,
     )
     .unwrap();
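A side note on the r#"…"# to r"…" literal changes in the two functions above: the hash-delimited raw string is only needed when the literal itself contains a double quote, so the plain r"…" form is equivalent for these queries. A small illustrative snippet (the first string is taken from the hunk above, the second is made up):

fn main() {
    // No double quote inside, so r"…" and r#"…"# yield the same string.
    let plain = r"DELETE FROM booktogenre WHERE bookid NOT IN ( SELECT id FROM books_impl )";
    // Hashes are required only to embed literal double quotes.
    let quoted = r#"a "quoted" word"#;
    println!("{}\n{}", plain, quoted);
}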
@@ -118,7 +114,6 @@ struct Statistics {
     ghost_books_cleaned: usize,
     drm_skipped: usize,
     genres_fixed: usize,
-    sorting_fixed: usize,
 }
 
 fn fix_db_entries(tx: &Transaction, book_entries: &Vec<BookEntry>) -> Statistics {
@@ -127,7 +122,6 @@ fn fix_db_entries(tx: &Transaction, book_entries: &Vec<BookEntry>) -> Statistics
         ghost_books_cleaned: 0,
         drm_skipped: 0,
         genres_fixed: 0,
-        sorting_fixed: 0,
     };
 
     for entry in book_entries {
@@ -137,14 +131,17 @@ fn fix_db_entries(tx: &Transaction, book_entries: &Vec<BookEntry>) -> Statistics
         }
         if let Some(epub_metadata) = epub::get_epub_metadata(&entry.filepath) {
-            // Fix firstauthor…
-            let mut firstauthors = epub_metadata
+            let authors = epub_metadata
                 .authors
                 .iter()
                 .filter(|aut| aut.firstauthor.len() > 0)
+                .collect::<Vec<_>>();
+            // Fix firstauthor…
+            let firstauthors = authors
+                .iter()
                 .map(|aut| aut.firstauthor.clone())
                 .collect::<Vec<_>>();
-            firstauthors.sort();
             if !firstauthors.iter().all(|s| entry.firstauthor.contains(s)) {
                 let mut stmt = tx
                     .prepare("UPDATE books_impl SET firstauthor = :file_as WHERE id = :book_id")
@@ -156,28 +153,8 @@ fn fix_db_entries(tx: &Transaction, book_entries: &Vec<BookEntry>) -> Statistics
                 stat.authors_fixed = stat.authors_fixed + 1;
             }
-            // Fix first_author_letter
-            let first_author_letter = firstauthors
-                .join(" & ")
-                .chars()
-                .next()
-                .unwrap_or_default()
-                .to_string()
-                .to_uppercase();
-            if entry.first_author_letter != first_author_letter {
-                let mut stmt = tx
-                    .prepare("UPDATE books_impl SET first_author_letter = :first_letter WHERE id = :book_id")
-                    .unwrap();
-                stmt.execute_named(
-                    named_params![":first_letter": first_author_letter,":book_id": entry.id],
-                )
-                .unwrap();
-                stat.sorting_fixed = stat.sorting_fixed + 1;
-            }
             // Fix author names…
-            let authornames = epub_metadata
-                .authors
+            let authornames = authors
                 .iter()
                 .map(|aut| aut.name.clone())
                 .collect::<Vec<_>>();
@@ -192,7 +169,6 @@ fn fix_db_entries(tx: &Transaction, book_entries: &Vec<BookEntry>) -> Statistics
             stat.authors_fixed = stat.authors_fixed + 1;
             }
             // Fix genre…
             if entry.genre.is_empty() && epub_metadata.genre.len() > 0 {
                 let mut stmt = tx
                     .prepare(r#"INSERT INTO genres (name) SELECT :genre ON CONFLICT DO NOTHING"#)
@@ -279,12 +255,10 @@ fn main() {
             pocketbook::Icon::Info,
             &format!(
                 "Authors fixed: {}\n\
-                 Sorting fixed: {}\n\
                  Genres fixed: {}\n\
                  Books skipped (DRM): {}\n\
                  Books cleaned from DB: {}",
                 &stat.authors_fixed,
-                &stat.sorting_fixed,
                 &stat.genres_fixed,
                 &stat.drm_skipped,
                 &stat.ghost_books_cleaned
@@ -295,15 +269,10 @@ fn main() {
     } else {
         println!(
             "Authors fixed: {}\n\
-             Sorting fixed: {}\n\
              Genres fixed: {}\n\
              Books skipped (DRM): {}\n\
              Books cleaned from DB: {}",
-            &stat.authors_fixed,
-            &stat.sorting_fixed,
-            &stat.genres_fixed,
-            &stat.drm_skipped,
-            &stat.ghost_books_cleaned
+            &stat.authors_fixed, &stat.genres_fixed, &stat.drm_skipped, &stat.ghost_books_cleaned
         );
     }
 }
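For context, a minimal sketch of the prepare()/execute_named()/named_params! pattern used in fix_db_entries, written against the older rusqlite API this code already relies on (NO_PARAMS, execute_named). The in-memory connection and the sample row are illustrative; the table and column names come from the queries above:

use rusqlite::{named_params, Connection, NO_PARAMS};

fn main() -> rusqlite::Result<()> {
    let conn = Connection::open_in_memory()?;
    conn.execute(
        "CREATE TABLE books_impl (id INTEGER PRIMARY KEY, firstauthor TEXT)",
        NO_PARAMS,
    )?;
    conn.execute(
        "INSERT INTO books_impl (id, firstauthor) VALUES (1, 'Doe, Jane')",
        NO_PARAMS,
    )?;

    // Same shape as the UPDATE statements above: a prepared statement
    // driven by named parameters.
    let mut stmt =
        conn.prepare("UPDATE books_impl SET firstauthor = :file_as WHERE id = :book_id")?;
    stmt.execute_named(named_params![":file_as": "Doe, Jane A.", ":book_id": 1])?;

    Ok(())
}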