fix book log dates
This commit is contained in:
parent
f1499586df
commit
0a65f25b60
6 changed files with 12486 additions and 10743 deletions
|
@ -7,7 +7,7 @@
|
||||||
"full_title": "Nonviolent communication a language of life",
|
"full_title": "Nonviolent communication a language of life",
|
||||||
"authors": [
|
"authors": [
|
||||||
{
|
{
|
||||||
"id": "OL243612A",
|
"ol_author_id": "OL243612A",
|
||||||
"name": "Marshall B. Rosenberg"
|
"name": "Marshall B. Rosenberg"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
|
@ -35,7 +35,7 @@
|
||||||
"PuddleDancer Press"
|
"PuddleDancer Press"
|
||||||
],
|
],
|
||||||
"work": {
|
"work": {
|
||||||
"id": "OL2018966W",
|
"ol_work_id": "OL2018966W",
|
||||||
"title": "Nonviolent Communication",
|
"title": "Nonviolent Communication",
|
||||||
"first_publish_date": "1999",
|
"first_publish_date": "1999",
|
||||||
"subjects": [
|
"subjects": [
|
||||||
|
@ -68,7 +68,7 @@
|
||||||
"Self-improvement"
|
"Self-improvement"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"id": "OL27210498M",
|
"ol_edition_id": "OL27210498M",
|
||||||
"date_added": "2019-11-09",
|
"date_added": "2019-11-09",
|
||||||
"date_started": "2024-02-13",
|
"date_started": "2024-02-13",
|
||||||
"added_by_id": "9781892005281"
|
"added_by_id": "9781892005281"
|
||||||
|
@ -87,7 +87,7 @@
|
||||||
"publish_date": "Apr 02, 2017",
|
"publish_date": "Apr 02, 2017",
|
||||||
"authors": [
|
"authors": [
|
||||||
{
|
{
|
||||||
"id": "OL7477772A",
|
"ol_author_id": "OL7477772A",
|
||||||
"name": "Martin Kleppmann"
|
"name": "Martin Kleppmann"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
|
@ -95,7 +95,7 @@
|
||||||
"976434277"
|
"976434277"
|
||||||
],
|
],
|
||||||
"work": {
|
"work": {
|
||||||
"id": "OL19293745W",
|
"ol_work_id": "OL19293745W",
|
||||||
"title": "Designing Data-Intensive Applications",
|
"title": "Designing Data-Intensive Applications",
|
||||||
"subjects": [
|
"subjects": [
|
||||||
"Development",
|
"Development",
|
||||||
|
@ -111,7 +111,7 @@
|
||||||
"005.276"
|
"005.276"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"id": "OL26780701M",
|
"ol_edition_id": "OL26780701M",
|
||||||
"date_added": "2021-06-26",
|
"date_added": "2021-06-26",
|
||||||
"date_started": "2024-01-17",
|
"date_started": "2024-01-17",
|
||||||
"added_by_id": "9781449373320"
|
"added_by_id": "9781449373320"
|
||||||
|
@ -191,7 +191,7 @@
|
||||||
],
|
],
|
||||||
"isbn_13": "9781788680523",
|
"isbn_13": "9781788680523",
|
||||||
"work": {
|
"work": {
|
||||||
"id": "OL15419603W",
|
"ol_work_id": "OL15419603W",
|
||||||
"title": "France",
|
"title": "France",
|
||||||
"subjects": [
|
"subjects": [
|
||||||
"Guidebooks",
|
"Guidebooks",
|
||||||
|
@ -203,7 +203,7 @@
|
||||||
"Europe - France"
|
"Europe - France"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"id": "OL50982390M",
|
"ol_edition_id": "OL50982390M",
|
||||||
"date_added": "2024-01-02",
|
"date_added": "2024-01-02",
|
||||||
"date_started": "2023-12-25"
|
"date_started": "2023-12-25"
|
||||||
}
|
}
|
||||||
|
|
8349
data/books/log.json
8349
data/books/log.json
File diff suppressed because it is too large
Load diff
11678
data/books/wishlist.json
11678
data/books/wishlist.json
File diff suppressed because it is too large
Load diff
3136
data/films/log.json
3136
data/films/log.json
File diff suppressed because it is too large
Load diff
|
@ -457,6 +457,9 @@ def cleanup_result(item, media_type) -> dict:
|
||||||
del item[field_name]
|
del item[field_name]
|
||||||
|
|
||||||
if media_type in ["films", "tv-series"]:
|
if media_type in ["films", "tv-series"]:
|
||||||
|
item["id"] = item["tmdb_id"]
|
||||||
|
del item["id"]
|
||||||
|
|
||||||
title_key = "name" if "tv-series" == media_type else "title"
|
title_key = "name" if "tv-series" == media_type else "title"
|
||||||
|
|
||||||
if f"original_{title_key}" in item and "original_language" in item:
|
if f"original_{title_key}" in item and "original_language" in item:
|
||||||
|
@ -467,7 +470,7 @@ def cleanup_result(item, media_type) -> dict:
|
||||||
del item[f"original_{title_key}"], item["original_language"]
|
del item[f"original_{title_key}"], item["original_language"]
|
||||||
|
|
||||||
if "books" == media_type:
|
if "books" == media_type:
|
||||||
_, _, item["id"] = item["key"].split("/")
|
_, _, item["ol_id"] = item["key"].split("/")
|
||||||
del item["key"]
|
del item["key"]
|
||||||
|
|
||||||
for key in ["isbn_10", "isbn_13"]:
|
for key in ["isbn_10", "isbn_13"]:
|
||||||
|
|
45
scripts/re-add_dates.py
Normal file
45
scripts/re-add_dates.py
Normal file
|
@ -0,0 +1,45 @@
|
||||||
|
"""Backfill missing ``date_*`` fields in the books log from the old export.

The current log (``log.json``) lost some ``date_{started,finished,added,read}``
values; the older pre-processing export (``log-preprocess.json``) still carries
them under title-case keys such as ``"Date Started"``.  This script copies the
old values across wherever the new log's value is missing or empty, then
rewrites ``log.json``.

NOTE(review): the slice offsets below (6 entries skipped in the new log,
2 in the old one) encode how the two files happen to line up pairwise —
confirm against the actual data before re-running.
"""

import json

# Date fields to reconcile; the new log spells them "date_started", the old
# export "Date Started".
DATE_KEYS = ["started", "finished", "added", "read"]


def _date_missing(item: dict, new_key: str, old_style_key: str) -> bool:
    """Return True when *item* has no usable date under either key spelling.

    "Usable" means the key is present with a non-empty string; a key that is
    absent, or present with ``""``, counts as missing.
    """
    if new_key not in item and old_style_key not in item:
        return True
    # Present-but-empty under either spelling also counts as missing.
    # (dict.get returns None for absent keys, which is != "".)
    return item.get(new_key) == "" or item.get(old_style_key) == ""


def backfill_dates(
    log_items: list, old_log_items: list, new_offset: int = 6, old_offset: int = 2
) -> list:
    """Fill empty/missing ``date_*`` fields in ``log_items[new_offset:]``.

    Entries are aligned pairwise with ``old_log_items[old_offset:]``: the
    i-th entry after each offset is assumed to describe the same book.
    Mutates the item dicts in place and returns *log_items*.

    BUG FIX vs. the original script: it executed ``log_items[i] = item``
    inside the loop, which wrote the items from position ``new_offset``
    onward back over indices 0..N, clobbering the first entries.  Since the
    dicts are mutated in place, no write-back is needed at all.
    """
    for i, item in enumerate(log_items[new_offset:]):
        old_item = old_log_items[old_offset + i]
        # Title is only used for progress messages; tolerate either spelling.
        title = item.get("title", item.get("Title"))

        for key in DATE_KEYS:
            new_key = f"date_{key}"
            old_key = f"Date {key.capitalize()}"
            if not _date_missing(item, new_key, old_key):
                continue

            if old_key not in old_item:
                print(f"No {new_key} for {title}, no value in old log either.")
                item[new_key] = None
            elif old_item[old_key] == "":
                print(f"No {new_key} for {title}, and date in old log is empty")
                item[new_key] = None
            else:
                print(
                    f"No {new_key} for {title}, "
                    f"taking date {old_item[old_key]} from old log"
                )
                item[new_key] = old_item[old_key]

    return log_items


def main() -> None:
    """Load both logs, backfill the dates, and write ``log.json`` back."""
    with open("./data/books/log.json", "r", encoding="utf-8") as log_file:
        log_items = json.load(log_file)
    with open(
        "./data/books/log-preprocess.json", "r", encoding="utf-8"
    ) as old_log_file:
        old_log_items = json.load(old_log_file)

    backfill_dates(log_items, old_log_items)

    with open("./data/books/log.json", "w", encoding="utf-8") as log_file:
        json.dump(log_items, log_file, indent=4)

    print("Finished processing log.")


if __name__ == "__main__":
    main()
|
||||||
|
|
Loading…
Reference in a new issue