update logs and scripts

parent 06957e053f
commit cc6e4de409

6 changed files with 44762 additions and 44602 deletions
@@ -1,58 +1,101 @@
 [
     {
-        "Title": "Good-Bye to All That: An Autobiography",
-        "Author": "Robert Graves",
-        "ISBN": null,
-        "ISBN13": null,
-        "Publisher": "Anchor Books",
-        "Binding": "Paperback",
-        "Number of Pages": 347,
-        "Year Published": "1958",
-        "Original Publication Year": "1929",
-        "Date Added": "2013-05-07",
-        "Date Started": "2024-01-01",
-        "Read Count": 0
-    },
-    {
-        "Title": "The End of Policing",
-        "Author": "Alex S. Vitale",
-        "ISBN": null,
-        "ISBN13": null,
-        "Publisher": "Verso",
-        "Binding": "Kindle Edition",
-        "Number of Pages": 272,
-        "Year Published": "2017",
-        "Original Publication Year": "2017",
-        "Date Added": "2020-06-05",
-        "Date Started": "2024-01-01",
-        "Read Count": 0
-    },
-    {
-        "Title": "France",
-        "Series": "Lonely Planet",
-        "Author": "Lonely Planet",
-        "ISBN13": "9781788680513",
-        "Publisher": "Lonely Planet Global Limited",
-        "Binding": "Paperback",
-        "Number of Pages": 1021,
-        "Year Published": "2021",
-        "Original Publication Year": "1994",
-        "Date Added": "2024-01-02",
-        "Date Started": "2023-12-25",
-        "Read Count": 0
-    },
-    {
-        "Title": "The Design of Everyday Things",
-        "Author": "Donald A. Norman",
-        "ISBN": "0465067107",
-        "ISBN13": "9780465067107",
-        "Publisher": "Basic Books",
-        "Binding": "Paperback",
-        "Number of Pages": 240,
-        "Year Published": "2002",
-        "Original Publication Year": "1988",
-        "Date Added": "2021-12-01",
-        "Date Started": "2023-12-24",
-        "Read Count": 0
-    }
-]
+        "publishers": [
+            "O'Reilly Media"
+        ],
+        "title": "Designing Data-Intensive Applications: The Big Ideas Behind Reliable, Scalable, and Maintainable Systems",
+        "number_of_pages": 624,
+        "covers": [
+            8434671
+        ],
+        "isbn_13": "9781449373320",
+        "isbn_10": "1449373321",
+        "publish_date": "Apr 02, 2017",
+        "authors": [
+            {
+                "id": "OL7477772A",
+                "name": "Martin Kleppmann"
+            }
+        ],
+        "oclc_numbers": [
+            "976434277"
+        ],
+        "work": {
+            "id": "OL19293745W",
+            "title": "Designing Data-Intensive Applications",
+            "subjects": [
+                "Development",
+                "Web site development",
+                "Application software",
+                "Database management",
+                "Databases",
+                "COMPUTERS",
+                "Desktop Applications",
+                "Project Management software",
+                "Application software--development",
+                "Tk5105.888 .q44 2017",
+                "005.276"
+            ]
+        },
+        "id": "OL26780701M",
+        "date_added": "2024-01-17",
+        "date_started": "2024-01-17",
+        "added_by_id": "9781449373320"
+    },
+    {
+        "Title": "Good-Bye to All That: An Autobiography",
+        "Author": "Robert Graves",
+        "ISBN": null,
+        "ISBN13": null,
+        "Publisher": "Anchor Books",
+        "Binding": "Paperback",
+        "Number of Pages": 347,
+        "Year Published": "1958",
+        "Original Publication Year": "1929",
+        "Date Added": "2013-05-07",
+        "Date Started": "2024-01-01",
+        "Read Count": 0
+    },
+    {
+        "Title": "The End of Policing",
+        "Author": "Alex S. Vitale",
+        "ISBN": null,
+        "ISBN13": null,
+        "Publisher": "Verso",
+        "Binding": "Kindle Edition",
+        "Number of Pages": 272,
+        "Year Published": "2017",
+        "Original Publication Year": "2017",
+        "Date Added": "2020-06-05",
+        "Date Started": "2024-01-01",
+        "Read Count": 0
+    },
+    {
+        "Title": "France",
+        "Series": "Lonely Planet",
+        "Author": "Lonely Planet",
+        "ISBN13": "9781788680513",
+        "Publisher": "Lonely Planet Global Limited",
+        "Binding": "Paperback",
+        "Number of Pages": 1021,
+        "Year Published": "2021",
+        "Original Publication Year": "1994",
+        "Date Added": "2024-01-02",
+        "Date Started": "2023-12-25",
+        "Read Count": 0
+    },
+    {
+        "Title": "The Design of Everyday Things",
+        "Author": "Donald A. Norman",
+        "ISBN": "0465067107",
+        "ISBN13": "9780465067107",
+        "Publisher": "Basic Books",
+        "Binding": "Paperback",
+        "Number of Pages": 240,
+        "Year Published": "2002",
+        "Original Publication Year": "1988",
+        "Date Added": "2021-12-01",
+        "Date Started": "2023-12-24",
+        "Read Count": 0
+    }
+]
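The replacement record above follows OpenLibrary's edition shape (lowercase keys, authors/work sub-objects, cover and OCLC IDs), with local bookkeeping fields (date_added, date_started, added_by_id) appended by the import scripts further down. As a rough illustration only, not the importer's actual code, the same edition can be fetched by ISBN from OpenLibrary's public lookup endpoint:

import json
from urllib.request import urlopen

# Public OpenLibrary edition lookup by ISBN; the response carries the
# fields visible in the diff (title, publishers, covers, isbn_13, ...).
with urlopen("https://openlibrary.org/isbn/9781449373320.json") as response:
    edition = json.loads(response.read())

print(edition["title"])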
@@ -1,4 +1,51 @@
 [
+    {
+        "title": "Le D\u00e9clic - Int\u00e9grale noir et blanc",
+        "authors": [
+            {
+                "id": "OL46833A",
+                "name": "Milo Manara"
+            }
+        ],
+        "publish_date": "Sep 02, 2009",
+        "number_of_pages": 240,
+        "publishers": [
+            "\u00c9ditions Gl\u00e9nat"
+        ],
+        "physical_format": "hardcover",
+        "covers": [
+            13995197
+        ],
+        "languages": [
+            "fre"
+        ],
+        "description": "Le monument de l'\u00e9rotisme en bande dessin\u00e9e.\r\n\r\nDans un nouveau format, dans une traduction r\u00e9vis\u00e9e et pr\u00e9sent\u00e9e avec un nouveau lettrage, retrouvez l'int\u00e9grale des quatre volumes du chef-d'oeuvre \u00e9rotique de Milo Manara. En parall\u00e8le \u00e0 la nouvelle \u00e9dition en couleurs des volumes unitaires grand format, voici en un volume unique, pour bibliophiles avertis, la BD culte dans sa pr\u00e9sentation d'origine en noir et blanc. Plus charnel, sensuel et torride que jamais, laissez-vous happer par le tourbillon des sens, et succombez aux d\u00e9lices du plaisir.",
+        "isbn_10": "2723472442",
+        "isbn_13": "9782723472449",
+        "contributors": [
+            {
+                "role": "Translator",
+                "name": "Aurore Schmid"
+            }
+        ],
+        "work": {
+            "id": "OL608217W",
+            "title": "Il gioco",
+            "subjects": [
+                "Erotic literature",
+                "Fiction, erotica",
+                "Comics & graphic novels, general"
+            ],
+            "original_language": "ita"
+        },
+        "id": "OL38885077M",
+        "published_in": "Grenoble",
+        "date_added": "2024-01-16",
+        "date_started": "2024-01-16",
+        "date_finished": "2024-01-16",
+        "added_by_id": "9782723472449",
+        "comments": "Read in French"
+    },
     {
         "title": "Le K\u00e2ma S\u00fbtra",
         "publishers": [
@@ -9570,4 +9617,4 @@
         "Date Finished": "",
         "Read Count": 1
     }
 ]
89007 data/books/wishlist.json
File diff suppressed because it is too large
@@ -1,4 +1,15 @@
 [
+    {
+        "id": 241100,
+        "name": "Mr Bates vs The Post Office",
+        "overview": "One of the greatest miscarriages of justice in British legal history where hundreds of innocent sub-postmasters and postmistresses were wrongly accused of theft, fraud and false accounting due to a defective IT system.",
+        "poster_path": "/cuJy9NE50upYqSqmft3zqleW7eb.jpg",
+        "first_air_date": "2024-01-01",
+        "origin_country": [
+            "GB"
+        ],
+        "date_added": "2024-01-15"
+    },
     {
         "id": 133232,
         "name": "Africa",
@@ -2749,4 +2760,4 @@
     {
         "Show Title": "Yu-Gi-Oh: The Abridged Series"
     }
 ]
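The series entry added above carries the fields a TMDB /search/tv result returns (id, name, overview, poster_path, first_air_date, origin_country) plus a local date_added. For illustration only, assuming a v3 API key in TMDB_API_KEY, a lookup like the one import_from_tmdb_by_details() performs in the scripts below might look like this; the exact request parameters are an assumption, since the diff only shows the URL being built:

import json
import os

import requests

# Same endpoint the scripts target for TV series searches.
response = requests.get(
    "https://api.themoviedb.org/3/search/tv",
    params={"api_key": os.getenv("TMDB_API_KEY"),
            "query": "Mr Bates vs The Post Office"},
)

results = json.loads(response.text)["results"]
print(json.dumps(results[0], indent=4))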
@@ -9,20 +9,44 @@ import re
 import requests
 from urllib.request import urlopen
 
-logging.basicConfig(filename='./logs/run.log', encoding='utf-8', level=logging.DEBUG)
+def setup_logger(name = __name__):
+    logging.root.setLevel(logging.NOTSET)
+
+    logger = logging.getLogger(name)
+
+    c_handler = logging.StreamHandler()
+    f_handler = logging.FileHandler('./logs/run.log')
+
+    c_handler.setLevel(logging.INFO)
+    f_handler.setLevel(logging.ERROR)
+
+    c_format = logging.Formatter('%(name)s - %(levelname)s - %(message)s')
+    f_format = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+
+    c_handler.setFormatter(c_format)
+    f_handler.setFormatter(f_format)
+
+    logger.addHandler(c_handler)
+    logger.addHandler(f_handler)
+
+    return logger
+
+
+logger = setup_logger()
+
 load_dotenv()
 
 TMDB_API_KEY = os.getenv('TMDB_API_KEY')
 TVDB_API_KEY = os.getenv('TVDB_API_KEY')
 
-if "" == TMDB_API_KEY: logging.error("TMDB API key not found")
-if "" == TVDB_API_KEY: logging.error("TVDB API key not found")
+if "" == TMDB_API_KEY: logger.error("TMDB API key not found")
+if "" == TVDB_API_KEY: logger.error("TVDB API key not found")
 
 
 def add_item_to_log(item_id, media_type, log):
     """Add a film, book, TV series or TV episode to a log"""
-    logging.info(f"Processing {item_id}…")
+
+    logger.info(f"Processing {item_id}…")
 
     item = import_by_id(item_id, media_type)
 
@@ -60,7 +84,7 @@ def add_item_to_log(item_id, media_type, log):
     if 'y' != input("\nDoes this look correct? [y]: "): return
 
     # Save changes
-    logging.info(f"Adding {media_type} to {log}…")
+    logger.info(f"Adding {media_type} to {log}…")
 
     with open(f"./data/{media_type}/{log}.json", "r") as log_file:
         log_items = json.load(log_file)
@@ -70,7 +94,7 @@ def add_item_to_log(item_id, media_type, log):
     with open(f"./data/{media_type}/{log}.json", "w") as log_file:
         json.dump(log_items, log_file, indent=4)
 
-    logging.info(f"Added {media_type} {item_id} to {log}")
+    logger.info(f"Added {media_type} {item_id} to {log}")
 
 
 def import_by_id(import_id, media_type):
@@ -100,7 +124,7 @@ def import_from_imdb_by_id(imdb_id, media_type):
 
     # Process the response
     if (200 == response.status_code):
-        logging.info(response.status_code)
+        logger.info(response.status_code)
 
     elif (429 == response.status_code):
         time.sleep(2)
@@ -123,7 +147,7 @@ def import_from_imdb_by_id(imdb_id, media_type):
         raise Exception(f"Returned no results for {imdb_id}")
 
     elif 1 < len(response_data):
-        logging.warning(f"Returned more than one {media_type} for ID '{imdb_id}'")
+        logger.warning(f"Returned more than one {media_type} for ID '{imdb_id}'")
         print(f"Returned more than one {media_type} for ID '{imdb_id}':\n")
         print(json.dumps(response_data, indent=4))
         idx = input("\nEnter the index of the result to use: ")
@@ -150,7 +174,7 @@ def import_from_openlibrary_by_id(isbn, media_type):
 
     # Process the response
     if (200 == response.status_code):
-        logging.info(response.status_code)
+        logger.info(response.status_code)
 
     elif (429 == response.status_code):
         time.sleep(2)
@@ -158,7 +182,7 @@ def import_from_openlibrary_by_id(isbn, media_type):
         return
 
     else:
-        raise Exception(f"Error {reponse.status_code}: {response.text}")
+        raise Exception(f"Error {response.status_code}: {response.text}")
 
     item = json.loads(response.text)
 
@@ -195,7 +219,7 @@ def import_from_openlibrary_by_ol_key(key):
 
     # Process the response
     if (200 == response.status_code):
-        logging.info(response.status_code)
+        logger.info(response.status_code)
 
     elif (429 == response.status_code):
         time.sleep(2)
@@ -203,7 +227,7 @@ def import_from_openlibrary_by_ol_key(key):
         return
 
     else:
-        raise Exception(f"Error {reponse.status_code}: {response.text}")
+        raise Exception(f"Error {response.status_code}: {response.text}")
 
     item = json.loads(response.text)
 
@@ -252,6 +276,7 @@ def cleanup_result(item, media_type):
         'lc_classifications', # OpenLibrary
         'local_id', # OpenLibrary
         'ocaid', # OpenLibrary
+        'oclc_numbers', # OpenLibrary
         'popularity', # TMDB
         'production_code', # TMDB
         'revision', # OpenLibrary
@@ -362,7 +387,7 @@ def main():
             add_item_to_log(imdb_id, media_type, log)
 
         except Exception as error:
-            logging.error(repr(error))
+            logger.exception("Exception occurred")
             print(error)
 
 
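A note on the logging rework above: setup_logger() replaces the old module-level logging.basicConfig() with a named logger carrying two handlers, a StreamHandler that passes INFO and above to the console and a FileHandler that appends ERROR and above to ./logs/run.log; with the root level set to NOTSET, the handler levels do all the filtering. A minimal sketch of the resulting behaviour (it assumes the ./logs directory already exists, since FileHandler does not create directories):

from add_item import setup_logger

logger = setup_logger(__name__)

# Handled by the console handler only; the file handler filters it out.
logger.info("processing started")

# Reaches the console and is appended to ./logs/run.log.
logger.error("something went wrong")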
@@ -1,25 +1,26 @@
 from dotenv import load_dotenv
 import json
-import logging
 import os
 import re
 import requests
 import time
 from urllib.request import urlopen
-from add_item import cleanup_result, import_by_id
+from add_item import cleanup_result, import_by_id, setup_logger
 
-logging.basicConfig(filename='./logs/run.log', encoding='utf-8', level=logging.DEBUG)
+logger = setup_logger(__name__)
 
 load_dotenv()
 
 TMDB_API_KEY = os.getenv('TMDB_API_KEY')
 TVDB_API_KEY = os.getenv('TVDB_API_KEY')
 
-if "" == TMDB_API_KEY: logging.error("TMDB API key not found")
-if "" == TVDB_API_KEY: logging.error("TVDB API key not found")
+if "" == TMDB_API_KEY: logger.warning("TMDB API key not found")
+if "" == TVDB_API_KEY: logger.warning("TVDB API key not found")
 
 def process_log(media_type, log):
-    logging.info(f"Processing {media_type}/{log}…")
+    """Run through a log and call the appropriate API for each item found"""
+
+    logger.info(f"Processing {media_type}/{log}…")
 
     with open(f"./data/{media_type}/{log}.json", "r") as log_file:
         log_items = json.load(log_file)
@@ -33,7 +34,7 @@ def process_log(media_type, log):
         elif 'tv-episodes' == media_type: item_title = item['Episode Title']
         elif 'tv-series' == media_type: item_title = item['Show Title']
 
-        logging.debug(f"Processing {item_title}…")
+        logger.debug(f"Processing {item_title}…")
 
         # Rename pre-existing fields
         if 'Date Added' in item:
@@ -71,7 +72,7 @@ def process_log(media_type, log):
                 season_no = None
                 episode_no = item['episode_number'][1:]
             else:
-                logging.error(f"Invalid episode number format '{item['Episode Number']}'")
+                logger.error(f"Invalid episode number format '{item['Episode Number']}'")
                 return
 
             log_item_values['season_number'] = season_no
@@ -94,7 +95,7 @@ def process_log(media_type, log):
                     json.dump(log_items, log_file, indent=4)
 
             else:
-                logging.warning(f"Skipped {item_title}")
+                logger.warning(f"Skipped {item_title}")
 
         if log_items[i] is not None: log_items[i] |= log_item_values
 
@@ -104,10 +105,12 @@ def process_log(media_type, log):
     with open(f"./data/{media_type}/{log}.json", "w") as log_file:
         json.dump(log_items, log_file, indent=4)
 
-    logging.info(f"Finished processing {media_type}/{log}")
+    logger.info(f"Finished processing {media_type}/{log}")
 
 
 def import_by_details(item, item_title, media_type):
+    """Import an item when lacking a unique identifier"""
+
     if media_type in ['films', 'tv-series']:
         return import_from_tmdb_by_details(item, item_title, media_type)
 
@@ -124,7 +127,7 @@ def import_by_details(item, item_title, media_type):
 def import_from_tmdb_by_details(item, item_title, media_type):
     """Retrieve a film or TV series from TMDB using its title"""
 
-    logging.info(f"Processing {item_title}…")
+    logger.info(f"Processing {item_title}…")
 
     api_url = f"https://api.themoviedb.org/3/search/{'movie' if 'films' == media_type else 'tv'}"
 
@@ -141,12 +144,12 @@ def import_from_tmdb_by_details(item, item_title, media_type):
 
     # Process the response
     if (200 == response.status_code):
-        logging.info(response.status_code)
+        logger.info(response.status_code)
     elif (429 == response.status_code):
         time.sleep(2)
         import_from_tmdb_by_details(item)
     else:
-        logging.error(response.text)
+        logger.error(response.text)
 
     response_data = json.loads(response.text)['results']
 
@@ -154,7 +157,7 @@ def import_from_tmdb_by_details(item, item_title, media_type):
         return cleanup_result(response_data[0], media_type)
 
     elif 0 == len(response_data):
-        logging.warning(f"Returned no {media_type} for {item_title}")
+        logger.warning(f"Returned no {media_type} for {item_title}")
 
     elif 1 < len(response_data):
         if 'films' == media_type: title_key = 'title'
@@ -166,7 +169,7 @@ def import_from_tmdb_by_details(item, item_title, media_type):
             return cleanup_result(response_data[0], media_type)
 
         else:
-            logging.warning(f"Returned more than one {media_type} for '{item_title}':\n")
+            logger.warning(f"Returned more than one {media_type} for '{item_title}':\n")
             print(json.dumps(response_data, indent=4))
             idx = input("\nEnter the index of the result to use: ")
 
@@ -175,7 +178,7 @@ def import_from_tmdb_by_details(item, item_title, media_type):
                 return cleanup_result(response_data[int(idx)], media_type)
 
             except:
-                logging.error("Index invalid!")
+                logger.error("Index invalid!")
                 print("Index invalid!")
 
             item['IMDB ID'] = input(f"Enter IMDB ID for {item_title}: ")
@@ -183,38 +186,46 @@ def import_from_tmdb_by_details(item, item_title, media_type):
             if '' != item['IMDB ID']:
                 return import_by_id(item['IMDB ID'], media_type)
             else:
-                logging.warning(f"Skipped {item_title}")
+                logger.warning(f"Skipped {item_title}")
                 return item
 
 
-media_type = ''
-while media_type not in ['films', 'tv-episodes', 'tv-series', 'books']:
-    media_type = input("Select media type [films|tv-episodes|tv-series|books]: ")
-
-if 'films' == media_type:
-    log = ''
-    while log not in ['log', 'wishlist']:
-        log = input ("Enter log to process [log|wishlist]:")
-
-    process_log(media_type, log)
-
-elif 'books' == media_type:
-    log = ''
-    while log not in ['log', 'current', 'wishlist']:
-        log = input ("Enter log to process [log|current|wishlist]:")
-
-    # TODO
-
-elif 'tv-episodes' == media_type:
-    process_log(media_type, 'log')
-
-    # TODO
-
-elif 'tv-series' == media_type:
-    log = ''
-    while log not in ['log', 'current', 'wishlist']:
-        log = input ("Enter log to process [log|current|wishlist]:")
-
-    process_log(media_type, log)
+def main():
+    media_type = ''
+    while media_type not in ['films', 'tv-episodes', 'tv-series', 'books']:
+        media_type = input("Select media type [films|tv-episodes|tv-series|books]: ")
+
+    try:
+        if 'films' == media_type:
+            log = ''
+            while log not in ['log', 'wishlist']:
+                log = input ("Enter log to process [log|wishlist]:")
+
+            process_log(media_type, log)
+
+        elif 'books' == media_type:
+            log = ''
+            while log not in ['log', 'current', 'wishlist']:
+                log = input ("Enter log to process [log|current|wishlist]:")
+
+            # TODO
+
+        elif 'tv-episodes' == media_type:
+            process_log(media_type, 'log')
+
+            # TODO
+
+        elif 'tv-series' == media_type:
+            log = ''
+            while log not in ['log', 'current', 'wishlist']:
+                log = input ("Enter log to process [log|current|wishlist]:")
+
+            process_log(media_type, log)
+
+    except Exception as error:
+        logger.exception("Exception occurred")
+        print(error)
 
 
+if __name__ == "__main__":
+    main()
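One behavioural consequence of the error-handling change in both scripts: logging.error(repr(error)) recorded only the exception's repr, while logger.exception("Exception occurred") logs at ERROR level and appends the full traceback, so ./logs/run.log now shows where a failure happened. It is intended to be called from inside an except block; a minimal sketch:

import json

from add_item import setup_logger

logger = setup_logger(__name__)

try:
    json.loads("not valid json")
except Exception as error:
    # Records the message at ERROR level plus the full traceback
    # on every handler configured by setup_logger().
    logger.exception("Exception occurred")
    print(error)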