Compare commits: e7c0c8fc6c ... 9dbcc64188
6 commits:
9dbcc64188
70c8d382b0
b4d64ccd34
1c6cde3f4c
4a16cb8490
1f5760073f

10 changed files with 201630 additions and 67852 deletions
.gitignore (vendored): 1 changed line

@@ -7,3 +7,4 @@ public/
 logs/
 .env
 catalogue_venv/
+scripts/caching/

@@ -43,59 +43,287 @@
         "added_by_id": "9781449373320"
     },
     {
-        "Title": "Good-Bye to All That: An Autobiography",
-        "Author": "Robert Graves",
-        "ISBN": null,
-        "ISBN13": null,
-        "Publisher": "Anchor Books",
-        "Binding": "Paperback",
-        "Number of Pages": 347,
-        "Year Published": "1958",
-        "Original Publication Year": "1929",
-        "Date Added": "2013-05-07",
-        "Date Started": "2024-01-01",
-        "Read Count": 0
+        "title": "Goodbye to all that",
+        "authors": [
+            {
+                "id": "OL1097266A",
+                "name": "Robert Graves",
+                "personal_name": "Graves, Robert"
+            }
+        ],
+        "publish_date": "1960",
+        "publishers": [
+            "Penguin"
+        ],
+        "series": [
+            "Penguin twentieth century classics"
+        ],
+        "subjects": [
+            "Graves, Robert, -- 1895-1985."
+        ],
+        "languages": [
+            "eng"
+        ],
+        "edition_name": "Rev. ed. / with a new prologue and epilogue.",
+        "isbn_10": "0140180982",
+        "publish_country": "xxk",
+        "by_statement": "Robert Graves.",
+        "number_of_pages": 282,
+        "covers": [
+            13074261
+        ],
+        "work": {
+            "id": "OL5036954W",
+            "title": "Good-Bye to All That",
+            "subjects": [
+                "Biography",
+                "British Personal narratives",
+                "English Authors",
+                "English Personal narratives",
+                "European War, 1914-1918",
+                "Personal narratives",
+                "World War, 1914-1918",
+                "World War (1914-1918) fast (OCoLC)fst01180746",
+                "Biograf\ufffdia",
+                "Social conditions",
+                "Autores ingleses",
+                "Novela inglesa",
+                "BIOGRAPHY & AUTOBIOGRAPHY",
+                "World war, 1914-1918, personal narratives",
+                "Authors, biography",
+                "Authors, english",
+                "Fiction, general"
+            ]
+        },
+        "id": "OL18237727M",
+        "published_in": [
+            "Harmondsworth"
+        ],
+        "date_added": "2024-02-03"
     },
     {
-        "Title": "The End of Policing",
-        "Author": "Alex S. Vitale",
-        "ISBN": null,
-        "ISBN13": null,
-        "Publisher": "Verso",
-        "Binding": "Kindle Edition",
-        "Number of Pages": 272,
-        "Year Published": "2017",
-        "Original Publication Year": "2017",
-        "Date Added": "2020-06-05",
-        "Date Started": "2024-01-01",
-        "Read Count": 0
+        "title": "The End of Policing",
+        "publishers": [
+            "Verso Books"
+        ],
+        "publish_date": "October 2017",
+        "isbn_13": "9781784782900",
+        "languages": [
+            "eng"
+        ],
+        "physical_format": "Ebook",
+        "work": {
+            "id": "OL19735566W",
+            "title": "The End of Policing",
+            "subjects": [
+                "Police",
+                "Police misconduct",
+                "black lives matter",
+                "Police-community relations",
+                "Police brutality",
+                "African Americans",
+                "Violence against",
+                "Social conditions",
+                "Discrimination in criminal justice administration",
+                "Race relations",
+                "POLITICAL SCIENCE",
+                "Political Freedom & Security",
+                "Law Enforcement",
+                "SOCIAL SCIENCE",
+                "Discrimination & Race Relations",
+                "Public Policy",
+                "General",
+                "BUSINESS & ECONOMICS",
+                "Infrastructure",
+                "POLITICAL SCIENCE / Political Freedom & Security / Law Enforcement",
+                "SOCIAL SCIENCE / Discrimination & Race Relations",
+                "POLITICAL SCIENCE / Public Policy / General"
+            ]
+        },
+        "id": "OL50982392M",
+        "date_added": "2024-02-03"
     },
     {
-        "Title": "France",
-        "Series": "Lonely Planet",
-        "Author": "Lonely Planet",
-        "ISBN13": "9781788680513",
-        "Publisher": "Lonely Planet Global Limited",
-        "Binding": "Paperback",
-        "Number of Pages": 1021,
-        "Year Published": "2021",
-        "Original Publication Year": "1994",
-        "Date Added": "2024-01-02",
-        "Date Started": "2023-12-25",
-        "Read Count": 0
+        "title": "France",
+        "publishers": [
+            "Loneley Planet Global"
+        ],
+        "publish_date": "November 2021",
+        "covers": [
+            14575043
+        ],
+        "edition_name": "Fourteenth edition",
+        "languages": [
+            "eng"
+        ],
+        "physical_format": "Paperback",
+        "number_of_pages": 1024,
+        "contributors": [
+            {
+                "role": "Additional Author (this edition)",
+                "name": "Joel Balsam"
+            },
+            {
+                "role": "Additional Author (this edition)",
+                "name": "Alexis Averbuck"
+            },
+            {
+                "role": "Additional Author (this edition)",
+                "name": "Oliver Berry"
+            },
+            {
+                "role": "Additional Author (this edition)",
+                "name": "Celeste Brash"
+            },
+            {
+                "role": "Additional Author (this edition)",
+                "name": "Stuart Butler"
+            },
+            {
+                "role": "Additional Author (this edition)",
+                "name": "Jean-Bernard Carillet"
+            },
+            {
+                "role": "Additional Author (this edition)",
+                "name": "Gregor Clark"
+            },
+            {
+                "role": "Additional Author (this edition)",
+                "name": "Mark Elliott"
+            },
+            {
+                "role": "Additional Author (this edition)",
+                "name": "Steve Fallon"
+            },
+            {
+                "role": "Additional Author (this edition)",
+                "name": "Anita Isalska"
+            },
+            {
+                "role": "Additional Author (this edition)",
+                "name": "Catherine Le Nevez"
+            },
+            {
+                "role": "Additional Author (this edition)",
+                "name": "Christopher Pitts"
+            },
+            {
+                "role": "Additional Author (this edition)",
+                "name": "Regis St Louis"
+            },
+            {
+                "role": "Additional Author (this edition)",
+                "name": "Ryan Ver Berkmoes"
+            }
+        ],
+        "isbn_13": "9781788680523",
+        "work": {
+            "id": "OL15419603W",
+            "title": "France",
+            "subjects": [
+                "Guidebooks",
+                "Travel - Foreign",
+                "Special Interest - Family",
+                "Travel",
+                "France",
+                "Travel & holiday guides",
+                "Europe - France"
+            ]
+        },
+        "id": "OL50982390M",
+        "date_added": "2024-02-03"
     },
     {
-        "Title": "The Design of Everyday Things",
-        "Author": "Donald A. Norman",
-        "ISBN": "0465067107",
-        "ISBN13": "9780465067107",
-        "Publisher": "Basic Books",
-        "Binding": "Paperback",
-        "Number of Pages": 240,
-        "Year Published": "2002",
-        "Original Publication Year": "1988",
-        "Date Added": "2021-12-01",
-        "Date Started": "2023-12-24",
-        "Read Count": 0
+        "edition_name": "1st Basic paperback.",
+        "title": "The design of everyday things",
+        "languages": [
+            "eng"
+        ],
+        "subjects": [
+            "Design, Industrial -- Psychological aspects",
+            "Human engineering"
+        ],
+        "publish_country": "nyu",
+        "by_statement": "Donald A. Norman.",
+        "publishers": [
+            "Basic Books"
+        ],
+        "authors": [
+            {
+                "id": "OL224976A",
+                "name": "Donald A. Norman"
+            }
+        ],
+        "number_of_pages": 257,
+        "publish_date": "2002",
+        "work_title": [
+            "Psychology of everyday things"
+        ],
+        "isbn_10": "0465067107",
+        "covers": [
+            14428210
+        ],
+        "work": {
+            "id": "OL1879162W",
+            "title": "The Psychology of Everyday Things",
+            "first_publish_date": "August 1998",
+            "subjects": [
+                "Ergonomie",
+                "Industrial Psychology",
+                "Industri\u00eble vormgeving",
+                "Industrial Design",
+                "Psychological aspects",
+                "Psychology textbooks",
+                "Psychological aspects of Industrial design",
+                "Textbooks",
+                "Aspect psychologique",
+                "Design",
+                "Humanities textbooks",
+                "Projetos (administracao)",
+                "Human engineering",
+                "Long Now Manual for Civilization",
+                "New York Times reviewed",
+                "Cognitive psychology",
+                "Livres num\u00e9riques",
+                "Ergonomics",
+                "E-books",
+                "BUSINESS & ECONOMICS",
+                "Industries",
+                "Retailing",
+                "Product",
+                "PSYCHOLOGY",
+                "Applied Psychology",
+                "Electronic books",
+                "Industriell formgivning",
+                "Psykologiska aspekter",
+                "Industries / Retailing",
+                "Mechanical Engineering",
+                "Engineering & Applied Sciences",
+                "Industrial & Management Engineering",
+                "Nonfiction",
+                "Art",
+                "Business",
+                "Industrial design--psychological aspects",
+                "Psychology, industrial",
+                "Ts171.4 .n67 1990",
+                "Ts 171.4 n842p 1990",
+                "620.8/2",
+                "Business & economics--industries--retailing",
+                "Psychology--applied psychology",
+                "Design--product",
+                "Industriell formgivning--psykologiska aspekter",
+                "Ts171.4 .n67 2013",
+                "745.2019",
+                "745.2001/9",
+                "Bus057000 psy003000 des011000"
+            ]
+        },
+        "id": "OL3702614M",
+        "published_in": [
+            "New York"
+        ],
+        "date_added": "2021-12-01",
+        "date_started": "2023-12-24",
+        "read_count": 0
     }
 ]

data/books/log.json: 29537 changed lines
File diff suppressed because it is too large.

data/books/wishlist.json: 184669 changed lines
File diff suppressed because it is too large.

@@ -1,4 +1,94 @@
 [
+    {
+        "belongs_to_collection": null,
+        "genres": [
+            {
+                "id": 99,
+                "name": "Documentary"
+            }
+        ],
+        "id": 465383,
+        "imdb_id": "tt7162400",
+        "original_language": "ar",
+        "original_title": "Ouroboros",
+        "overview": "This film is an homage to the Gaza Strip and to the possibility of hope beyond hopelessness. Ouroboros, the symbol of the snake eating its tail, is both end and beginning: death as regeneration. A 74-minute experimental narrative film that turns the destruction of Gaza into a story of heartbreak, Ouroboros asks what it means to be human when humanity has failed. Taking the form of a love story, the film's central character is Diego Marcon, a man who embarks on a circular journey to shed his pain only to experience it, again and again. In the course of a single day, his travel fuses together Native American territories, the ancient Italian city of Matera, a castle in Brittany, and the ruins of the Gaza Strip into a single landscape.",
+        "poster_path": "/37W4z9dkK77OTLMbDO9KvVADxXX.jpg",
+        "production_countries": [
+            {
+                "iso_3166_1": "BE",
+                "name": "Belgium"
+            },
+            {
+                "iso_3166_1": "FR",
+                "name": "France"
+            },
+            {
+                "iso_3166_1": "PS",
+                "name": "Palestinian Territory"
+            },
+            {
+                "iso_3166_1": "QA",
+                "name": "Qatar"
+            }
+        ],
+        "release_date": "2017-04-29",
+        "spoken_languages": [
+            {
+                "english_name": "Arabic",
+                "iso_639_1": "ar",
+                "name": "\u0627\u0644\u0639\u0631\u0628\u064a\u0629"
+            },
+            {
+                "english_name": "English",
+                "iso_639_1": "en",
+                "name": "English"
+            },
+            {
+                "english_name": "Italian",
+                "iso_639_1": "it",
+                "name": "Italiano"
+            }
+        ],
+        "title": "Ouroboros",
+        "date_added": "2024-02-03",
+        "date_finished": "2024-02-03",
+        "added_by_id": "465383",
+        "comments": "Radical Approaches to Filmmaking course"
+    },
+    {
+        "belongs_to_collection": null,
+        "genres": [
+            {
+                "id": 18,
+                "name": "Drama"
+            }
+        ],
+        "id": 44967,
+        "imdb_id": "tt0015361",
+        "original_language": "ru",
+        "original_title": "\u0421\u0442\u0430\u0447\u043a\u0430",
+        "overview": "Workers in a factory in pre-revolutionary Russia go on strike and are met by violent suppression.",
+        "poster_path": "/mrDTaMxQKxWZ3uEs2G58vJWJDDL.jpg",
+        "production_countries": [
+            {
+                "iso_3166_1": "SU",
+                "name": "Soviet Union"
+            }
+        ],
+        "release_date": "1925-04-28",
+        "spoken_languages": [
+            {
+                "english_name": "No Language",
+                "iso_639_1": "xx",
+                "name": "No Language"
+            }
+        ],
+        "title": "Strike",
+        "date_added": "2024-02-03",
+        "date_finished": "2024-02-03",
+        "added_by_id": "44967",
+        "comments": "Radical Approaches to Filmmaking course"
+    },
     {
         "id": 14572,
         "original_language": "ja",
@@ -15996,4 +16086,4 @@
         "date_finished": null,
         "is_repeat": false
     }
 ]

@@ -1,27 +1,29 @@
 [
     {
-        "Title": "Hellblade: Senua's Sacrifice",
-        "Series": "Hellblade",
-        "Developers": "Ninja Theory Ltd.",
-        "Date Released": "2017-08-08",
-        "Date Started": "2024-01-21",
-        "GiantBomb ID": "47363"
-    },
-    {
-        "Title": "Ancestors: The Humankind Odyssey",
-        "Date Started": "2023-08-08",
-        "Date Finished": "",
-        "Developers": "Panache Digital Games",
-        "Date Released": "2019-08-27",
-        "GiantBomb ID": "49527"
-    },
-    {
-        "Title": "TIS-100",
-        "Platforms": "PC",
-        "Date Started": "2016-12-24",
-        "Date Finished": "",
-        "Developers": "Zachtronics Industries",
-        "Date Released": "2015-07-20",
-        "GiantBomb ID": "49901"
-    }
-]
+        "Title": "Death Stranding",
+        "Date Started": "2024-01-25",
+        "Platforms": "PC",
+        "Developers": "Kojima Productions",
+        "Date Released": "2019-11-08",
+        "GiantBomb ID": "54232",
+        "date_added": "2019-12-04T21:27:08Z"
+    },
+    {
+        "Title": "Ancestors: The Humankind Odyssey",
+        "Platform": "PC",
+        "Date Started": "2023-08-08",
+        "Developers": "Panache Digital Games",
+        "Date Released": "2019-08-27",
+        "GiantBomb ID": "49527",
+        "date_added": "2020-05-24T18:26:59Z"
+    },
+    {
+        "Title": "TIS-100",
+        "Platforms": "PC",
+        "Date Started": "2016-12-24",
+        "Developers": "Zachtronics Industries",
+        "Date Released": "2015-07-20",
+        "GiantBomb ID": "49901",
+        "date_added": "2020-01-06T12:41:38Z"
+    }
+]

data/games/log.json: 8959 changed lines
File diff suppressed because it is too large.

data/games/wishlist.json: 45658 changed lines
File diff suppressed because it is too large.

scripts/add_date_added_to_games.py (new file): 61 added lines

@@ -0,0 +1,61 @@
+import json
+
+with open(f"./scripts/grouvee.json", "r", encoding="utf-8") as log_file:
+    orig_log_items = json.load(log_file)
+
+for log in ["log", "current", "wishlist"]:
+    print(f"Processing {log}…")
+
+    with open(f"./data/games/{log}.json", "r", encoding="utf-8") as log_file:
+        log_items = json.load(log_file)
+
+    for i, item in enumerate(log_items):
+        print(f"Processing {item['Title']}...")
+
+        if "GiantBomb ID" in item:
+            orig_item = [""]
+            if "" != item["GiantBomb ID"]:
+                orig_item = [
+                    orig_item
+                    for orig_item in orig_log_items
+                    if orig_item["giantbomb_id"] == int(item["GiantBomb ID"])
+                ]
+
+            elif "" == item["GiantBomb ID"]:
+                orig_item = [
+                    orig_item
+                    for orig_item in orig_log_items
+                    if orig_item["name"] == item["Title"]
+                ]
+
+            if [] == orig_item:
+                print(f"No item {item['Title']} found in original log!")
+                log_items[i] = item
+                break
+
+            elif 1 < len(orig_item):
+                raise Exception(f"Multiple items returned for {item['Title']}!")
+
+            else:
+                orig_item = orig_item[0]
+
+                if "Wish List" in orig_item["shelves"]:
+                    item["date_added"] = orig_item["shelves"]["Wish List"]["date_added"]
+
+                elif "Backlog" in orig_item["shelves"]:
+                    item["date_added"] = orig_item["shelves"]["Backlog"]["date_added"]
+
+                elif "Played" in orig_item["shelves"] and "log" == log:
+                    item["date_added"] = orig_item["shelves"]["Played"]["date_added"]
+
+                else:
+                    print(f"No date_added for {item['Title']}!")
+
+                log_items[i] = item
+
+        print(f"Finished processing {item['Title']}.")
+
+    with open(f"./data/games/{log}.json", "w", encoding="utf-8") as log_file:
+        json.dump(log_items, log_file, indent=4)
+
+    print(f"Finished processing {log}.")

@@ -13,6 +13,7 @@ from dotenv import load_dotenv

 authors = []

+
 def setup_logger(name="add_item"):
     """Set up the logger for console and file"""

@@ -50,14 +51,18 @@ if "" == TVDB_API_KEY:
     logger.error("TVDB API key not found")


-def return_if_exists(item_id, media_type, log) -> dict|None:
+def return_if_exists(item_id, media_type, log) -> dict | None:
     """Returns an item if it exists in the requested log"""

     logger.info(f"Checking for '{item_id}' in '{log}'…")
-    with open(f"./data/{media_type}/{log}.json", "r", encoding='utf-8') as log_file:
+    with open(f"./data/{media_type}/{log}.json", "r", encoding="utf-8") as log_file:
         log_items = json.load(log_file)

-    existing_items = [log_item for log_item in log_items if "id" in log_item and log_item['id'] == int(item_id)]
+    existing_items = [
+        log_item
+        for log_item in log_items
+        if "id" in log_item and log_item["id"] == int(item_id)
+    ]
     if len(existing_items) > 0:
         logger.info(f"Found item in '{log}'")
         return existing_items[-1]
@@ -68,15 +73,19 @@ def delete_existing(item_id, media_type, log) -> None:
     """Deletes an item from a log if it matches the ID"""

     logger.info(f"Deleting '{item_id}' from '{log}'…")
-    with open(f"./data/{media_type}/{log}.json", "r", encoding='utf-8') as log_file:
+    with open(f"./data/{media_type}/{log}.json", "r", encoding="utf-8") as log_file:
         log_items = json.load(log_file)

     old_len = len(log_items)
-    log_items = [log_item for log_item in log_items if "id" not in log_item or ("id" in log_item and log_item['id'] != int(item_id))]
+    log_items = [
+        log_item
+        for log_item in log_items
+        if "id" not in log_item or ("id" in log_item and log_item["id"] != int(item_id))
+    ]
     if len(log_items) < (old_len - 1):
         raise Exception("More than one deletion made, discarding…")

-    with open(f"./data/{media_type}/{log}.json", "w", encoding='utf-8') as log_file:
+    with open(f"./data/{media_type}/{log}.json", "w", encoding="utf-8") as log_file:
         json.dump(log_items, log_file, indent=4)
     logger.info(f"'{item_id}' deleted from '{log}'")

@@ -94,8 +103,15 @@ def check_for_existing(item_id, media_type, log) -> dict[dict, str]:
         existing_item["is_repeat"] = True
         return existing_item, None

-    for log_to_check in [p_log for p_log in ["log", "current", "wishlist"] if p_log != log]:
-        if ("current" == log_to_check and media_type in ["books", "games", "tv-series"]) or ("wishlist" == log_to_check and media_type in ["books", "games", "films", "tv-series"]):
+    for log_to_check in [
+        p_log for p_log in ["log", "current", "wishlist"] if p_log != log
+    ]:
+        if (
+            "current" == log_to_check and media_type in ["books", "games", "tv-series"]
+        ) or (
+            "wishlist" == log_to_check
+            and media_type in ["books", "games", "films", "tv-series"]
+        ):
             existing_item = return_if_exists(item_id, media_type, log_to_check)
             if existing_item is not None:
                 return existing_item, log_to_check
@@ -156,12 +172,12 @@ def add_item_to_log(item_id, media_type, log) -> None:
     # Save changes
     logger.info(f"Adding {media_type} to {log}…")

-    with open(f"./data/{media_type}/{log}.json", "r", encoding='utf-8') as log_file:
+    with open(f"./data/{media_type}/{log}.json", "r", encoding="utf-8") as log_file:
         log_items = json.load(log_file)

     log_items.insert(0, item)

-    with open(f"./data/{media_type}/{log}.json", "w", encoding='utf-8') as log_file:
+    with open(f"./data/{media_type}/{log}.json", "w", encoding="utf-8") as log_file:
         json.dump(log_items, log_file, indent=4)

     logger.info(f"Added {media_type} {item_id} to {log}")
@@ -177,10 +193,12 @@ def import_by_id(import_id, media_type) -> dict:
         return import_from_tmdb_by_id(import_id, media_type)

     if media_type in ["tv-episodes"]:
-        return #import_from_tvdb_by_id(import_id, media_type)
+        return  # import_from_tvdb_by_id(import_id, media_type)

     if media_type in ["books"]:
-        return import_from_openlibrary_by_id(import_id, media_type)
+        return import_from_openlibrary_by_id(
+            "".join(re.findall(r"\d+", import_id)), media_type
+        )


 def import_from_tmdb_by_id(tmdb_id, media_type) -> dict:
@@ -191,9 +209,7 @@ def import_from_tmdb_by_id(tmdb_id, media_type) -> dict:

     # Sending API request
     response = requests.get(
-        api_url,
-        headers={"Authorization": f"Bearer {TMDB_API_KEY}"},
-        timeout=15
+        api_url, headers={"Authorization": f"Bearer {TMDB_API_KEY}"}, timeout=15
     )

     # Process the response
@@ -212,14 +228,8 @@

     response_data = json.loads(response.text)

-    if 1 == len(response_data):
-        item = response_data[0]
-
-    elif 0 == len(response_data):
-        raise Exception(f"Returned no results for {tmdb_id}")
-
     # Modify the returned result to add additional data
-    return cleanup_result(item, media_type)
+    return cleanup_result(response_data, media_type)


 def import_from_openlibrary_by_id(isbn, media_type) -> dict:
@@ -253,10 +263,12 @@ def import_from_openlibrary_by_id(isbn, media_type) -> dict:
            for i, sub_item in enumerate(item[key]):
                item[key][i] = import_from_openlibrary_by_ol_key(sub_item["key"])


     if "works" in item:
         if len(item["works"]) > 1:
-            raise Exception(f"Multiple works found for {isbn}")
+            print(f"Multiple works found for {isbn}:")
+            print(item["works"])
+            idx = input(f"Select ID to use [0-{len(item['works'])-1}]: ")
+            item["works"][0] = item["works"][int(idx)]

         item["work"] = item["works"][0]
         del item["works"]
@@ -275,20 +287,28 @@ def import_from_openlibrary_by_ol_key(key) -> dict:
     _, mode, ol_id = key.split("/")

     if "authors" == mode:
-        with open(f"./scripts/caching/authors.json", "r", encoding='utf-8') as authors_cache:
+        with open(
+            f"./scripts/caching/authors.json", "r", encoding="utf-8"
+        ) as authors_cache:
             cached_authors = json.load(authors_cache)

     if mode in ["works", "authors"]:
         if "authors" == mode:
-            matched_cached_authors = [aut for aut in cached_authors if aut['id'] == ol_id]
+            matched_cached_authors = [
+                aut for aut in cached_authors if aut["id"] == ol_id
+            ]
             if len(matched_cached_authors) == 1:
-                logging.info(f"Found cached author '{matched_cached_authors[0]['name']}'")
+                logging.info(
+                    f"Found cached author '{matched_cached_authors[0]['name']}'"
+                )
                 return matched_cached_authors[0]

     api_url = f"https://openlibrary.org{key}"

     # Sending API request
-    response = requests.get(api_url, headers={"accept": "application/json"}, timeout=15)
+    response = requests.get(
+        api_url, headers={"accept": "application/json"}, timeout=15
+    )

     # Process the response
     if 200 == response.status_code:
@@ -316,9 +336,7 @@ def import_from_openlibrary_by_ol_key(key) -> dict:
            logger.info(f"Caching author '{author['name']}'…")
            cached_authors.append(author)
            with open(
-               f"./scripts/caching/authors.json",
-               "w",
-               encoding='utf-8'
+               f"./scripts/caching/authors.json", "w", encoding="utf-8"
            ) as authors_cache:
                json.dump(cached_authors, authors_cache, indent=4)
            logger.info(f"Author '{author['name']}' cached!")
@@ -345,39 +363,45 @@ def cleanup_result(item, media_type) -> dict:
     for field_name in [
         "adult",  # TMDB
         "backdrop_path",  # TMDB
+        "budget",  # TMDB
         "copyright_date",  # OpenLibrary
         "classifications",  # OpenLibrary
         "created",  # OpenLibrary
         "dewey_decimal_class",  # OpenLibary
         "episode_type",  # TMDB
         "first_sentence",  # OpenLibrary
         "genre_ids",  # TMDB
+        "homepage",  # TMDB
         "identifiers",  # OpenLibrary
         "media_type",  # TMDB
         "last_modified",  # OpenLibrary
         "latest_revision",  # OpenLibrary
         "lc_classifications",  # OpenLibrary
         "lccn",  # OpenLibrary
         "local_id",  # OpenLibrary
         "notes",  # OpenLibrary
         "ocaid",  # OpenLibrary
         "oclc_numbers",  # OpenLibrary
         "pagination",  # OpenLibrary
         "physical_dimensions",  # OpenLibrary
         "popularity",  # TMDB
         "production_code",  # TMDB
+        "production_companies",  # TMDB
+        "revenue",  # TMDB
         "revision",  # OpenLibrary
         "runtime",  # TMDB
         "source_records",  # OpenLibrary
+        "status",  # TMDB
         "still_path",  # TMDB
         "table_of_contents",  # OpenLibrary
+        "tagline",  # TMDB
         "type",  # OpenLibrary
         "uri_descriptions",  # OpenLibrary
         "url",  # OpenLibrary
         "video",  # TMDB
         "vote_average",  # TMDB
         "vote_count",  # TMDB
         "weight",  # OpenLibrary
     ]:
         if field_name in item:
             del item[field_name]
@@ -413,21 +437,28 @@ def cleanup_result(item, media_type) -> dict:
     ]

     if "translation_of" in item:
-        if item["translation_of"].split(":")[0].lower() == item["work"]["title"].split(":")[0].lower():
-            del item["translation_of"]
-        else:
-            raise Exception(
+        if not (
+            item["translation_of"].split(":")[0].lower()
+            == item["work"]["title"].split(":")[0].lower()
+        ):
+            logger.warn(
                 f"translation_of '{item['translation_of']}' \
                     is different to work title '{item['work']['title']}'"
             )
+            if 'y' != input("Accept change? [y|n]: "):
+                raise Exception(
+                    f"translation_of '{item['translation_of']}' \
+                        is different to work title '{item['work']['title']}'"
+                )
+        del item["translation_of"]

     if "translated_from" in item:
         if len(item["translated_from"]) > 1:
             raise Exception("Multiple translated_from results")

-        item["work"]["original_language"] = item["translated_from"][0][
-            "key"
-        ].split("/")[2]
+        item["work"]["original_language"] = item["translated_from"][0]["key"].split(
+            "/"
+        )[2]
         del item["translated_from"]

     if "date_added" not in item:
@@ -459,7 +490,7 @@ def main() -> None:
         log = input("Enter log to update [log|current|wishlist]: ")

         while re.search("[0-9]+", item_id) is None:
-            item_id = input("Enter ISBN: ")
+            item_id = "".join(re.findall(r"\d+", input("Enter ISBN: ")))

     elif "tv-episodes" == media_type:
         log = "log"