Compare commits

...

6 commits

Author SHA1 Message Date
9dbcc64188 update log 2024-02-03 23:25:32 +00:00
70c8d382b0 ignore script cache files 2024-02-03 23:25:25 +00:00
b4d64ccd34 fix script 2024-02-03 23:25:14 +00:00
1c6cde3f4c process logs 2024-02-03 23:25:04 +00:00
4a16cb8490 re-add date_added field from Grouvee export 2024-02-03 16:03:15 +00:00
1f5760073f update logs 2024-02-03 15:31:41 +00:00
10 changed files with 201630 additions and 67852 deletions

.gitignore
View file

@@ -7,3 +7,4 @@ public/
logs/ logs/
.env .env
catalogue_venv/ catalogue_venv/
scripts/caching/

View file

@@ -43,59 +43,287 @@
"added_by_id": "9781449373320" "added_by_id": "9781449373320"
}, },
{ {
"Title": "Good-Bye to All That: An Autobiography", "title": "Goodbye to all that",
"Author": "Robert Graves", "authors": [
"ISBN": null, {
"ISBN13": null, "id": "OL1097266A",
"Publisher": "Anchor Books", "name": "Robert Graves",
"Binding": "Paperback", "personal_name": "Graves, Robert"
"Number of Pages": 347, }
"Year Published": "1958", ],
"Original Publication Year": "1929", "publish_date": "1960",
"Date Added": "2013-05-07", "publishers": [
"Date Started": "2024-01-01", "Penguin"
"Read Count": 0 ],
"series": [
"Penguin twentieth century classics"
],
"subjects": [
"Graves, Robert, -- 1895-1985."
],
"languages": [
"eng"
],
"edition_name": "Rev. ed. / with a new prologue and epilogue.",
"isbn_10": "0140180982",
"publish_country": "xxk",
"by_statement": "Robert Graves.",
"number_of_pages": 282,
"covers": [
13074261
],
"work": {
"id": "OL5036954W",
"title": "Good-Bye to All That",
"subjects": [
"Biography",
"British Personal narratives",
"English Authors",
"English Personal narratives",
"European War, 1914-1918",
"Personal narratives",
"World War, 1914-1918",
"World War (1914-1918) fast (OCoLC)fst01180746",
"Biograf\ufffdia",
"Social conditions",
"Autores ingleses",
"Novela inglesa",
"BIOGRAPHY & AUTOBIOGRAPHY",
"World war, 1914-1918, personal narratives",
"Authors, biography",
"Authors, english",
"Fiction, general"
]
},
"id": "OL18237727M",
"published_in": [
"Harmondsworth"
],
"date_added": "2024-02-03"
}, },
{ {
"Title": "The End of Policing", "title": "The End of Policing",
"Author": "Alex S. Vitale", "publishers": [
"ISBN": null, "Verso Books"
"ISBN13": null, ],
"Publisher": "Verso", "publish_date": "October 2017",
"Binding": "Kindle Edition", "isbn_13": "9781784782900",
"Number of Pages": 272, "languages": [
"Year Published": "2017", "eng"
"Original Publication Year": "2017", ],
"Date Added": "2020-06-05", "physical_format": "Ebook",
"Date Started": "2024-01-01", "work": {
"Read Count": 0 "id": "OL19735566W",
"title": "The End of Policing",
"subjects": [
"Police",
"Police misconduct",
"black lives matter",
"Police-community relations",
"Police brutality",
"African Americans",
"Violence against",
"Social conditions",
"Discrimination in criminal justice administration",
"Race relations",
"POLITICAL SCIENCE",
"Political Freedom & Security",
"Law Enforcement",
"SOCIAL SCIENCE",
"Discrimination & Race Relations",
"Public Policy",
"General",
"BUSINESS & ECONOMICS",
"Infrastructure",
"POLITICAL SCIENCE / Political Freedom & Security / Law Enforcement",
"SOCIAL SCIENCE / Discrimination & Race Relations",
"POLITICAL SCIENCE / Public Policy / General"
]
},
"id": "OL50982392M",
"date_added": "2024-02-03"
}, },
{ {
"Title": "France", "title": "France",
"Series": "Lonely Planet", "publishers": [
"Author": "Lonely Planet", "Loneley Planet Global"
"ISBN13": "9781788680513", ],
"Publisher": "Lonely Planet Global Limited", "publish_date": "November 2021",
"Binding": "Paperback", "covers": [
"Number of Pages": 1021, 14575043
"Year Published": "2021", ],
"Original Publication Year": "1994", "edition_name": "Fourteenth edition",
"Date Added": "2024-01-02", "languages": [
"Date Started": "2023-12-25", "eng"
"Read Count": 0 ],
"physical_format": "Paperback",
"number_of_pages": 1024,
"contributors": [
{
"role": "Additional Author (this edition)",
"name": "Joel Balsam"
}, },
{ {
"Title": "The Design of Everyday Things", "role": "Additional Author (this edition)",
"Author": "Donald A. Norman", "name": "Alexis Averbuck"
"ISBN": "0465067107", },
"ISBN13": "9780465067107", {
"Publisher": "Basic Books", "role": "Additional Author (this edition)",
"Binding": "Paperback", "name": "Oliver Berry"
"Number of Pages": 240, },
"Year Published": "2002", {
"Original Publication Year": "1988", "role": "Additional Author (this edition)",
"Date Added": "2021-12-01", "name": "Celeste Brash"
"Date Started": "2023-12-24", },
"Read Count": 0 {
"role": "Additional Author (this edition)",
"name": "Stuart Butler"
},
{
"role": "Additional Author (this edition)",
"name": "Jean-Bernard Carillet"
},
{
"role": "Additional Author (this edition)",
"name": "Gregor Clark"
},
{
"role": "Additional Author (this edition)",
"name": "Mark Elliott"
},
{
"role": "Additional Author (this edition)",
"name": "Steve Fallon"
},
{
"role": "Additional Author (this edition)",
"name": "Anita Isalska"
},
{
"role": "Additional Author (this edition)",
"name": "Catherine Le Nevez"
},
{
"role": "Additional Author (this edition)",
"name": "Christopher Pitts"
},
{
"role": "Additional Author (this edition)",
"name": "Regis St Louis"
},
{
"role": "Additional Author (this edition)",
"name": "Ryan Ver Berkmoes"
}
],
"isbn_13": "9781788680523",
"work": {
"id": "OL15419603W",
"title": "France",
"subjects": [
"Guidebooks",
"Travel - Foreign",
"Special Interest - Family",
"Travel",
"France",
"Travel & holiday guides",
"Europe - France"
]
},
"id": "OL50982390M",
"date_added": "2024-02-03"
},
{
"edition_name": "1st Basic paperback.",
"title": "The design of everyday things",
"languages": [
"eng"
],
"subjects": [
"Design, Industrial -- Psychological aspects",
"Human engineering"
],
"publish_country": "nyu",
"by_statement": "Donald A. Norman.",
"publishers": [
"Basic Books"
],
"authors": [
{
"id": "OL224976A",
"name": "Donald A. Norman"
}
],
"number_of_pages": 257,
"publish_date": "2002",
"work_title": [
"Psychology of everyday things"
],
"isbn_10": "0465067107",
"covers": [
14428210
],
"work": {
"id": "OL1879162W",
"title": "The Psychology of Everyday Things",
"first_publish_date": "August 1998",
"subjects": [
"Ergonomie",
"Industrial Psychology",
"Industri\u00eble vormgeving",
"Industrial Design",
"Psychological aspects",
"Psychology textbooks",
"Psychological aspects of Industrial design",
"Textbooks",
"Aspect psychologique",
"Design",
"Humanities textbooks",
"Projetos (administracao)",
"Human engineering",
"Long Now Manual for Civilization",
"New York Times reviewed",
"Cognitive psychology",
"Livres num\u00e9riques",
"Ergonomics",
"E-books",
"BUSINESS & ECONOMICS",
"Industries",
"Retailing",
"Product",
"PSYCHOLOGY",
"Applied Psychology",
"Electronic books",
"Industriell formgivning",
"Psykologiska aspekter",
"Industries / Retailing",
"Mechanical Engineering",
"Engineering & Applied Sciences",
"Industrial & Management Engineering",
"Nonfiction",
"Art",
"Business",
"Industrial design--psychological aspects",
"Psychology, industrial",
"Ts171.4 .n67 1990",
"Ts 171.4 n842p 1990",
"620.8/2",
"Business & economics--industries--retailing",
"Psychology--applied psychology",
"Design--product",
"Industriell formgivning--psykologiska aspekter",
"Ts171.4 .n67 2013",
"745.2019",
"745.2001/9",
"Bus057000 psy003000 des011000"
]
},
"id": "OL3702614M",
"published_in": [
"New York"
],
"date_added": "2021-12-01",
"date_started": "2023-12-24",
"read_count": 0
} }
] ]

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@@ -1,4 +1,94 @@
[ [
{
"belongs_to_collection": null,
"genres": [
{
"id": 99,
"name": "Documentary"
}
],
"id": 465383,
"imdb_id": "tt7162400",
"original_language": "ar",
"original_title": "Ouroboros",
"overview": "This film is an homage to the Gaza Strip and to the possibility of hope beyond hopelessness. Ouroboros, the symbol of the snake eating its tail, is both end and beginning: death as regeneration. A 74-minute experimental narrative film that turns the destruction of Gaza into a story of heartbreak, Ouroboros asks what it means to be human when humanity has failed. Taking the form of a love story, the film's central character is Diego Marcon, a man who embarks on a circular journey to shed his pain only to experience it, again and again. In the course of a single day, his travel fuses together Native American territories, the ancient Italian city of Matera, a castle in Brittany, and the ruins of the Gaza Strip into a single landscape.",
"poster_path": "/37W4z9dkK77OTLMbDO9KvVADxXX.jpg",
"production_countries": [
{
"iso_3166_1": "BE",
"name": "Belgium"
},
{
"iso_3166_1": "FR",
"name": "France"
},
{
"iso_3166_1": "PS",
"name": "Palestinian Territory"
},
{
"iso_3166_1": "QA",
"name": "Qatar"
}
],
"release_date": "2017-04-29",
"spoken_languages": [
{
"english_name": "Arabic",
"iso_639_1": "ar",
"name": "\u0627\u0644\u0639\u0631\u0628\u064a\u0629"
},
{
"english_name": "English",
"iso_639_1": "en",
"name": "English"
},
{
"english_name": "Italian",
"iso_639_1": "it",
"name": "Italiano"
}
],
"title": "Ouroboros",
"date_added": "2024-02-03",
"date_finished": "2024-02-03",
"added_by_id": "465383",
"comments": "Radical Approaches to Filmmaking course"
},
{
"belongs_to_collection": null,
"genres": [
{
"id": 18,
"name": "Drama"
}
],
"id": 44967,
"imdb_id": "tt0015361",
"original_language": "ru",
"original_title": "\u0421\u0442\u0430\u0447\u043a\u0430",
"overview": "Workers in a factory in pre-revolutionary Russia go on strike and are met by violent suppression.",
"poster_path": "/mrDTaMxQKxWZ3uEs2G58vJWJDDL.jpg",
"production_countries": [
{
"iso_3166_1": "SU",
"name": "Soviet Union"
}
],
"release_date": "1925-04-28",
"spoken_languages": [
{
"english_name": "No Language",
"iso_639_1": "xx",
"name": "No Language"
}
],
"title": "Strike",
"date_added": "2024-02-03",
"date_finished": "2024-02-03",
"added_by_id": "44967",
"comments": "Radical Approaches to Filmmaking course"
},
{ {
"id": 14572, "id": 14572,
"original_language": "ja", "original_language": "ja",

View file

@@ -1,27 +1,29 @@
[ [
{ {
"Title": "Hellblade: Senua's Sacrifice", "Title": "Death Stranding",
"Series": "Hellblade", "Date Started": "2024-01-25",
"Developers": "Ninja Theory Ltd.", "Platforms": "PC",
"Date Released": "2017-08-08", "Developers": "Kojima Productions",
"Date Started": "2024-01-21", "Date Released": "2019-11-08",
"GiantBomb ID": "47363" "GiantBomb ID": "54232",
"date_added": "2019-12-04T21:27:08Z"
}, },
{ {
"Title": "Ancestors: The Humankind Odyssey", "Title": "Ancestors: The Humankind Odyssey",
"Platform": "PC",
"Date Started": "2023-08-08", "Date Started": "2023-08-08",
"Date Finished": "",
"Developers": "Panache Digital Games", "Developers": "Panache Digital Games",
"Date Released": "2019-08-27", "Date Released": "2019-08-27",
"GiantBomb ID": "49527" "GiantBomb ID": "49527",
"date_added": "2020-05-24T18:26:59Z"
}, },
{ {
"Title": "TIS-100", "Title": "TIS-100",
"Platforms": "PC", "Platforms": "PC",
"Date Started": "2016-12-24", "Date Started": "2016-12-24",
"Date Finished": "",
"Developers": "Zachtronics Industries", "Developers": "Zachtronics Industries",
"Date Released": "2015-07-20", "Date Released": "2015-07-20",
"GiantBomb ID": "49901" "GiantBomb ID": "49901",
"date_added": "2020-01-06T12:41:38Z"
} }
] ]

File diff suppressed because it is too large

View file

@@ -3,37 +3,36 @@
"Title": "Halo 2600", "Title": "Halo 2600",
"Developers": "", "Developers": "",
"Date Released": "2010-07-31", "Date Released": "2010-07-31",
"GiantBomb ID": "32258" "GiantBomb ID": "32258",
"date_added": "2013-07-03T14:48:07Z"
}, },
{ {
"Title": "Fear and Hunger", "Title": "Fear and Hunger",
"Developers": "Miro Haverinen", "Developers": "Miro Haverinen",
"Date Released": "2018-12-11", "Date Released": "2018-12-11",
"GiantBomb ID": "" "GiantBomb ID": "71763"
}, },
{ {
"Title": "Northern Journey", "Title": "Northern Journey",
"Developers": "Slid Studio", "Developers": "Slid Studio",
"Date Released": "2021-08-18", "Date Released": "2021-08-18"
"GiantBomb ID": ""
}, },
{ {
"Title": "A Good Gardener", "Title": "A Good Gardener",
"Developers": "Turnfollow", "Developers": "Turnfollow",
"Date Released": "", "Date Released": "",
"GiantBomb ID": "" "GiantBomb ID": "51757"
}, },
{ {
"Title": "Before the Green Moon", "Title": "Before the Green Moon",
"Developers": "Turnfollow", "Developers": "Turnfollow",
"Date Released": "2023-03-14", "Date Released": "2023-03-14"
"GiantBomb ID": ""
}, },
{ {
"Title": "Inscryption", "Title": "Inscryption",
"Developers": "Daniel Mullins Games", "Developers": "Daniel Mullins Games",
"Date Released": "2021-10-19", "Date Released": "2021-10-19",
"GiantBomb ID": "https://www.giantbomb.com/inscryption/3030-82965/" "GiantBomb ID": "82965"
}, },
{ {
"Title": "King of Dragon Pass", "Title": "King of Dragon Pass",
@@ -480,7 +479,7 @@
"GiantBomb ID": "16844" "GiantBomb ID": "16844"
}, },
{ {
"Title": "CROSS†CHANNEL", "Title": "CROSS\u2020CHANNEL",
"Developers": "FlyingShine, Regista", "Developers": "FlyingShine, Regista",
"Date Released": "2003-09-26", "Date Released": "2003-09-26",
"GiantBomb ID": "32147" "GiantBomb ID": "32147"
@@ -880,7 +879,7 @@
"GiantBomb ID": "23543" "GiantBomb ID": "23543"
}, },
{ {
"Title": "Dudebro™ — My Shit Is Fucked Up So I Got to Shoot/Slice You II: Its Straight-Up Dawg Time", "Title": "Dudebro\u2122 \u2014 My Shit Is Fucked Up So I Got to Shoot/Slice You II: It\u2019s Straight-Up Dawg Time",
"Developers": "Grimoire Assembly Forge", "Developers": "Grimoire Assembly Forge",
"Date Released": "", "Date Released": "",
"GiantBomb ID": "29993" "GiantBomb ID": "29993"
@@ -1318,7 +1317,7 @@
"GiantBomb ID": "35693" "GiantBomb ID": "35693"
}, },
{ {
"Title": "Déjà Vu: A Nightmare Comes True", "Title": "D\u00e9j\u00e0 Vu: A Nightmare Comes True",
"Series": "D\\u00e9j\\u00e0 Vu, MacVenture", "Series": "D\\u00e9j\\u00e0 Vu, MacVenture",
"Developers": "ICOM Simulations, Inc.", "Developers": "ICOM Simulations, Inc.",
"Date Released": "1985", "Date Released": "1985",
@@ -1333,8 +1332,7 @@
{ {
"Title": "Haunted Cities Volume 2", "Title": "Haunted Cities Volume 2",
"Developers": "Kitty Horrorshow", "Developers": "Kitty Horrorshow",
"Date Released": "2017-10-31", "Date Released": "2017-10-31"
"GiantBomb ID": ""
}, },
{ {
"Title": "PixelJunk Eden", "Title": "PixelJunk Eden",
@@ -2392,7 +2390,7 @@
"GiantBomb ID": "18742" "GiantBomb ID": "18742"
}, },
{ {
"Title": "Hate Plus: ~Mutes Golden Days~", "Title": "Hate Plus: ~Mute\u2019s Golden Days~",
"Developers": "Christine Love", "Developers": "Christine Love",
"Date Released": "2013-08-19", "Date Released": "2013-08-19",
"GiantBomb ID": "42478" "GiantBomb ID": "42478"
@@ -2525,7 +2523,7 @@
"GiantBomb ID": "15236" "GiantBomb ID": "15236"
}, },
{ {
"Title": "Pokémon Black/White Version 2", "Title": "Pok\u00e9mon Black/White Version 2",
"Series": "Pok\\u00e9mon", "Series": "Pok\\u00e9mon",
"Developers": "Game Freak, Inc.", "Developers": "Game Freak, Inc.",
"Date Released": "2012-10-07", "Date Released": "2012-10-07",
@@ -2997,7 +2995,7 @@
"GiantBomb ID": "62202" "GiantBomb ID": "62202"
}, },
{ {
"Title": "TrackMania² Canyon", "Title": "TrackMania\u00b2 Canyon",
"Series": "TrackMania", "Series": "TrackMania",
"Developers": "Nadeo", "Developers": "Nadeo",
"Date Released": "2011-09-14", "Date Released": "2011-09-14",
@@ -4436,7 +4434,7 @@
"GiantBomb ID": "1420" "GiantBomb ID": "1420"
}, },
{ {
"Title": "Bientôt l'été", "Title": "Bient\u00f4t l'\u00e9t\u00e9",
"Developers": "Tale of Tales BVBA", "Developers": "Tale of Tales BVBA",
"Date Released": "2013-02-06", "Date Released": "2013-02-06",
"GiantBomb ID": "41515" "GiantBomb ID": "41515"
@@ -4912,7 +4910,7 @@
"Platforms": "PC", "Platforms": "PC",
"Developers": " 10tons Ltd", "Developers": " 10tons Ltd",
"Date Released": "2020-11-06", "Date Released": "2020-11-06",
"GiantBomb ID": "" "GiantBomb ID": "85250"
}, },
{ {
"Title": "Syphon Filter 2", "Title": "Syphon Filter 2",
@@ -5016,7 +5014,7 @@
"GiantBomb ID": "4372" "GiantBomb ID": "4372"
}, },
{ {
"Title": "ōdi∙um", "Title": "\u014ddi\u2219um",
"Series": "Gorky", "Series": "Gorky",
"Developers": "Hyperion Entertainment, Metropolis Software House, TopWare Interactive", "Developers": "Hyperion Entertainment, Metropolis Software House, TopWare Interactive",
"Date Released": "1999-11-30", "Date Released": "1999-11-30",
@@ -5057,7 +5055,7 @@
"GiantBomb ID": "29299" "GiantBomb ID": "29299"
}, },
{ {
"Title": "Viva Piñata: Trouble in Paradise", "Title": "Viva Pi\u00f1ata: Trouble in Paradise",
"Series": "Viva Pi\\u00f1ata", "Series": "Viva Pi\\u00f1ata",
"Developers": "Rare, Ltd.", "Developers": "Rare, Ltd.",
"Date Released": "2008-09-02", "Date Released": "2008-09-02",
@@ -5236,7 +5234,7 @@
"GiantBomb ID": "20105" "GiantBomb ID": "20105"
}, },
{ {
"Title": "Sharin no Kuni, Himawari no Shōjo", "Title": "Sharin no Kuni, Himawari no Sh\u014djo",
"Developers": "Akabei Soft", "Developers": "Akabei Soft",
"Date Released": "2005-11-25", "Date Released": "2005-11-25",
"GiantBomb ID": "33196" "GiantBomb ID": "33196"
@@ -7424,7 +7422,7 @@
"GiantBomb ID": "24015" "GiantBomb ID": "24015"
}, },
{ {
"Title": "Pokémon FireRed/LeafGreen", "Title": "Pok\u00e9mon FireRed/LeafGreen",
"Series": "Pok\\u00e9mon", "Series": "Pok\\u00e9mon",
"Developers": "Creatures, Inc., Game Freak, Inc.", "Developers": "Creatures, Inc., Game Freak, Inc.",
"Date Released": "2004-09-07", "Date Released": "2004-09-07",
@@ -7780,7 +7778,7 @@
"GiantBomb ID": "10081" "GiantBomb ID": "10081"
}, },
{ {
"Title": "Viva Piñata", "Title": "Viva Pi\u00f1ata",
"Series": "Viva Pi\\u00f1ata", "Series": "Viva Pi\\u00f1ata",
"Developers": "Rare, Ltd.", "Developers": "Rare, Ltd.",
"Date Released": "2006-11-09", "Date Released": "2006-11-09",
@@ -7834,7 +7832,7 @@
"GiantBomb ID": "9290" "GiantBomb ID": "9290"
}, },
{ {
"Title": "Chäos", "Title": "Ch\u00e4os",
"Platforms": "Wish List: {date_added: 2013-06-22T21:58:00Z, url: https://www.grouvee.com/user/1002-544c34b8c8/shelves/4969-wish-list/}", "Platforms": "Wish List: {date_added: 2013-06-22T21:58:00Z, url: https://www.grouvee.com/user/1002-544c34b8c8/shelves/4969-wish-list/}",
"Series": "Adventure", "Series": "Adventure",
"Developers": "Chaos;Head, Science Adventure", "Developers": "Chaos;Head, Science Adventure",
@@ -7927,8 +7925,7 @@
{ {
"Title": "Things We Lost in the Flood", "Title": "Things We Lost in the Flood",
"Developers": "Awkward Silence Games", "Developers": "Awkward Silence Games",
"Date Released": "2019-06-19", "Date Released": "2019-06-19"
"GiantBomb ID": ""
}, },
{ {
"Title": "Twisted: The Game Show", "Title": "Twisted: The Game Show",
@@ -8155,7 +8152,7 @@
"GiantBomb ID": "26112" "GiantBomb ID": "26112"
}, },
{ {
"Title": "Pokémon HeartGold/SoulSilver", "Title": "Pok\u00e9mon HeartGold/SoulSilver",
"Series": "Pok\\u00e9mon", "Series": "Pok\\u00e9mon",
"Developers": "Game Freak, Inc.", "Developers": "Game Freak, Inc.",
"Date Released": "2009-09-12", "Date Released": "2009-09-12",
@@ -8542,8 +8539,7 @@
{ {
"Title": "Haunted Cities", "Title": "Haunted Cities",
"Developers": "Kitty Horrorshow", "Developers": "Kitty Horrorshow",
"Date Released": "2016-05-06", "Date Released": "2016-05-06"
"GiantBomb ID": ""
}, },
{ {
"Title": "Worms World Party", "Title": "Worms World Party",
@@ -8986,7 +8982,7 @@
"Title": "The Space Between", "Title": "The Space Between",
"Developers": "Christoph Frey", "Developers": "Christoph Frey",
"Date Released": "2019-04-06", "Date Released": "2019-04-06",
"GiantBomb ID": "" "GiantBomb ID": "74413"
}, },
{ {
"Title": "John Woo Presents Stranglehold", "Title": "John Woo Presents Stranglehold",
@@ -9084,7 +9080,7 @@
"Platforms": "PC", "Platforms": "PC",
"Developers": "", "Developers": "",
"Date Released": "2021-01-04", "Date Released": "2021-01-04",
"GiantBomb ID": "" "GiantBomb ID": "81879"
}, },
{ {
"Title": "Theocracy", "Title": "Theocracy",
@@ -10366,7 +10362,7 @@
"GiantBomb ID": "235" "GiantBomb ID": "235"
}, },
{ {
"Title": "Assassin's Creed: Altaïr's Chronicles", "Title": "Assassin's Creed: Alta\u00efr's Chronicles",
"Series": "Assassin's Creed", "Series": "Assassin's Creed",
"Developers": "Gameloft S.A.", "Developers": "Gameloft S.A.",
"Date Released": "2008-02-05", "Date Released": "2008-02-05",
@@ -10561,8 +10557,7 @@
"Title": "PataNoir", "Title": "PataNoir",
"Platforms": "PC", "Platforms": "PC",
"Developers": "", "Developers": "",
"Date Released": "2016-06-01", "Date Released": "2016-06-01"
"GiantBomb ID": ""
}, },
{ {
"Title": "Midnight Club 3: DUB Edition Remix", "Title": "Midnight Club 3: DUB Edition Remix",
@@ -11154,7 +11149,7 @@
"GiantBomb ID": "77201" "GiantBomb ID": "77201"
}, },
{ {
"Title": "Brain Age²: More Training in Minutes a Day!", "Title": "Brain Age\u00b2: More Training in Minutes a Day!",
"Series": "Brain Age, Touch! Generations", "Series": "Brain Age, Touch! Generations",
"Developers": "Nintendo SDD Software Development Group, Nintendo SPD Group No.2", "Developers": "Nintendo SDD Software Development Group, Nintendo SPD Group No.2",
"Date Released": "2005-12-29", "Date Released": "2005-12-29",
@@ -12686,7 +12681,7 @@
"GiantBomb ID": "16762" "GiantBomb ID": "16762"
}, },
{ {
"Title": "Adventure Time: Hey Ice King! Whyd you steal our garbage?!!", "Title": "Adventure Time: Hey Ice King! Why\u2019d you steal our garbage?!!",
"Series": "Adventure Time, Cartoon Network", "Series": "Adventure Time, Cartoon Network",
"Developers": "WayForward Technologies", "Developers": "WayForward Technologies",
"Date Released": "2012-11-20", "Date Released": "2012-11-20",
@@ -13661,7 +13656,7 @@
"GiantBomb ID": "31188" "GiantBomb ID": "31188"
}, },
{ {
"Title": "ARTé: Mecenas", "Title": "ART\u00e9: Mecenas",
"Developers": "Triseum", "Developers": "Triseum",
"Date Released": "2016-06-01", "Date Released": "2016-06-01",
"GiantBomb ID": "61359" "GiantBomb ID": "61359"
@@ -14058,7 +14053,7 @@
"GiantBomb ID": "10692" "GiantBomb ID": "10692"
}, },
{ {
"Title": "Xenosaga: Episode II - Jenseits von Gut und Böse", "Title": "Xenosaga: Episode II - Jenseits von Gut und B\u00f6se",
"Series": "Xeno, Xenosaga", "Series": "Xeno, Xenosaga",
"Developers": "Monolith Software, Inc.", "Developers": "Monolith Software, Inc.",
"Date Released": "2005-02-15", "Date Released": "2005-02-15",
@@ -15141,7 +15136,7 @@
"GiantBomb ID": "11602" "GiantBomb ID": "11602"
}, },
{ {
"Title": "Pokémon Snap", "Title": "Pok\u00e9mon Snap",
"Series": "Pok\\u00e9mon, Pok\\u00e9mon Snap", "Series": "Pok\\u00e9mon, Pok\\u00e9mon Snap",
"Developers": "HAL Laboratory, Inc., Pax Softonica", "Developers": "HAL Laboratory, Inc., Pax Softonica",
"Date Released": "1999-03-21", "Date Released": "1999-03-21",
@@ -15647,7 +15642,7 @@
"GiantBomb ID": "5764" "GiantBomb ID": "5764"
}, },
{ {
"Title": "Pokémon Emerald", "Title": "Pok\u00e9mon Emerald",
"Series": "Pok\\u00e9mon", "Series": "Pok\\u00e9mon",
"Developers": "Game Freak, Inc.", "Developers": "Game Freak, Inc.",
"Date Released": "2004-09-16", "Date Released": "2004-09-16",
@@ -16170,7 +16165,7 @@
"GiantBomb ID": "11023" "GiantBomb ID": "11023"
}, },
{ {
"Title": "Pokémon Crystal", "Title": "Pok\u00e9mon Crystal",
"Series": "Pok\\u00e9mon", "Series": "Pok\\u00e9mon",
"Developers": "Game Freak, Inc.", "Developers": "Game Freak, Inc.",
"Date Released": "2000-12-14", "Date Released": "2000-12-14",
@@ -16277,7 +16272,7 @@
"GiantBomb ID": "20155" "GiantBomb ID": "20155"
}, },
{ {
"Title": "Please, Dont Touch Anything", "Title": "Please, Don\u2019t Touch Anything",
"Developers": "Four Quarters", "Developers": "Four Quarters",
"Date Released": "2015-03-26", "Date Released": "2015-03-26",
"GiantBomb ID": "49275" "GiantBomb ID": "49275"
@@ -16351,7 +16346,7 @@
"GiantBomb ID": "38900" "GiantBomb ID": "38900"
}, },
{ {
"Title": "Déjà Vu II: Lost in Las Vegas", "Title": "D\u00e9j\u00e0 Vu II: Lost in Las Vegas",
"Series": "D\\u00e9j\\u00e0 Vu, MacVenture", "Series": "D\\u00e9j\\u00e0 Vu, MacVenture",
"Developers": "ICOM Simulations, Inc.", "Developers": "ICOM Simulations, Inc.",
"Date Released": "1988", "Date Released": "1988",
@@ -16820,7 +16815,7 @@
"GiantBomb ID": "17690" "GiantBomb ID": "17690"
}, },
{ {
"Title": "Linda³ Again", "Title": "Linda\u00b3 Again",
"Developers": "Alfa System, MARS Corporation", "Developers": "Alfa System, MARS Corporation",
"Date Released": "1997-09-25", "Date Released": "1997-09-25",
"GiantBomb ID": "35981" "GiantBomb ID": "35981"
@@ -17508,7 +17503,7 @@
"GiantBomb ID": "14615" "GiantBomb ID": "14615"
}, },
{ {
"Title": "Pokémon Platinum", "Title": "Pok\u00e9mon Platinum",
"Series": "Pok\\u00e9mon", "Series": "Pok\\u00e9mon",
"Developers": "Game Freak, Inc.", "Developers": "Game Freak, Inc.",
"Date Released": "2008-09-13", "Date Released": "2008-09-13",
@@ -17676,7 +17671,7 @@
"GiantBomb ID": "25651" "GiantBomb ID": "25651"
}, },
{ {
"Title": "Pokémon Black/White", "Title": "Pok\u00e9mon Black/White",
"Series": "Pok\\u00e9mon", "Series": "Pok\\u00e9mon",
"Developers": "Game Freak, Inc.", "Developers": "Game Freak, Inc.",
"Date Released": "2011-03-06", "Date Released": "2011-03-06",
@@ -17764,13 +17759,6 @@
"Date Released": "2013-04-09", "Date Released": "2013-04-09",
"GiantBomb ID": "37673" "GiantBomb ID": "37673"
}, },
{
"Title": "Death Stranding",
"Platforms": "PC",
"Developers": "Kojima Productions",
"Date Released": "2019-11-08",
"GiantBomb ID": "54232"
},
{ {
"Title": "Grow Home", "Title": "Grow Home",
"Series": "Grow Home", "Series": "Grow Home",
@@ -19312,7 +19300,7 @@
"GiantBomb ID": "306" "GiantBomb ID": "306"
}, },
{ {
"Title": "Imabikisō", "Title": "Imabikis\u014d",
"Series": "Sound Novel", "Series": "Sound Novel",
"Developers": "Chunsoft", "Developers": "Chunsoft",
"Date Released": "2007-10-25", "Date Released": "2007-10-25",
@@ -19590,7 +19578,7 @@
"GiantBomb ID": "25961" "GiantBomb ID": "25961"
}, },
{ {
"Title": kami", "Title": "\u014ckami",
"Platforms": "PlayStation 2", "Platforms": "PlayStation 2",
"Series": "\\u014ckami", "Series": "\\u014ckami",
"Developers": "Clover Studio, Hexa Drive, Imagica Digitalscape Co., Ltd., Ready at Dawn Studios, LLC", "Developers": "Clover Studio, Hexa Drive, Imagica Digitalscape Co., Ltd., Ready at Dawn Studios, LLC",
@@ -21658,7 +21646,7 @@
"GiantBomb ID": "24415" "GiantBomb ID": "24415"
}, },
{ {
"Title": "X³: Reunion", "Title": "X\u00b3: Reunion",
"Platforms": "PC", "Platforms": "PC",
"Series": "X", "Series": "X",
"Developers": "Egosoft", "Developers": "Egosoft",

View file

@@ -0,0 +1,61 @@
import json

with open(f"./scripts/grouvee.json", "r", encoding="utf-8") as log_file:
    orig_log_items = json.load(log_file)

for log in ["log", "current", "wishlist"]:
    print(f"Processing {log}")

    with open(f"./data/games/{log}.json", "r", encoding="utf-8") as log_file:
        log_items = json.load(log_file)

    for i, item in enumerate(log_items):
        print(f"Processing {item['Title']}...")

        if "GiantBomb ID" in item:
            orig_item = [""]
            if "" != item["GiantBomb ID"]:
                orig_item = [
                    orig_item
                    for orig_item in orig_log_items
                    if orig_item["giantbomb_id"] == int(item["GiantBomb ID"])
                ]
            elif "" == item["GiantBomb ID"]:
                orig_item = [
                    orig_item
                    for orig_item in orig_log_items
                    if orig_item["name"] == item["Title"]
                ]

            if [] == orig_item:
                print(f"No item {item['Title']} found in original log!")
                log_items[i] = item
                break

            elif 1 < len(orig_item):
                raise Exception(f"Multiple items returned for {item['Title']}!")

            else:
                orig_item = orig_item[0]
                if "Wish List" in orig_item["shelves"]:
                    item["date_added"] = orig_item["shelves"]["Wish List"]["date_added"]
                elif "Backlog" in orig_item["shelves"]:
                    item["date_added"] = orig_item["shelves"]["Backlog"]["date_added"]
                elif "Played" in orig_item["shelves"] and "log" == log:
                    item["date_added"] = orig_item["shelves"]["Played"]["date_added"]
                else:
                    print(f"No date_added for {item['Title']}!")

        log_items[i] = item
        print(f"Finished processing {item['Title']}.")

    with open(f"./data/games/{log}.json", "w", encoding="utf-8") as log_file:
        json.dump(log_items, log_file, indent=4)

    print(f"Finished processing {log}.")

View file

@@ -13,6 +13,7 @@ from dotenv import load_dotenv
authors = [] authors = []
def setup_logger(name="add_item"): def setup_logger(name="add_item"):
"""Set up the logger for console and file""" """Set up the logger for console and file"""
@@ -50,14 +51,18 @@ if "" == TVDB_API_KEY:
logger.error("TVDB API key not found") logger.error("TVDB API key not found")
def return_if_exists(item_id, media_type, log) -> dict|None: def return_if_exists(item_id, media_type, log) -> dict | None:
"""Returns an item if it exists in the requested log""" """Returns an item if it exists in the requested log"""
logger.info(f"Checking for '{item_id}' in '{log}'") logger.info(f"Checking for '{item_id}' in '{log}'")
with open(f"./data/{media_type}/{log}.json", "r", encoding='utf-8') as log_file: with open(f"./data/{media_type}/{log}.json", "r", encoding="utf-8") as log_file:
log_items = json.load(log_file) log_items = json.load(log_file)
existing_items = [log_item for log_item in log_items if "id" in log_item and log_item['id'] == int(item_id)] existing_items = [
log_item
for log_item in log_items
if "id" in log_item and log_item["id"] == int(item_id)
]
if len(existing_items) > 0: if len(existing_items) > 0:
logger.info(f"Found item in '{log}'") logger.info(f"Found item in '{log}'")
return existing_items[-1] return existing_items[-1]
@@ -68,15 +73,19 @@ def delete_existing(item_id, media_type, log) -> None:
"""Deletes an item from a log if it matches the ID""" """Deletes an item from a log if it matches the ID"""
logger.info(f"Deleting '{item_id}' from '{log}'") logger.info(f"Deleting '{item_id}' from '{log}'")
with open(f"./data/{media_type}/{log}.json", "r", encoding='utf-8') as log_file: with open(f"./data/{media_type}/{log}.json", "r", encoding="utf-8") as log_file:
log_items = json.load(log_file) log_items = json.load(log_file)
old_len = len(log_items) old_len = len(log_items)
log_items = [log_item for log_item in log_items if "id" not in log_item or ("id" in log_item and log_item['id'] != int(item_id))] log_items = [
log_item
for log_item in log_items
if "id" not in log_item or ("id" in log_item and log_item["id"] != int(item_id))
]
if len(log_items) < (old_len - 1): if len(log_items) < (old_len - 1):
raise Exception("More than one deletion made, discarding…") raise Exception("More than one deletion made, discarding…")
with open(f"./data/{media_type}/{log}.json", "w", encoding='utf-8') as log_file: with open(f"./data/{media_type}/{log}.json", "w", encoding="utf-8") as log_file:
json.dump(log_items, log_file, indent=4) json.dump(log_items, log_file, indent=4)
logger.info(f"'{item_id}' deleted from '{log}'") logger.info(f"'{item_id}' deleted from '{log}'")
@@ -94,8 +103,15 @@ def check_for_existing(item_id, media_type, log) -> dict[dict, str]:
existing_item["is_repeat"] = True existing_item["is_repeat"] = True
return existing_item, None return existing_item, None
for log_to_check in [p_log for p_log in ["log", "current", "wishlist"] if p_log != log]: for log_to_check in [
if ("current" == log_to_check and media_type in ["books", "games", "tv-series"]) or ("wishlist" == log_to_check and media_type in ["books", "games", "films", "tv-series"]): p_log for p_log in ["log", "current", "wishlist"] if p_log != log
]:
if (
"current" == log_to_check and media_type in ["books", "games", "tv-series"]
) or (
"wishlist" == log_to_check
and media_type in ["books", "games", "films", "tv-series"]
):
existing_item = return_if_exists(item_id, media_type, log_to_check) existing_item = return_if_exists(item_id, media_type, log_to_check)
if existing_item is not None: if existing_item is not None:
return existing_item, log_to_check return existing_item, log_to_check
@@ -156,12 +172,12 @@ def add_item_to_log(item_id, media_type, log) -> None:
# Save changes # Save changes
logger.info(f"Adding {media_type} to {log}") logger.info(f"Adding {media_type} to {log}")
with open(f"./data/{media_type}/{log}.json", "r", encoding='utf-8') as log_file: with open(f"./data/{media_type}/{log}.json", "r", encoding="utf-8") as log_file:
log_items = json.load(log_file) log_items = json.load(log_file)
log_items.insert(0, item) log_items.insert(0, item)
with open(f"./data/{media_type}/{log}.json", "w", encoding='utf-8') as log_file: with open(f"./data/{media_type}/{log}.json", "w", encoding="utf-8") as log_file:
json.dump(log_items, log_file, indent=4) json.dump(log_items, log_file, indent=4)
logger.info(f"Added {media_type} {item_id} to {log}") logger.info(f"Added {media_type} {item_id} to {log}")
@@ -177,10 +193,12 @@ def import_by_id(import_id, media_type) -> dict:
return import_from_tmdb_by_id(import_id, media_type) return import_from_tmdb_by_id(import_id, media_type)
if media_type in ["tv-episodes"]: if media_type in ["tv-episodes"]:
return #import_from_tvdb_by_id(import_id, media_type) return # import_from_tvdb_by_id(import_id, media_type)
if media_type in ["books"]: if media_type in ["books"]:
return import_from_openlibrary_by_id(import_id, media_type) return import_from_openlibrary_by_id(
"".join(re.findall(r"\d+", import_id)), media_type
)
def import_from_tmdb_by_id(tmdb_id, media_type) -> dict: def import_from_tmdb_by_id(tmdb_id, media_type) -> dict:
@@ -191,9 +209,7 @@ def import_from_tmdb_by_id(tmdb_id, media_type) -> dict:
# Sending API request # Sending API request
response = requests.get( response = requests.get(
api_url, api_url, headers={"Authorization": f"Bearer {TMDB_API_KEY}"}, timeout=15
headers={"Authorization": f"Bearer {TMDB_API_KEY}"},
timeout=15
) )
# Process the response # Process the response
@@ -212,14 +228,8 @@ def import_from_tmdb_by_id(tmdb_id, media_type) -> dict:
response_data = json.loads(response.text) response_data = json.loads(response.text)
if 1 == len(response_data):
item = response_data[0]
elif 0 == len(response_data):
raise Exception(f"Returned no results for {tmdb_id}")
# Modify the returned result to add additional data # Modify the returned result to add additional data
return cleanup_result(item, media_type) return cleanup_result(response_data, media_type)
def import_from_openlibrary_by_id(isbn, media_type) -> dict: def import_from_openlibrary_by_id(isbn, media_type) -> dict:
@@ -253,10 +263,12 @@ def import_from_openlibrary_by_id(isbn, media_type) -> dict:
for i, sub_item in enumerate(item[key]): for i, sub_item in enumerate(item[key]):
item[key][i] = import_from_openlibrary_by_ol_key(sub_item["key"]) item[key][i] = import_from_openlibrary_by_ol_key(sub_item["key"])
if "works" in item: if "works" in item:
if len(item["works"]) > 1: if len(item["works"]) > 1:
raise Exception(f"Multiple works found for {isbn}") print(f"Multiple works found for {isbn}:")
print(item["works"])
idx = input(f"Select ID to use [0-{len(item['works'])-1}]: ")
item["works"][0] = item["works"][int(idx)]
item["work"] = item["works"][0] item["work"] = item["works"][0]
del item["works"] del item["works"]
@@ -275,20 +287,28 @@ def import_from_openlibrary_by_ol_key(key) -> dict:
_, mode, ol_id = key.split("/") _, mode, ol_id = key.split("/")
if "authors" == mode: if "authors" == mode:
with open(f"./scripts/caching/authors.json", "r", encoding='utf-8') as authors_cache: with open(
f"./scripts/caching/authors.json", "r", encoding="utf-8"
) as authors_cache:
cached_authors = json.load(authors_cache) cached_authors = json.load(authors_cache)
if mode in ["works", "authors"]: if mode in ["works", "authors"]:
if "authors" == mode: if "authors" == mode:
matched_cached_authors = [aut for aut in cached_authors if aut['id'] == ol_id] matched_cached_authors = [
aut for aut in cached_authors if aut["id"] == ol_id
]
if len(matched_cached_authors) == 1: if len(matched_cached_authors) == 1:
logging.info(f"Found cached author '{matched_cached_authors[0]['name']}'") logging.info(
f"Found cached author '{matched_cached_authors[0]['name']}'"
)
return matched_cached_authors[0] return matched_cached_authors[0]
api_url = f"https://openlibrary.org{key}" api_url = f"https://openlibrary.org{key}"
# Sending API request # Sending API request
response = requests.get(api_url, headers={"accept": "application/json"}, timeout=15) response = requests.get(
api_url, headers={"accept": "application/json"}, timeout=15
)
# Process the response # Process the response
if 200 == response.status_code: if 200 == response.status_code:
@@ -316,9 +336,7 @@ def import_from_openlibrary_by_ol_key(key) -> dict:
logger.info(f"Caching author '{author['name']}'") logger.info(f"Caching author '{author['name']}'")
cached_authors.append(author) cached_authors.append(author)
with open( with open(
f"./scripts/caching/authors.json", f"./scripts/caching/authors.json", "w", encoding="utf-8"
"w",
encoding='utf-8'
) as authors_cache: ) as authors_cache:
json.dump(cached_authors, authors_cache, indent=4) json.dump(cached_authors, authors_cache, indent=4)
logger.info(f"Author '{author['name']}' cached!") logger.info(f"Author '{author['name']}' cached!")
@@ -345,6 +363,7 @@ def cleanup_result(item, media_type) -> dict:
for field_name in [ for field_name in [
"adult", # TMDB "adult", # TMDB
"backdrop_path", # TMDB "backdrop_path", # TMDB
"budget", # TMDB
"copyright_date", # OpenLibrary "copyright_date", # OpenLibrary
"classifications", # OpenLibrary "classifications", # OpenLibrary
"created", # OpenLibrary "created", # OpenLibrary
@@ -352,6 +371,7 @@ def cleanup_result(item, media_type) -> dict:
"episode_type", # TMDB "episode_type", # TMDB
"first_sentence", # OpenLibrary "first_sentence", # OpenLibrary
"genre_ids", # TMDB "genre_ids", # TMDB
"homepage", # TMDB
"identifiers", # OpenLibrary "identifiers", # OpenLibrary
"media_type", # TMDB "media_type", # TMDB
"last_modified", # OpenLibrary "last_modified", # OpenLibrary
@@ -366,11 +386,15 @@ def cleanup_result(item, media_type) -> dict:
"physical_dimensions", # OpenLibrary "physical_dimensions", # OpenLibrary
"popularity", # TMDB "popularity", # TMDB
"production_code", # TMDB "production_code", # TMDB
"production_companies", # TMDB
"revenue", # TMDB
"revision", # OpenLibrary "revision", # OpenLibrary
"runtime", # TMDB "runtime", # TMDB
"source_records", # OpenLibrary "source_records", # OpenLibrary
"status", # TMDB
"still_path", # TMDB "still_path", # TMDB
"table_of_contents", # OpenLibrary "table_of_contents", # OpenLibrary
"tagline", # TMDB
"type", # OpenLibrary "type", # OpenLibrary
"uri_descriptions", # OpenLibrary "uri_descriptions", # OpenLibrary
"url", # OpenLibrary "url", # OpenLibrary
@@ -413,21 +437,28 @@
] ]
if "translation_of" in item: if "translation_of" in item:
if item["translation_of"].split(":")[0].lower() == item["work"]["title"].split(":")[0].lower(): if not (
del item["translation_of"] item["translation_of"].split(":")[0].lower()
else: == item["work"]["title"].split(":")[0].lower()
):
logger.warn(
f"translation_of '{item['translation_of']}' \
is different to work title '{item['work']['title']}'"
)
if 'y' != input("Accept change? [y|n]: "):
raise Exception( raise Exception(
f"translation_of '{item['translation_of']}' \ f"translation_of '{item['translation_of']}' \
is different to work title '{item['work']['title']}'" is different to work title '{item['work']['title']}'"
) )
del item["translation_of"]
if "translated_from" in item: if "translated_from" in item:
if len(item["translated_from"]) > 1: if len(item["translated_from"]) > 1:
raise Exception("Multiple translated_from results") raise Exception("Multiple translated_from results")
item["work"]["original_language"] = item["translated_from"][0][ item["work"]["original_language"] = item["translated_from"][0]["key"].split(
"key" "/"
].split("/")[2] )[2]
del item["translated_from"] del item["translated_from"]
if "date_added" not in item: if "date_added" not in item:
@@ -459,7 +490,7 @@ def main() -> None:
log = input("Enter log to update [log|current|wishlist]: ") log = input("Enter log to update [log|current|wishlist]: ")
while re.search("[0-9]+", item_id) is None: while re.search("[0-9]+", item_id) is None:
item_id = input("Enter ISBN: ") item_id = "".join(re.findall(r"\d+", input("Enter ISBN: ")))
elif "tv-episodes" == media_type: elif "tv-episodes" == media_type:
log = "log" log = "log"