add book adding to script, update tv series'

parent 5b92efcc98
commit 06957e053f

11 changed files with 15654 additions and 14769 deletions

@@ -1,3 +1,3 @@
 TMDB_API_KEY=
 TVDB_API_KEY=
-OPENLIBRARY_API_KEY=

@@ -1,4 +1,34 @@
 [
+{
+"title": "Le K\u00e2ma S\u00fbtra",
+"publishers": [
+"\u00c9ditions Gl\u00e9nat"
+],
+"publish_date": "2017-10-25",
+"covers": [
+14567221
+],
+"languages": [
+"fre"
+],
+"description": "Osez ouvrir votre \u00e2me, votre c\u0153ur et votre corps \u00e0 la plus excitante initiation \u00e0 l'amour. Osez ouvrir le kama sutra de Manara.\r\n\r\nParva superbe blonde, entre par accident en possession d\u2019une ceinture tr\u00e8s \u00e9trange\u2026 puisqu\u2019elle s\u2019av\u00e8re \u00eatre l\u2019\u00e9crin dans lequel a \u00e9t\u00e9 emprisonn\u00e9 l\u2019esprit du dieu hindou Shiva. Celui-ci va initier la jeune femme au Kama Sutra, l\u2019emmenant en r\u00eave parcourir l\u2019Inde et ses myst\u00e8res sensuels et \u00e9rotiques dans le but de retrouver forme humaine.\r\n\r\nNouvelle \u00e9dition de cet incontournable de Manara, dans une nouvelle traduction enrichie d\u2019une pr\u00e9face in\u00e9dite de l\u2019auteur.",
+"physical_format": "Hardcover",
+"number_of_pages": 72,
+"isbn_13": "9782344025642",
+"edition_name": "24X32 (Gl\u00e9nat BD)",
+"work": {
+"id": "OL608214W",
+"title": "Kamasutra",
+"original_language": "ita"
+},
+"id": "OL50543310M",
+"published_in": "Grenoble",
+"date_added": "2024-01-15",
+"date_started": "2024-01-15",
+"date_finished": "2024-01-15",
+"added_by_id": "9782344025642",
+"comments": "Read in French"
+},
 {
 "Title": "Montpellier",
 "Author": "Jean du Boiseberranger",

@@ -28,7 +58,7 @@
 },
 {
 "Additional Authors": "Aleida Guevara March",
-"Author": "Ernesto “Che” Guevara",
+"Author": "Ernesto \u201cChe\u201d Guevara",
 "Binding": "Paperback",
 "Date Added": "2023-05-12",
 "Date Finished": "2023-12-12",

@@ -38,7 +68,7 @@
 "Original Publication Year": "1993",
 "Publisher": "Ocean Sur",
 "Read Count": 1,
-"Title": "Diarios de Motocicleta: Notas de Viaje por América Latina",
+"Title": "Diarios de Motocicleta: Notas de Viaje por Am\u00e9rica Latina",
 "TitleLang": "es",
 "TitleTrans": "The Motorcycle Diaries: Notes on a Latin American Journey",
 "Year Published": "2004"

@@ -102,7 +132,7 @@
 "ISBN13": "9781905492220"
 },
 {
-"Title": "The Field Guide to Understanding ‘Human Error’",
+"Title": "The Field Guide to Understanding \u2018Human Error\u2019",
 "Binding": "Paperback",
 "Publisher": "Routledge",
 "Edition": "Third Edition",

@@ -238,7 +268,7 @@
 "Date Started": "2023-09-01",
 "Publisher": "Active Distribution",
 "Read Count": 1,
-"Title": "Rojava Anarchists: One Year Since the Turkish Invasion of Rojava: An Interview with Tekoşîna Anarşîst",
+"Title": "Rojava Anarchists: One Year Since the Turkish Invasion of Rojava: An Interview with Teko\u015f\u00eena Anar\u015f\u00eest",
 "Year Published": "2020"
 },
 {

@@ -443,7 +473,7 @@
 "Title": "History Will Absolve Me",
 "Author": "Fidel Castro",
 "ISBN13": "9789590107832",
-"Publisher": "Editorial Política",
+"Publisher": "Editorial Pol\u00edtica",
 "Binding": "Paperback",
 "Number of Pages": 64,
 "Year Published": "2002",

@@ -486,8 +516,8 @@
 "Read Count": 1
 },
 {
-"Title": "Dreams of Freedom: A Ricardo Flores Magón Reader",
-"Author": "Ricardo Flores Magón",
+"Title": "Dreams of Freedom: A Ricardo Flores Mag\u00f3n Reader",
+"Author": "Ricardo Flores Mag\u00f3n",
 "Additional Authors": "Charles Buf, Mitchell Cowen Verter",
 "ISBN": "1904859240",
 "ISBN13": "9781904859246",

@@ -652,7 +682,7 @@
 },
 {
 "Title": "Critical Thought in the Face of the Capitalist Hydra I: Contributions by the Sixth Commission of the EZLN",
-"Author": "Ejército Zapatista de Liberación Nacional",
+"Author": "Ej\u00e9rcito Zapatista de Liberaci\u00f3n Nacional",
 "ISBN": "979799325",
 "ISBN13": "9780979799327",
 "Publisher": "Paperboat Press",

@@ -733,7 +763,7 @@
 },
 {
 "Title": "Sara: My Whole Life Was a Struggle",
-"Author": "Sakine Cansız",
+"Author": "Sakine Cans\u0131z",
 "ISBN": null,
 "ISBN13": "9781786802910",
 "Publisher": "Pluto Press",

@@ -815,7 +845,7 @@
 },
 {
 "Title": "Brigadistes: Lives for Liberty",
-"Author": "Jordi Martí-Rueda",
+"Author": "Jordi Mart\u00ed-Rueda",
 "Additional Authors": "Mary Ann Newman",
 "ISBN": "745347126",
 "ISBN13": "9780745347127",

@@ -833,7 +863,7 @@
 "Title": "Manifesto for a Democratic Civilization, Volume I - Civilization: The Age of Masked Gods and Disguised Kings",
 "Series": "Manifesto for a Democratic Civilization",
 "Series Number": 1,
-"Author": "Abdullah Öcalan",
+"Author": "Abdullah \u00d6calan",
 "Additional Authors": "David Graeber",
 "ISBN": "8293064420",
 "ISBN13": "9788293064428",

@@ -960,9 +990,9 @@
 "Read Count": 1
 },
 {
-"Title": "Building Free Life: Dialogues with Öcalan",
-"Author": "International Initiative “Freedom for Abdullah Öcalan – Peace in Kurdistan”",
-"Additional Authors": "John Holloway, Norman Paech, Ekkehard Sauermann, Immanuel Wallerstein, Arnaldo Otegi, Barry K. Gills, Antonio Negri, Peter Lamborn Wilson, Donald H. Matthews, Thomas Jeffrey Miley, Muriel González Athenas, Radha D'Souza, Andrej Grubačić, Raul Zibechi, Mechthild Exo, David Graeber, Fabian Scheidler, Damian Gerber and Shannon Brincat, Patrick Huff, Nazan Üstündağ, Michael Panser, Abdullah Öcalan",
+"Title": "Building Free Life: Dialogues with \u00d6calan",
+"Author": "International Initiative \u201cFreedom for Abdullah \u00d6calan \u2013 Peace in Kurdistan\u201d",
+"Additional Authors": "John Holloway, Norman Paech, Ekkehard Sauermann, Immanuel Wallerstein, Arnaldo Otegi, Barry K. Gills, Antonio Negri, Peter Lamborn Wilson, Donald H. Matthews, Thomas Jeffrey Miley, Muriel Gonz\u00e1lez Athenas, Radha D'Souza, Andrej Gruba\u010di\u0107, Raul Zibechi, Mechthild Exo, David Graeber, Fabian Scheidler, Damian Gerber and Shannon Brincat, Patrick Huff, Nazan \u00dcst\u00fcnda\u011f, Michael Panser, Abdullah \u00d6calan",
 "ISBN": null,
 "ISBN13": "9781629637686",
 "Publisher": "PM Press",

@@ -1152,7 +1182,7 @@
 "Read Count": 1
 },
 {
-"Title": "If This is a Woman: Inside Ravensbrück: Hitler's Concentration Camp for Women",
+"Title": "If This is a Woman: Inside Ravensbr\u00fcck: Hitler's Concentration Camp for Women",
 "Author": "Sarah Helm",
 "ISBN": "1408701073",
 "ISBN13": "9781408701072",

@@ -1251,7 +1281,7 @@
 },
 {
 "Title": "How to Read Lacan",
-"Author": "Slavoj Žižek",
+"Author": "Slavoj \u017di\u017eek",
 "ISBN": "1862078947",
 "ISBN13": "9781862078949",
 "Publisher": "Granta Books",

@@ -1532,7 +1562,7 @@
 "Read Count": 2
 },
 {
-"Title": "Phaic Tăn: Sunstroke on a Shoestring",
+"Title": "Phaic T\u0103n: Sunstroke on a Shoestring",
 "Author": "Santo Cilauro",
 "Additional Authors": "Tom Gleisner, Rob Sitch",
 "ISBN": "811853659",

@@ -1925,8 +1955,8 @@
 },
 {
 "Title": "Visions, voices, and practices of the zapatistas",
-"Author": "Gilberto López y Rivas",
-"Additional Authors": "José Jorge Santiago S., David Barkin, Claudia Yadira Caballero Borja, Alejandra Jiménez Ramírez, Mariana Mora, Gustavo Esteva, Lau Kin Chi",
+"Author": "Gilberto L\u00f3pez y Rivas",
+"Additional Authors": "Jos\u00e9 Jorge Santiago S., David Barkin, Claudia Yadira Caballero Borja, Alejandra Jim\u00e9nez Ram\u00edrez, Mariana Mora, Gustavo Esteva, Lau Kin Chi",
 "ISBN": null,
 "ISBN13": null,
 "Publisher": "Universidad de la Tierra Oaxaca",

@@ -1942,7 +1972,7 @@
 },
 {
 "Title": "This Fire Never Dies: One Year With the PKK",
-"Author": "Fréderike Geerdink",
+"Author": "Fr\u00e9derike Geerdink",
 "ISBN": null,
 "ISBN13": "9788195031047",
 "Publisher": "Leftword Books",

@@ -1973,7 +2003,7 @@
 "Read Count": 1
 },
 {
-"Title": "Exquisite Rebel: The Essays of Voltairine de Cleyre — Anarchist, Feminist, Genius",
+"Title": "Exquisite Rebel: The Essays of Voltairine de Cleyre \u2014 Anarchist, Feminist, Genius",
 "Author": "Voltairine de Cleyre",
 "Additional Authors": "Sharon Presley, Crispin Sartwell",
 "ISBN": "791460940",

@@ -1990,7 +2020,7 @@
 "Read Count": 1
 },
 {
-"Title": "Nestor Makhno — Anarchy's Cossack: The Struggle for Free Soviets in the Ukraine 1917–1921",
+"Title": "Nestor Makhno \u2014 Anarchy's Cossack: The Struggle for Free Soviets in the Ukraine 1917\u20131921",
 "Author": "Alexandre Skirda",
 "Additional Authors": "Paul Sharkey",
 "ISBN": "1902593685",

@@ -2109,7 +2139,7 @@
 },
 {
 "Title": "The Courage of Hopelessness: Chronicles of a Year of Acting Dangerously",
-"Author": "Slavoj Žižek",
+"Author": "Slavoj \u017di\u017eek",
 "ISBN": "241305586",
 "ISBN13": "9780241305584",
 "Publisher": "Allen Lane",

@@ -2190,7 +2220,7 @@
 "Read Count": 1
 },
 {
-"Title": "They Thought They Were Free: The Germans, 1933–45",
+"Title": "They Thought They Were Free: The Germans, 1933\u201345",
 "Author": "Milton Sanford Mayer",
 "ISBN": "1299641024",
 "ISBN13": "9781299641020",

@@ -2353,7 +2383,7 @@
 {
 "Title": "To Dare Imagining: Rojava Revolution",
 "Author": "Dilar Dirik",
-"Additional Authors": "David Levi Strauss, Michael Taussig, Peter Lamborn Wilson, Bill Weinberg, David Graeber, Havin Güneşer, Nazan Ustundag, Abdullah Öcalan",
+"Additional Authors": "David Levi Strauss, Michael Taussig, Peter Lamborn Wilson, Bill Weinberg, David Graeber, Havin G\u00fcne\u015fer, Nazan Ustundag, Abdullah \u00d6calan",
 "ISBN": "157027312X",
 "ISBN13": "9781570273124",
 "Publisher": "Autonomedia",

@@ -2513,7 +2543,7 @@
 },
 {
 "Title": "The Seeker of Truth Who Insisted on Another World",
-"Author": "Bager Nûjiyan",
+"Author": "Bager N\u00fbjiyan",
 "ISBN": null,
 "ISBN13": null,
 "Publisher": "Internationalist Commune",

@@ -2560,7 +2590,7 @@
 "Read Count": 1
 },
 {
-"Title": "Les Misérables",
+"Title": "Les Mis\u00e9rables",
 "Author": "Victor Hugo",
 "Additional Authors": "Norman Denny",
 "ISBN": "140444300",

@@ -2578,7 +2608,7 @@
 },
 {
 "Title": "Riot Medicine",
-"Author": "Håkan Geijer",
+"Author": "H\u00e5kan Geijer",
 "Additional Authors": "Audrey Huff, Bizhan Khodabande, Cat Paris, Citriii, drnSX42, snailsnail, ZEROC0IL",
 "ISBN": null,
 "ISBN13": null,

@@ -2867,7 +2897,7 @@
 "Read Count": 1
 },
 {
-"Title": "Catch-22: As recommended on BBC2’s Between the Covers",
+"Title": "Catch-22: As recommended on BBC2\u2019s Between the Covers",
 "Author": "Joseph Heller",
 "ISBN": "99477319",
 "ISBN13": "9780099477310",

@@ -2898,7 +2928,7 @@
 "Read Count": 1
 },
 {
-"Title": "‘Exterminate All the Brutes’",
+"Title": "\u2018Exterminate All the Brutes\u2019",
 "Author": "Sven Lindqvist",
 "Additional Authors": "Joan Tate",
 "ISBN": "1862075085",

@@ -2947,7 +2977,7 @@
 "Read Count": 1
 },
 {
-"Title": "Fanged Noumena: Collected Writings, 1987–2007",
+"Title": "Fanged Noumena: Collected Writings, 1987\u20132007",
 "Author": "Nick Land",
 "Additional Authors": "Ray Brassier, Robin Mackay",
 "ISBN": "095530878X",

@@ -3439,8 +3469,8 @@
 "Read Count": 1
 },
 {
-"Title": "The Political Thought of Abdullah Öcalan: Kurdistan, Women's Revolution and Democratic Confederalism",
-"Author": "Abdullah Öcalan",
+"Title": "The Political Thought of Abdullah \u00d6calan: Kurdistan, Women's Revolution and Democratic Confederalism",
+"Author": "Abdullah \u00d6calan",
 "ISBN": "745399762",
 "ISBN13": "9780745399768",
 "Publisher": "Pluto Press",

@@ -3958,8 +3988,8 @@
 },
 {
 "Title": "Journey to the End of the Night",
-"Author": "Louis-Ferdinand Céline",
-"Additional Authors": "John Banville, André Derval",
+"Author": "Louis-Ferdinand C\u00e9line",
+"Additional Authors": "John Banville, Andr\u00e9 Derval",
 "ISBN": "1847492401",
 "ISBN13": "9781847492401",
 "Publisher": "Alma Books",

@@ -4788,7 +4818,7 @@
 {
 "Title": "The Art of Rhetoric",
 "Author": "Aristotle",
-"Additional Authors": "Hugh Lawson-Tancred, Rudolf Kassel, Aristóteles",
+"Additional Authors": "Hugh Lawson-Tancred, Rudolf Kassel, Arist\u00f3teles",
 "ISBN": "140445102",
 "ISBN13": "9780140445107",
 "Publisher": "Penguin Classics",

@@ -5061,7 +5091,7 @@
 "Read Count": 1
 },
 {
-"Title": "Wieża Jaskółki (Saga o Wiedźminie, #4)",
+"Title": "Wie\u017ca Jask\u00f3\u0142ki (Saga o Wied\u017aminie, #4)",
 "Author": "Andrzej Sapkowski",
 "ISBN": "8370541240",
 "ISBN13": "9788370541248",

@@ -5077,7 +5107,7 @@
 "Read Count": 1
 },
 {
-"Title": "Pani Jeziora (Saga o Wiedźminie, #5)",
+"Title": "Pani Jeziora (Saga o Wied\u017aminie, #5)",
 "Author": "Andrzej Sapkowski",
 "ISBN": "8370541291",
 "ISBN13": "9788370541293",

@@ -5352,7 +5382,7 @@
 },
 {
 "Title": "Meditations on First Philosophy",
-"Author": "René Descartes",
+"Author": "Ren\u00e9 Descartes",
 "ISBN": null,
 "ISBN13": "9781622971800",
 "Publisher": "Seedbox Press, LLC.",

@@ -5479,7 +5509,7 @@
 "Read Count": 1
 },
 {
-"Title": "Gödel, Escher, Bach: An Eternal Golden Braid",
+"Title": "G\u00f6del, Escher, Bach: An Eternal Golden Braid",
 "Author": "Douglas R. Hofstadter",
 "ISBN": "465026567",
 "ISBN13": "9780465026562",

@@ -5529,7 +5559,7 @@
 },
 {
 "Title": "Storm of Steel",
-"Author": "Ernst Jünger",
+"Author": "Ernst J\u00fcnger",
 "Additional Authors": "Michael Hofmann",
 "ISBN": "141186917",
 "ISBN13": "9780141186917",

@@ -5952,7 +5982,7 @@
 {
 "Title": "The Rime of the Ancient Mariner",
 "Author": "Samuel Taylor Coleridge",
-"Additional Authors": "Gustave Doré",
+"Additional Authors": "Gustave Dor\u00e9",
 "ISBN": "486223051",
 "ISBN13": "9780486223056",
 "Publisher": "Dover Publications",

@@ -6066,8 +6096,8 @@
 },
 {
 "Title": "The Witcher: Reasons of State",
-"Author": "Michał Gałek",
-"Additional Authors": "Arkadiusz Klimek, Łukasz Poller",
+"Author": "Micha\u0142 Ga\u0142ek",
+"Additional Authors": "Arkadiusz Klimek, \u0141ukasz Poller",
 "ISBN": null,
 "ISBN13": null,
 "Publisher": "CD Projekt RED",

@@ -6195,9 +6225,9 @@
 "Read Count": 1
 },
 {
-"Title": "Crécy",
+"Title": "Cr\u00e9cy",
 "Author": "Warren Ellis",
-"Additional Authors": "Raúlo Cáceres",
+"Additional Authors": "Ra\u00falo C\u00e1ceres",
 "ISBN": "1592910408",
 "ISBN13": "9781592910403",
 "Publisher": "Avatar Press",

@@ -6341,7 +6371,7 @@
 "Read Count": 1
 },
 {
-"Title": "The Early Stuarts, 1603–1660",
+"Title": "The Early Stuarts, 1603\u20131660",
 "Author": "Godfrey Davies",
 "ISBN": "198217048",
 "ISBN13": "9780198217046",

@@ -6465,7 +6495,7 @@
 },
 {
 "Title": "The Prince",
-"Author": "Niccolò Machiavelli",
+"Author": "Niccol\u00f2 Machiavelli",
 "Additional Authors": "George Bull, Anthony Grafton",
 "ISBN": "140449159",
 "ISBN13": "9780140449150",

@@ -6870,7 +6900,7 @@
 "Read Count": 1
 },
 {
-"Title": "Le Dernier Jour d'un Condamné",
+"Title": "Le Dernier Jour d'un Condamn\u00e9",
 "Author": "Victor Hugo",
 "ISBN": null,
 "ISBN13": null,

@@ -7688,7 +7718,7 @@
 "Read Count": 1
 },
 {
-"Title": "England In Crisis, 1640–60 (Heinemann Advanced History)",
+"Title": "England In Crisis, 1640\u201360 (Heinemann Advanced History)",
 "Author": "David Sharp",
 "ISBN": "435327143",
 "ISBN13": "9780435327149",

@@ -7734,7 +7764,7 @@
 "Read Count": 1
 },
 {
-"Title": "The Coming Of The Civil War, 1603–49 (Heinemann Advanced History)",
+"Title": "The Coming Of The Civil War, 1603\u201349 (Heinemann Advanced History)",
 "Author": "David Sharp",
 "ISBN": "435327135",
 "ISBN13": "9780435327132",

@@ -7752,7 +7782,7 @@
 {
 "Title": "Ghost Hunt, Vol. 1 (Ghost Hunt, #1)",
 "Author": "Shiho Inada",
-"Additional Authors": "Fuyumi Ono, 小野不由美",
+"Additional Authors": "Fuyumi Ono, \u5c0f\u91ce\u4e0d\u7531\u7f8e",
 "ISBN": "345486242",
 "ISBN13": "9780345486240",
 "Publisher": "Del Rey",

@@ -8100,7 +8130,7 @@
 },
 {
 "Title": "The Narrow Road to the Deep North and Other Travel Sketches (Yuasa)",
-"Author": "Matsuo Bashō",
+"Author": "Matsuo Bash\u014d",
 "Additional Authors": "Nobuyuki Yuasa",
 "ISBN": "140441859",
 "ISBN13": "9780140441857",

@@ -8133,7 +8163,7 @@
 },
 {
 "Title": "D-Day Normandy: Weapons, Uniforms, Military Equipment",
-"Author": "François Bertin",
+"Author": "Fran\u00e7ois Bertin",
 "Additional Authors": "id2m",
 "ISBN": "1932033777",
 "ISBN13": "9781932033779",

@@ -8692,7 +8722,7 @@
 "Read Count": 1
 },
 {
-"Title": "The Far Side®",
+"Title": "The Far Side\u00ae",
 "Author": "Gary Larson",
 "ISBN": "836212002",
 "ISBN13": "9780836212006",

@@ -8722,7 +8752,7 @@
 "Read Count": 1
 },
 {
-"Title": "The Far Side® Gallery 2",
+"Title": "The Far Side\u00ae Gallery 2",
 "Author": "Gary Larson",
 "Additional Authors": "Stephen King",
 "ISBN": "836220854",

@@ -8953,7 +8983,7 @@
 {
 "Title": "d'artiste Character Modeling 3: Digital Artists Master Class",
 "Author": "Jan-Bart van Beek",
-"Additional Authors": "Giovanni Nakpil, César Da Col, Daniel P. Wade",
+"Additional Authors": "Giovanni Nakpil, C\u00e9sar Da Col, Daniel P. Wade",
 "ISBN": "1921002670",
 "ISBN13": "9781921002670",
 "Publisher": "Ballistic Publishing",

@@ -9154,7 +9184,7 @@
 {
 "Title": "Batman Black and White, Vol. 1",
 "Author": "Mark Chiarello",
-"Additional Authors": "Ted McKeever, Bruce Timm, Joe Kubert, Howard Chaykin, Archie Goodwin, José Muñoz, Walter Simonson, Jan Strnad, Richard Corben, Kent Williams, Chuck Dixon, Jorge Zaffino, Neil Gaiman, Simon Bisley, Klaus Janson, Andy Helfer, Tanino Liberatore, Matt Wagner, Bill Sienkiewicz, Dennis O'Neil, Teddy Kristiansen, Brian Bolland, Kevin Nowlan, Gary Gianni, Brian Stelfreeze, Katsuhiro Otomo, Jo Duffy",
+"Additional Authors": "Ted McKeever, Bruce Timm, Joe Kubert, Howard Chaykin, Archie Goodwin, Jos\u00e9 Mu\u00f1oz, Walter Simonson, Jan Strnad, Richard Corben, Kent Williams, Chuck Dixon, Jorge Zaffino, Neil Gaiman, Simon Bisley, Klaus Janson, Andy Helfer, Tanino Liberatore, Matt Wagner, Bill Sienkiewicz, Dennis O'Neil, Teddy Kristiansen, Brian Bolland, Kevin Nowlan, Gary Gianni, Brian Stelfreeze, Katsuhiro Otomo, Jo Duffy",
 "ISBN": "1563894394",
 "ISBN13": "9781563894398",
 "Publisher": "DC Comics",
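
Nearly every change in the file above is the same mechanical edit: the log was re-serialised by the processing script, and Python's json module escapes non-ASCII characters by default, so characters such as "Ö" are written back as "\u00d6". A minimal sketch of that behaviour (illustrative only, not part of the commit):

import json

entry = {"Author": "Abdullah Öcalan"}

print(json.dumps(entry, indent=4))                      # writes "\u00d6calan" (ensure_ascii defaults to True)
print(json.dumps(entry, indent=4, ensure_ascii=False))  # writes "Öcalan" verbatim

# Both forms decode back to the same string, so the data itself is unchanged.
assert json.loads(json.dumps(entry)) == json.loads(json.dumps(entry, ensure_ascii=False))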

@@ -1,4 +1,9 @@
 [
+{
+"Title": "Bottoms Up and the Devil Laughs",
+"Author": "Kerry Howley",
+"Date Added": "2024-01-14"
+},
 {
 "Title": "Jonathan Abernathy You Are Kind",
 "Author": "Molly McGhee",

data/films/log.json
File diff suppressed because it is too large

@@ -1,4 +1,14 @@
 [
+{
+"id": 93304,
+"title": "Alternative 3",
+"original_language": "en",
+"original_title": "Alternative 3",
+"overview": "Purporting to be an investigation into the UK's contemporary \"brain drain\", Alternative 3 uncovered a plan to make the Moon and Mars habitable in the event of climate change and a terminal environmental catastrophe on Earth.",
+"poster_path": "/tcVu3RX3ZycwAlQhudKwvIguilM.jpg",
+"release_date": "1977-06-20",
+"date_added": "2024-01-14"
+},
 {
 "id": 786892,
 "title": "Furiosa: A Mad Max Saga",

@@ -1,4 +1,12 @@
 [
+{
+"Title": "That's Pretty Clever",
+"Developers": "Wolfgang Warsch",
+"Platform": "Board",
+"Date Started": "2024-01-13",
+"Date Finished": "2024-01-13",
+"BGG ID": "244522"
+},
 {
 "Title": "Cacho Alalay",
 "Platform": "Board",

@@ -9,7 +17,7 @@
 "Title": "Cheating Moth",
 "Developers": "Emely Brand and Lukas Brand",
 "Platform": "Board",
-"BBG ID": "105593",
+"BGG ID": "105593",
 "Date Released": "2011",
 "Date Started": "2024-01-06",
 "Date Finished": "2024-01-06"

@@ -1,4 +1,16 @@
 [
+{
+"id": 2316,
+"name": "The Office",
+"overview": "The everyday lives of office employees in the Scranton, Pennsylvania branch of the fictional Dunder Mifflin Paper Company.",
+"poster_path": "/7DJKHzAi83BmQrWLrYYOqcoKfhR.jpg",
+"first_air_date": "2005-03-24",
+"origin_country": [
+"US"
+],
+"date_added": "2024-01-14",
+"added_by_id": "tt0386676"
+},
 {
 "id": 44045,
 "origin_country": [

File diff suppressed because it is too large

@@ -2,7 +2,7 @@
 <tr>
 <td colspan=4><h3>Diary</h3></td>
 </tr>
-{{ range ( sort ( where $.Site.Data.films.log "date_watched" "!=" "" ) "date_watched" "desc" ) }}
+{{ range ( sort ( where $.Site.Data.films.log "date_completed" "!=" "" ) "date_completed" "desc" ) }}
 <tr>
 <td>{{ with .date_added }}{{ time.Format "Jan 2, 2006" . }}{{ end }}</td>
 <td>

@@ -10,9 +10,9 @@
 </td>
 <td>{{ with .release_date }}{{ time.Format "Jan 2, 2006" . }}{{ end }}</td>
 <td>
-{{- if .date_watched -}}
-{{- time.Format "Jan 2, 2006" .date_watched -}}
-{{- if .is_rewatch }} ↻{{ end -}}
+{{- if .date_completed -}}
+{{- time.Format "Jan 2, 2006" .date_completed -}}
+{{- if .is_repeat }} ↻{{ end -}}
 {{- else -}}
 n/a
 {{- end -}}

@@ -22,7 +22,7 @@
 <tr>
 <td colspan=4><h3>Assorted</h3></td>
 </tr>
-{{ range ( sort ( where $.Site.Data.films.log "date_watched" "" ) "title" "asc" ) }}
+{{ range ( sort ( where $.Site.Data.films.log "date_completed" "" ) "title" "asc" ) }}
 <tr>
 <td>{{ with .date_added }}{{ time.Format "Jan 2, 2006" . }}{{ end }}</td>
 <td>

@@ -38,9 +38,9 @@
 </td>
 <td>{{ with .release_date }}{{ time.Format "Jan 2, 2006" . }}{{ end }}</td>
 <td>
-{{- if .date_watched -}}
-{{- time.Format "Jan 2, 2006" .date_watched -}}
-{{- if .is_rewatch }} ↻{{ end -}}
+{{- if .date_completed -}}
+{{- time.Format "Jan 2, 2006" .date_completed -}}
+{{- if .is_repeat }} ↻{{ end -}}
 {{- else -}}
 n/a
 {{- end -}}

@@ -64,9 +64,9 @@
 </td>
 <td>{{ with .release_date }}{{ time.Format "Jan 2, 2006" . }}{{ end }}</td>
 <td>
-{{- if .date_watched -}}
-{{- time.Format "Jan 2, 2006" .date_watched -}}
-{{- if .is_rewatch }} ↻{{ end -}}
+{{- if .date_completed -}}
+{{- time.Format "Jan 2, 2006" .date_completed -}}
+{{- if .is_repeat }} ↻{{ end -}}
 {{- else -}}
 n/a
 {{- end -}}

@@ -15,11 +15,9 @@ load_dotenv()
 
 TMDB_API_KEY = os.getenv('TMDB_API_KEY')
 TVDB_API_KEY = os.getenv('TVDB_API_KEY')
-OPENLIBRARY_API_KEY = os.getenv('OPENLIBRARY_API_KEY')
 
 if "" == TMDB_API_KEY: logging.error("TMDB API key not found")
 if "" == TVDB_API_KEY: logging.error("TVDB API key not found")
-if "" == OPENLIBRARY_API_KEY: logging.error("OpenLibrary API key not found")
 
 
 def add_item_to_log(item_id, media_type, log):

@@ -29,24 +27,27 @@ def add_item_to_log(item_id, media_type, log):
 item = import_by_id(item_id, media_type)
 
 if log in ['log', 'current']:
-if 'log' == log:
-date_watched = ''
-while re.search('[0-9]{4}-[0-9]{2}-[0-9]{2}', date_watched) is None:
-date_watched = input("Enter date watched [YYYY-MM-DD, t for today]:")
-if 't' == date_watched: date_watched = datetime.today().strftime('%Y-%m-%d')
-item['date_watched'] = date_watched
 
-elif 'current' == log:
+# TODO - review this when moving from one log to another
+if media_type in ['books', 'tv-series', 'games']:
 date_started = ''
 while re.search('[0-9]{4}-[0-9]{2}-[0-9]{2}', date_started) is None:
 date_started = input("Enter date started [YYYY-MM-DD, t for today]: ")
 if 't' == date_started: date_started = datetime.today().strftime('%Y-%m-%d')
 item['date_started'] = date_started
 
-is_rewatch = ''
-while is_rewatch not in ['y', 'n']:
-is_rewatch = input("Is this a rewatch? [y/n]:")
-if 'y' == is_rewatch: item['is_rewatch'] = True
+if 'log' == log:
+date_finished = ''
+while re.search('[0-9]{4}-[0-9]{2}-[0-9]{2}', date_finished) is None:
+date_finished = input("Enter date finished [YYYY-MM-DD, t for today]: ")
+if 't' == date_finished: date_finished = datetime.today().strftime('%Y-%m-%d')
+item['date_finished'] = date_finished
+
+# TODO - do this automatically
+is_repeat = ''
+while is_repeat not in ['y', 'n']:
+is_repeat = input(f"Is this a repeat entry? [y/n]: ")
+if 'y' == is_repeat: item['is_repeat'] = True
 item['added_by_id'] = item_id
 
 comments = input("Enter comments (optional): ")

@@ -75,10 +76,12 @@ def add_item_to_log(item_id, media_type, log):
 def import_by_id(import_id, media_type):
 if media_type in ['films', 'tv-series']:
 return import_from_imdb_by_id(import_id, media_type)
 
 elif media_type in ['tv-episodes']:
 return #import_from_tvdb_by_id(import_id, media_type)
 
 elif media_type in ['books']:
-return #import_from_openlibrary_by_id(import_id, media_type)
+return import_from_openlibrary_by_id(import_id, media_type)
 
 
 def import_from_imdb_by_id(imdb_id, media_type):

@@ -98,12 +101,14 @@ def import_from_imdb_by_id(imdb_id, media_type):
 # Process the response
 if (200 == response.status_code):
 logging.info(response.status_code)
 
 elif (429 == response.status_code):
 time.sleep(2)
 import_from_imdb_by_id(imdb_id, media_type)
 return
 
 else:
-logging.error(response.text)
+raise Exception(f"Error {response.status_code}: {response.text}")
 
 if ('films' == media_type): results_key = 'movie_results'
 elif ('tv-episodes' == media_type): results_key = 'TODO'

@@ -113,37 +118,198 @@ def import_from_imdb_by_id(imdb_id, media_type):
 
 if 1 == len(response_data):
 item = response_data[0]
 
 elif 0 == len(response_data):
-logging.error(f"Returned no results for {imdb_id}")
-return
+raise Exception(f"Returned no results for {imdb_id}")
 
 elif 1 < len(response_data):
-logging.warning(f"Returned more than one {media_type} for ID {imdb_id}")
-print(f"Returned more than one {media_type} for ID {imdb_id}:\n")
+logging.warning(f"Returned more than one {media_type} for ID '{imdb_id}'")
+print(f"Returned more than one {media_type} for ID '{imdb_id}':\n")
 print(json.dumps(response_data, indent=4))
 idx = input("\nEnter the index of the result to use: ")
 try:
 item = response_data[int(idx)]
 
 except:
-logging.error("Index invalid!")
-print("Index invalid!")
+raise Exception(f"Index {idx} is invalid")
 
 # Modify the returned result to add additional data
-return cleanup_result(item)
+return cleanup_result(item, media_type)
 
 
-def cleanup_result(item):
-"""Process a film or TV episode returned by the TMDB API by removing unnecessary fields and adding others"""
+def import_from_openlibrary_by_id(isbn, media_type):
+"""Retrieve a film, TV show or TV episode from TMDB using an IMDB ID"""
 
-for field_name in ['adult', 'backdrop_path', 'episode_type', 'genre_ids', 'media_type', 'origin_country', 'popularity', 'production_code', 'runtime', 'still_path', 'video', 'vote_average', 'vote_count']:
+api_url = f"https://openlibrary.org/isbn/{isbn}"
+
+# Sending API request
+response = requests.get(
+api_url,
+headers={'accept': 'application/json'}
+)
+
+# Process the response
+if (200 == response.status_code):
+logging.info(response.status_code)
+
+elif (429 == response.status_code):
+time.sleep(2)
+import_from_openlibrary_by_id(isbn, media_type)
+return
+
+else:
+raise Exception(f"Error {reponse.status_code}: {response.text}")
+
+item = json.loads(response.text)
+
+for key in ['authors', 'works']:
+if key in item:
+for i, sub_item in enumerate(item[key]):
+item[key][i] = import_from_openlibrary_by_ol_key(sub_item['key'])
+
+if 'works' in item:
+if len(item['works']) > 1:
+raise Exception(f"Multiple works found for {isbn}")
+
+else:
+item['work'] = item['works'][0]
+del item['works']
+
+# Modify the returned result to add additional data
+return cleanup_result(item, media_type)
+
+
+def import_from_openlibrary_by_ol_key(key):
+"""Retrieves an item (author or work) from OpenLibrary using an OL key"""
+
+_, mode, ol_id = key.split('/')
+
+if mode in ['works', 'authors']:
+api_url = f"https://openlibrary.org{key}"
+
+# Sending API request
+response = requests.get(
+api_url,
+headers={'accept': 'application/json'}
+)
+
+# Process the response
+if (200 == response.status_code):
+logging.info(response.status_code)
+
+elif (429 == response.status_code):
+time.sleep(2)
+import_from_openlibrary_by_ol_key(key)
+return
+
+else:
+raise Exception(f"Error {reponse.status_code}: {response.text}")
+
+item = json.loads(response.text)
+
+if 'authors' == mode:
+author = {
+'id': ol_id,
+'name': item['name']
+}
+
+if 'personal_name' in item:
+if item['name'] != item['personal_name']: author['personal_name'] = item['personal_name']
+
+return author
+
+elif 'works' == mode:
+work = {
+'id': ol_id,
+'title': item['title']
+}
+
+for key in ['first_publish_date', 'subjects']:
+if key in item: work[key] = item[key]
+
+return work
+
+else:
+raise Exception(f"Unknown OpenLibrary key '{mode}'")
+
+
+def cleanup_result(item, media_type):
+"""Process a film, TV series, TV episode or book returned by their respecitve APIs by removing unnecessary fields and adding others"""
+
+for field_name in [
+'adult', # TMDB
+'backdrop_path', # TMDB
+'copyright_date', # OpenLibrary
+'classifications', # OpenLibrary
+'created', # OpenLibrary
+'episode_type', # TMDB
+'first_sentence', # OpenLibrary
+'genre_ids', # TMDB
+'identifiers', # OpenLibrary
+'media_type', # TMDB
+'last_modified', # OpenLibrary
+'latest_revision', # OpenLibrary
+'lc_classifications', # OpenLibrary
+'local_id', # OpenLibrary
+'ocaid', # OpenLibrary
+'popularity', # TMDB
+'production_code', # TMDB
+'revision', # OpenLibrary
+'runtime', # TMDB
+'source_records', # OpenLibrary
+'still_path', # TMDB
+'type', # OpenLibrary
+'video', # TMDB
+'vote_average', # TMDB
+'vote_count' # TMDB
+]:
 if field_name in item: del item[field_name]
 
-# TODO - select automatically
-title_key = 'name'
+if media_type in ['films', 'tv-series']:
+title_key = 'name' if 'tv-series' == media_type else 'title'
 
 if f"original_{title_key}" in item and 'original_language' in item:
 if item[f"original_{title_key}"] == item[title_key] and item['original_language'] == 'en':
 del item[f"original_{title_key}"], item['original_language']
 
+if 'books' == media_type:
+_, _, item['id'] = item['key'].split('/')
+del item['key']
+
+for key in ['isbn_10', 'isbn_13']:
+if key in item:
+if len(item[key]) > 1:
+raise Exception("Multiple ISBN results")
+
+else:
+item[key] = item[key][0]
+
+if 'publish_places' in item:
+if len(item['publish_places']) > 1:
+raise Exception("Multiple publish_places")
+
+else:
+item['published_in'] = item['publish_places'][0]
+del item['publish_places']
+
+if 'languages' in item:
+item['languages'] = [lang['key'].split('/')[2] for lang in item['languages']]
+
+if 'translation_of' in item:
+if item['translation_of'] == item['work']['title']:
+del item['translation_of']
+else:
+raise Exception(f"translation_of '{item['translation_of']}' is different to work title '{item['work']['title']}'")
+
+if 'translated_from' in item:
+if len(item['translated_from']) > 1:
+raise Exception("Multiple translated_from results")
+
+else:
+item['work']['original_language'] = item['translated_from'][0]['key'].split('/')[2]
+del item['translated_from']
+
 if 'date_added' not in item: item['date_added'] = datetime.today().strftime('%Y-%m-%d')
 
 return item

@@ -154,6 +320,7 @@ def main():
 while media_type not in ['films', 'tv-episodes', 'tv-series', 'books']:
 media_type = input("Select media type [films|tv-episodes|tv-series|books]: ")
 
+try:
 if 'films' == media_type:
 log = ''
 while log not in ['log', 'wishlist']:

@@ -174,6 +341,8 @@ def main():
 while re.search("[0-9]+", isbn) is None:
 isbn = input("Enter ISBN: ")
 
+add_item_to_log(isbn, media_type, log)
+
 elif 'tv-episodes' == media_type:
 imdb_id = ''
 while re.search("tt[0-9]+", imdb_id) is None:

@@ -192,6 +361,10 @@ def main():
 
 add_item_to_log(imdb_id, media_type, log)
 
+except Exception as error:
+logging.error(repr(error))
+print(error)
+
 
 if __name__ == "__main__":
 main()
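
The new import path above boils down to two OpenLibrary requests: fetch the edition record by ISBN, then resolve each linked author and work key. A condensed, standalone sketch of that flow, mirroring the endpoints the commit uses (assumes the requests package; the script's logging, 429 retry and field clean-up are omitted, and the function name is illustrative only):

import requests

def fetch_openlibrary_edition(isbn):
    """Fetch an edition by ISBN, then resolve its author and work keys."""
    edition = requests.get(
        f"https://openlibrary.org/isbn/{isbn}",
        headers={'accept': 'application/json'},
    ).json()

    def resolve(key):
        # key looks like "/authors/OL12345A" or "/works/OL67890W"
        return requests.get(
            f"https://openlibrary.org{key}",
            headers={'accept': 'application/json'},
        ).json()

    edition['authors'] = [resolve(a['key']) for a in edition.get('authors', [])]
    edition['works'] = [resolve(w['key']) for w in edition.get('works', [])]
    return edition

if __name__ == "__main__":
    print(fetch_openlibrary_edition("9782344025642")['title'])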

@@ -14,11 +14,9 @@ load_dotenv()
 
 TMDB_API_KEY = os.getenv('TMDB_API_KEY')
 TVDB_API_KEY = os.getenv('TVDB_API_KEY')
-OPENLIBRARY_API_KEY = os.getenv('OPENLIBRARY_API_KEY')
 
 if "" == TMDB_API_KEY: logging.error("TMDB API key not found")
 if "" == TVDB_API_KEY: logging.error("TVDB API key not found")
-if "" == OPENLIBRARY_API_KEY: logging.error("OpenLibrary API key not found")
 
 def process_log(media_type, log):
 logging.info(f"Processing {media_type}/{log}…")

@@ -41,29 +39,48 @@
 if 'Date Added' in item:
 log_item_values['date_added'] = item['Date Added']
 del item['Date Added']
 
 if 'Date Watched' in item:
-log_item_values['date_watched'] = item['Date Watched']
+log_item_values['date_finished'] = item['Date Watched']
 del item['Date Watched']
 
 if 'Rewatch' in item:
-log_item_values['is_rewatch'] = item['Rewatch']
+log_item_values['is_repeat'] = item['Rewatch']
 del item['Rewatch']
 
 if 'Comments' in item:
 log_item_values['comments'] = item['Comments']
 del item['Comments']
 
 if 'Series Title' in item:
 log_item_values['series_title'] = item['Series Title']
 del item['Series Title']
 
 if 'Episode Title' in item:
 log_item_values['name'] = item['Episode Title']
 del item['Episode Title']
 
 if 'Episode Number' in item:
-split_num = log_item_values['episode_number'].split("E")
-log_item_values['episode_number'] = split_num[1]
-log_item_values['season_number'] = split_num[0] or None
+if re.search("[0-9]+x[0-9]+", item['Episode Number']) is not None:
+season_no, _, episode_no = log_item_values['episode_number'].split("x")
+elif re.search("S[0-9]+E[0-9]+", item['Episode Number']) is not None:
+season_no, _, episode_no = log_item_values['episode_number'].split("E")
+
+elif re.search("E[0-9]+", item['Episode Number']) is not None:
+season_no = None
+episode_no = item['episode_number'][1:]
+else:
+logging.error(f"Invalid episode number format '{item['Episode Number']}'")
+return
+
+log_item_values['season_number'] = season_no
+log_item_values['episode_number'] = episode_no
 del item['Episode Number']
 
 if 'IMDB ID' in item:
 log_items[i] = import_by_id(item['IMDB ID'], media_type)
 
 else:
 log_items[i] = import_by_details(item, item_title, media_type)

@@ -75,6 +92,7 @@ def process_log(media_type, log):
 
 with open(f"./data/{media_type}/{log}.json", "w") as log_file:
 json.dump(log_items, log_file, indent=4)
 
 else:
 logging.warning(f"Skipped {item_title}")

@@ -92,11 +110,16 @@ def process_log(media_type, log):
 def import_by_details(item, item_title, media_type):
 if media_type in ['films', 'tv-series']:
 return import_from_tmdb_by_details(item, item_title, media_type)
 
 elif media_type in ['tv-episodes']:
 return #import_from_tvdb_by_details(item, item_title, media_type)
 
 elif media_type in ['books']:
 return #import_from_openlibrary_by_details(item, item_title, media_type)
+
+elif media_type in ['games']:
+return #import_from_igdb_by_details(item, item_title, media_type)
 
 
 def import_from_tmdb_by_details(item, item_title, media_type):
 """Retrieve a film or TV series from TMDB using its title"""

@@ -128,7 +151,7 @@ def import_from_tmdb_by_details(item, item_title, media_type):
 response_data = json.loads(response.text)['results']
 
 if 1 == len(response_data):
-return cleanup_result(response_data[0])
+return cleanup_result(response_data[0], media_type)
 
 elif 0 == len(response_data):
 logging.warning(f"Returned no {media_type} for {item_title}")

@@ -140,7 +163,7 @@
 response_data = [result for result in response_data if result[title_key] == item_title]
 
 if 1 == len(response_data):
-return cleanup_result(response_data[0])
+return cleanup_result(response_data[0], media_type)
 
 else:
 logging.warning(f"Returned more than one {media_type} for '{item_title}':\n")

@@ -149,7 +172,8 @@
 
 if "" != idx:
 try:
-return cleanup_result(response_data[int(idx)])
+return cleanup_result(response_data[int(idx)], media_type)
 
 except:
 logging.error("Index invalid!")
 print("Index invalid!")

@@ -179,9 +203,13 @@ while media_type not in ['films', 'tv-episodes', 'tv-series', 'books']:
 while log not in ['log', 'current', 'wishlist']:
 log = input ("Enter log to process [log|current|wishlist]:")
 
+# TODO
+
 elif 'tv-episodes' == media_type:
 process_log(media_type, 'log')
 
+# TODO
+
 elif 'tv-series' == media_type:
 log = ''
 while log not in ['log', 'current', 'wishlist']:
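
For reference, the three episode-number formats the new branch above distinguishes ("1x02", "S01E02" and "E02") can also be normalised with a single regular expression. A hedged, standalone sketch (illustrative only; it is not the committed implementation, which uses re.search plus split()):

import re

def parse_episode_number(value):
    """Return (season_number, episode_number) from '1x02', 'S01E02' or 'E02'."""
    match = re.fullmatch(r"(?:S?(\d+)[xE])?E?(\d+)", value)
    if match is None:
        raise ValueError(f"Invalid episode number format '{value}'")
    # season is None for 'E02'-style values with no season component
    return match.group(1), match.group(2)

assert parse_episode_number("1x02") == ("1", "02")
assert parse_episode_number("S01E02") == ("01", "02")
assert parse_episode_number("E07") == (None, "07")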