From 6d0140759052edc0536c4a7dfcd8546a39ca4a3c Mon Sep 17 00:00:00 2001
From: Gaisberg <93206976+Gaisberg@users.noreply.github.com>
Date: Mon, 4 Nov 2024 12:56:58 +0200
Subject: [PATCH] Requesthandler bugfixes (#847)

* refactor: update scraping logic in state_transition.py

* chore: fix Plex watchlist validation, Jackett and Prowlarr invalid response types

* chore: revert _get_indexers changes

---------

Co-authored-by: Gaisberg
---
 src/program/services/content/plex_watchlist.py | 2 +-
 src/program/services/scrapers/jackett.py       | 2 +-
 src/program/services/scrapers/prowlarr.py      | 4 ++--
 src/program/state_transition.py                | 6 +++---
 src/program/utils/request.py                   | 6 ++----
 5 files changed, 9 insertions(+), 11 deletions(-)

diff --git a/src/program/services/content/plex_watchlist.py b/src/program/services/content/plex_watchlist.py
index bff1dadd..8bbef8c6 100644
--- a/src/program/services/content/plex_watchlist.py
+++ b/src/program/services/content/plex_watchlist.py
@@ -26,7 +26,7 @@ def validate(self):
             logger.error("Plex token is not set!")
             return False
         try:
-            self.api.validate()
+            self.api.validate_account()
         except Exception as e:
             logger.error(f"Unable to authenticate Plex account: {e}")
             return False
diff --git a/src/program/services/scrapers/jackett.py b/src/program/services/scrapers/jackett.py
index 79c6fd4d..5628e768 100644
--- a/src/program/services/scrapers/jackett.py
+++ b/src/program/services/scrapers/jackett.py
@@ -243,7 +243,7 @@ def _fetch_results(self, url: str, params: Dict[str, str], indexer_title: str, s
         """Fetch results from the given indexer"""
         try:
             response = get(session=self.session, url=url, params=params, timeout=self.settings.timeout)
-            return self._parse_xml(response.data)
+            return self._parse_xml(response.response.text)
         except (HTTPError, ConnectionError, Timeout):
             logger.debug(f"Indexer failed to fetch results for {search_type}: {indexer_title}")
         except Exception as e:
diff --git a/src/program/services/scrapers/prowlarr.py b/src/program/services/scrapers/prowlarr.py
index 980bb4c9..4dc5292f 100644
--- a/src/program/services/scrapers/prowlarr.py
+++ b/src/program/services/scrapers/prowlarr.py
@@ -227,14 +227,14 @@ def _get_indexer_from_json(self, json_content: str) -> list[ProwlarrIndexer]:
         indexer_list = []
         for indexer in json.loads(json_content):
             indexer_list.append(ProwlarrIndexer(title=indexer["name"], id=str(indexer["id"]), link=indexer["infoLink"], type=indexer["protocol"], language=indexer["language"], movie_search_capabilities=(s[0] for s in indexer["capabilities"]["movieSearchParams"]) if len([s for s in indexer["capabilities"]["categories"] if s["name"] == "Movies"]) > 0 else None, tv_search_capabilities=(s[0] for s in indexer["capabilities"]["tvSearchParams"]) if len([s for s in indexer["capabilities"]["categories"] if s["name"] == "TV"]) > 0 else None))
-
+
         return indexer_list

     def _fetch_results(self, url: str, params: Dict[str, str], indexer_title: str, search_type: str) -> List[Tuple[str, str]]:
         """Fetch results from the given indexer"""
         try:
             response = get(self.session, url, params=params, timeout=self.timeout)
-            return self._parse_xml(response.data, indexer_title)
+            return self._parse_xml(response.response.text, indexer_title)
         except (HTTPError, ConnectionError, Timeout):
             logger.debug(f"Indexer failed to fetch results for {search_type.title()} with indexer {indexer_title}")
         except Exception as e:
diff --git a/src/program/state_transition.py b/src/program/state_transition.py
index 7596f221..451fe11b 100644
--- a/src/program/state_transition.py
+++ b/src/program/state_transition.py
@@ -35,12 +35,12 @@ def process_event(emitted_by: Service, existing_item: MediaItem | None = None, c

     elif existing_item is not None and existing_item.last_state == States.Indexed:
         next_service = Scraping
-        if emitted_by != Scraping and Scraping.can_we_scrape(existing_item):
+        if emitted_by != Scraping and Scraping.should_submit(existing_item):
             items_to_submit = [existing_item]
         elif existing_item.type == "show":
-            items_to_submit = [s for s in existing_item.seasons if s.last_state != States.Completed and Scraping.can_we_scrape(s)]
+            items_to_submit = [s for s in existing_item.seasons if s.last_state != States.Completed and Scraping.should_submit(s)]
         elif existing_item.type == "season":
-            items_to_submit = [e for e in existing_item.episodes if e.last_state != States.Completed and Scraping.can_we_scrape(e)]
+            items_to_submit = [e for e in existing_item.episodes if e.last_state != States.Completed and Scraping.should_submit(e)]

     elif existing_item is not None and existing_item.last_state == States.Scraped:
         next_service = Downloader
diff --git a/src/program/utils/request.py b/src/program/utils/request.py
index b15838f8..a5b26bce 100644
--- a/src/program/utils/request.py
+++ b/src/program/utils/request.py
@@ -70,11 +70,10 @@ def __init__(self, message, response=None):

 class ResponseObject:
     """Response object to handle different response formats."""

-    def __init__(self, response: Response, response_type=SimpleNamespace):
+    def __init__(self, response: Response):
         self.response = response
         self.is_ok = response.ok
         self.status_code = response.status_code
-        self.response_type = response_type
         self.data = self.handle_response(response)

     def handle_response(self, response: Response) -> dict:
@@ -158,7 +157,6 @@ def _make_request(
     timeout=5,
     additional_headers=None,
     retry_if_failed=True,
-    response_type=SimpleNamespace,
     proxies=None,
     json=None,
 ) -> ResponseObject:
@@ -179,7 +177,7 @@ def _make_request(
     finally:
         session.close()

-    return ResponseObject(response, response_type)
+    return ResponseObject(response)

 def ping(session: Session, url: str, timeout: int = 10, additional_headers=None, proxies=None, params=None) -> ResponseObject:
     """Ping method to check connectivity to a URL by making a simple GET request."""
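
Note for reviewers: the Jackett/Prowlarr changes above hinge on how
ResponseObject.handle_response treats non-JSON bodies. The sketch below is a
hypothetical reading of that behavior, assuming handle_response only parses
JSON into .data and leaves other content types (such as the Torznab XML these
indexers return) empty; the real parsing logic is not part of this diff:

    from requests import Response

    class ResponseObject:
        """Response object to handle different response formats."""

        def __init__(self, response: Response):
            self.response = response          # raw requests.Response, kept for .text
            self.is_ok = response.ok
            self.status_code = response.status_code
            self.data = self.handle_response(response)

        def handle_response(self, response: Response) -> dict:
            # Assumed behavior: only JSON bodies populate .data; anything else
            # (e.g. XML) yields an empty dict, which is why the scrapers above
            # switched from response.data to response.response.text.
            content_type = response.headers.get("Content-Type", "")
            if response.content and "application/json" in content_type:
                return response.json()
            return {}

Under that assumption, the call-site change in jackett.py and prowlarr.py
simply reads the unparsed body off the wrapped Response:

    response = get(session=self.session, url=url, params=params, timeout=self.settings.timeout)
    xml_body = response.response.text  # raw XML string, handed to self._parse_xml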