From 86cb92280b071c6f18a31640ec8c7a885b4a3d09 Mon Sep 17 00:00:00 2001 From: e0406370 Date: Fri, 7 Nov 2025 15:07:54 +0800 Subject: [PATCH 1/3] [fix] prevent double encoding of query params --- src/scrape/base_scraper.py | 5 ++--- src/utility/utils.py | 8 +++++++- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/src/scrape/base_scraper.py b/src/scrape/base_scraper.py index 8924fda..b9f1262 100644 --- a/src/scrape/base_scraper.py +++ b/src/scrape/base_scraper.py @@ -6,7 +6,6 @@ from src.utility.lib import CustomException, Logger from src.utility.utils import EndpointType, Utils, ViewType from typing import Dict, Type, TypeVar -from urllib.parse import urlencode T = TypeVar("T", bound="BaseScraper") @@ -23,7 +22,7 @@ def scrape(cls: Type[T], endpoint: Endpoint, req: Request) -> T | None: if endpoint.type == EndpointType.QUERY: params = req.query_params - url = Utils.create_filmarks_link(endpoint.path + "?" + urlencode(params)) + url = Utils.create_filmarks_link(endpoint.path + "?" + Utils.safe_encode(params)) elif endpoint.type == EndpointType.PATH: params = req.path_params @@ -31,7 +30,7 @@ def scrape(cls: Type[T], endpoint: Endpoint, req: Request) -> T | None: elif endpoint.type == EndpointType.COMBINED: params = {**req.query_params, **req.path_params} - url = Utils.create_filmarks_link(endpoint.path.format(**req.path_params) + "?" + urlencode(req.query_params)) + url = Utils.create_filmarks_link(endpoint.path.format(**req.path_params) + "?" + Utils.safe_encode(req.query_params)) else: raise ValueError(f"Unexpected EndpointType: {endpoint.type}") # pragma: no cover diff --git a/src/utility/utils.py b/src/utility/utils.py index dc91ff9..e0a8daf 100644 --- a/src/utility/utils.py +++ b/src/utility/utils.py @@ -1,9 +1,10 @@ from datetime import datetime, timezone from enum import Enum +from fastapi.datastructures import QueryParams from msgspec import Struct from src.utility.models import AnimeDataClip, AnimeDataMark, DramaDataClip, DramaDataMark, MovieDataClip, MovieDataMark from typing import Any, Dict, Set, Tuple -from urllib.parse import urljoin +from urllib.parse import unquote, urlencode, urljoin class EndpointType(str, Enum): @@ -102,6 +103,11 @@ class Utils: @staticmethod def get_scrape_date() -> datetime: return datetime.now(timezone.utc).isoformat(sep=" ", timespec="microseconds") + + @staticmethod + def safe_encode(params: QueryParams) -> str: + params = {k: unquote(v) for k, v in params.items()} + return urlencode(params) @staticmethod def create_filmarks_link(url: str) -> str: From 675239127df9818e02a2ced4601b9c4ed98235ed Mon Sep 17 00:00:00 2001 From: e0406370 Date: Sat, 8 Nov 2025 07:41:28 +0800 Subject: [PATCH 2/3] [feat] add episode details to info EP for anime and drama views --- src/scrape/info/info_anime_scraper.py | 2 ++ src/scrape/info/info_drama_scraper.py | 2 ++ src/scrape/info/info_scraper.py | 14 ++++++++++ src/utility/utils.py | 13 +++++++++ tests/anime/test_anime_info.py | 39 +++++++++++++++++++++++++++ tests/drama/test_drama_info.py | 37 +++++++++++++++++++++++++ 6 files changed, 107 insertions(+) diff --git a/src/scrape/info/info_anime_scraper.py b/src/scrape/info/info_anime_scraper.py index 65a0008..2ec933e 100644 --- a/src/scrape/info/info_anime_scraper.py +++ b/src/scrape/info/info_anime_scraper.py @@ -75,6 +75,8 @@ def set_info_data(self) -> None: value = self._get_person_info(field) if value: self.data[field.key] = value + self.data["episodes"] = self._get_episode_info() + Logger.info(self.get_logging(id=[self.series_id, self.season_id], 
text=self.data)) def set_review_data(self) -> None: diff --git a/src/scrape/info/info_drama_scraper.py b/src/scrape/info/info_drama_scraper.py index 20612c5..6b9b8ad 100644 --- a/src/scrape/info/info_drama_scraper.py +++ b/src/scrape/info/info_drama_scraper.py @@ -70,6 +70,8 @@ def set_info_data(self) -> None: value = self._get_person_info(field) if value: self.data[field.key] = value + self.data["episodes"] = self._get_episode_info() + Logger.info(self.get_logging(id=[self.series_id, self.season_id], text=self.data)) def set_review_data(self) -> None: diff --git a/src/scrape/info/info_scraper.py b/src/scrape/info/info_scraper.py index 0dd4eb3..7cda8ab 100644 --- a/src/scrape/info/info_scraper.py +++ b/src/scrape/info/info_scraper.py @@ -131,6 +131,20 @@ def _get_person_info(self, field: PersonInfo) -> List[Dict[str, Any]] | None: in info_elem.find_next_sibling("ul").find_all("li") ] if info_elem else None + def _get_episode_info(self) -> List[Dict[str, Any]] | None: + info_elem = self.detail_head.select("div.c2-episode-list-item") + + return [ + Utils.create_episode_info( + episode=episode.select_one("div.c2-episode-list-item__header-text-number").text, + title=episode.select_one("div.c2-episode-list-item__header-text-title").text, + outline=epi.text.replace("\n", "") if (epi := episode.select_one("div.c2-episode-list-item__outline-text")) else "", + link=episode.select_one("a").attrs["href"] + ) + for episode + in info_elem + ] if info_elem else None + def _is_reviews_empty(self) -> Tag | None: condition = self.detail_foot.select_one("div.p2-empty-reviews-message__text") diff --git a/src/utility/utils.py b/src/utility/utils.py index e0a8daf..6f3a8db 100644 --- a/src/utility/utils.py +++ b/src/utility/utils.py @@ -135,6 +135,19 @@ def create_person_info(name: str, link: str, character: str = "") -> Dict[str, A return person_info + @staticmethod + def create_episode_info(episode: str, title: str, link: str, outline: str = "") -> Dict[str, Any]: + episode_info = {} + + episode_info["episode"] = int(episode) + episode_info["title"] = title + if outline: + episode_info["outline"] = outline + episode_info["id"] = int(link.split("/")[-1]) + episode_info["link"] = Utils.create_filmarks_link(link) + + return episode_info + @staticmethod def create_review_info(user_name: str, user_link: str, review_date: str, review_rating: str, review_contents: str = "", review_link: str = "") -> Dict[str, Any]: review_info = {} diff --git a/tests/anime/test_anime_info.py b/tests/anime/test_anime_info.py index 83ab1db..9153025 100644 --- a/tests/anime/test_anime_info.py +++ b/tests/anime/test_anime_info.py @@ -185,6 +185,15 @@ def test_review_input_more_than_max_threshold(client_nc, path) -> None: "link": "https://filmarks.com/people/214282", }, ], + "episodes": [ + { + "episode": 1, + "title": "漂流? 
冒険の島!", + "outline": "干ばつ。洪水。真夏に降る雪・・・。世界中がおかしかったその夏。日本からは見えるはずのないオーロラを目撃した太一たちは、オーロラの裂け目から飛来した謎の光に異世界へと連れ去られてしまう。すべてが未知のその世界で太一たちが最初に出会ったのは、自分たちを「待っていた」という奇妙な生物、デジタルモンスターだった。", + "id": 73115, + "link": "https://filmarks.com/animes/2592/3304/episodes/73115" + }, + ], }, ], ) @@ -214,6 +223,7 @@ def test_info_with_results_single_1(client_nc, test_data, caplog) -> None: for field in fields: assert get_json_val(resp_data, f"$.data.{field}") == get_json_val(test_data, f"$.{field}") + assert get_json_val(test_data, "$.episodes[0]") in get_json_val(resp_data, "$.data.episodes") assert get_json_val(resp_data, "$.data.rating") == pytest.approx(get_json_val(test_data, "$.rating"), abs=0.5) assert get_json_val(resp_data, "$.data.mark_count") >= get_json_val(test_data, "$.mark_count") assert get_json_val(resp_data, "$.data.clip_count") >= get_json_val(test_data, "$.clip_count") @@ -302,6 +312,14 @@ def test_info_with_results_single_1(client_nc, test_data, caplog) -> None: "link": "https://filmarks.com/people/275804", }, ], + "episodes": [ + { + "episode": 1, + "title": "第1話", + "id": 39867, + "link": "https://filmarks.com/animes/1533/2046/episodes/39867" + }, + ], }, ], ) @@ -331,6 +349,7 @@ def test_info_with_results_single_2(client_nc, test_data, caplog) -> None: for field in fields: assert get_json_val(resp_data, f"$.data.{field}") == get_json_val(test_data, f"$.{field}") + assert get_json_val(test_data, "$.episodes[0]") in get_json_val(resp_data, "$.data.episodes") assert get_json_val(resp_data, "$.data.rating") == pytest.approx(get_json_val(test_data, "$.rating"), abs=0.5) assert get_json_val(resp_data, "$.data.mark_count") >= get_json_val(test_data, "$.mark_count") assert get_json_val(resp_data, "$.data.clip_count") >= get_json_val(test_data, "$.clip_count") @@ -413,6 +432,15 @@ def test_info_with_results_single_2(client_nc, test_data, caplog) -> None: "link": "https://filmarks.com/people/307449", }, ], + "episodes": [ + { + "episode": 1, + "title": "第1章 目覚め", + "outline": "長い眠りから目覚めたアン。火の国の軍艦に乗っていることに気づき驚くが、それは仲間が乗っ取ったものだった。これまでの経緯を聞き、アンは死んだことになっていると知ってショックを受ける。サカの計画では、日食の時に火の国を奇襲するとのことだったが、アンは自分一人で戦うべきだと飛び出す。3年ぶりに火の国に戻ったズーコは父である王と再会。王はズーコがアバターを殺したと聞いて喜ぶが、ズーコはアンが生きているのではと思っていた。", + "id": 114131, + "link": "https://filmarks.com/animes/3691/4983/episodes/114131" + }, + ], }, ], ) @@ -441,6 +469,7 @@ def test_info_with_results_single_3(client_nc, test_data, caplog) -> None: for field in fields: assert get_json_val(resp_data, f"$.data.{field}") == get_json_val(test_data, f"$.{field}") + assert get_json_val(test_data, "$.episodes[0]") in get_json_val(resp_data, "$.data.episodes") assert get_json_val(resp_data, "$.data.rating") == pytest.approx(get_json_val(test_data, "$.rating"), abs=0.5) assert get_json_val(resp_data, "$.data.mark_count") >= get_json_val(test_data, "$.mark_count") assert get_json_val(resp_data, "$.data.clip_count") >= get_json_val(test_data, "$.clip_count") @@ -514,6 +543,15 @@ def test_info_with_results_single_3(client_nc, test_data, caplog) -> None: "link": "https://filmarks.com/people/287199" } ], + "episodes": [ + { + "episode": 1, + "title": "2016年のきみへ", + "outline": "ハッピーを広めるため地球に降り立ったタコピーは、人間の女の子しずかと出会う。ピンチを救ってもらったタコピーは、不思議な力を持つハッピー道具で彼女のために奔走するのだが、しずかは笑顔すら見せない。どうやらその背景には学校のお友達とおうちの複雑な事情が関係しているようで……。", + "id": 143599, + "link": "https://filmarks.com/animes/4809/6518/episodes/143599" + }, + ], }, ], ) @@ -544,6 +582,7 @@ def test_info_with_results_single_4(client_nc, test_data, caplog) -> None: for field in fields: assert get_json_val(resp_data, f"$.data.{field}") == 
get_json_val(test_data, f"$.{field}") + assert get_json_val(test_data, "$.episodes[0]") in get_json_val(resp_data, "$.data.episodes") assert get_json_val(resp_data, "$.data.rating") == pytest.approx(get_json_val(test_data, "$.rating"), abs=0.5) assert get_json_val(resp_data, "$.data.mark_count") >= get_json_val(test_data, "$.mark_count") assert get_json_val(resp_data, "$.data.clip_count") >= get_json_val(test_data, "$.clip_count") diff --git a/tests/drama/test_drama_info.py b/tests/drama/test_drama_info.py index d5eb6a2..76ce63a 100644 --- a/tests/drama/test_drama_info.py +++ b/tests/drama/test_drama_info.py @@ -154,6 +154,14 @@ def test_review_input_more_than_max_threshold(client_nc, path) -> None: "link": "https://filmarks.com/people/146784", }, ], + "episodes": [ + { + "episode": 1, + "title": "#1", + "id": 33060, + "link": "https://filmarks.com/dramas/1137/2418/episodes/33060" + }, + ], }, ], ) @@ -182,6 +190,7 @@ def test_info_with_results_single_1(client_nc, test_data, caplog) -> None: for field in fields: assert get_json_val(resp_data, f"$.data.{field}") == get_json_val(test_data, f"$.{field}") + assert get_json_val(test_data, "$.episodes[0]") in get_json_val(resp_data, "$.data.episodes") assert get_json_val(resp_data, "$.data.rating") == pytest.approx(get_json_val(test_data, "$.rating"), abs=0.5) assert get_json_val(resp_data, "$.data.mark_count") >= get_json_val(test_data, "$.mark_count") assert get_json_val(resp_data, "$.data.clip_count") >= get_json_val(test_data, "$.clip_count") @@ -275,6 +284,14 @@ def test_info_with_results_single_1(client_nc, test_data, caplog) -> None: "link": "https://filmarks.com/people/85588", }, ], + "episodes": [ + { + "episode": 1, + "title": "第1話", + "id": 191817, + "link": "https://filmarks.com/dramas/11358/15763/episodes/191817" + }, + ], }, ], ) @@ -302,6 +319,7 @@ def test_info_with_results_single_2(client_nc, test_data, caplog) -> None: for field in fields: assert get_json_val(resp_data, f"$.data.{field}") == get_json_val(test_data, f"$.{field}") + assert get_json_val(test_data, "$.episodes[0]") in get_json_val(resp_data, "$.data.episodes") assert get_json_val(resp_data, "$.data.rating") == pytest.approx(get_json_val(test_data, "$.rating"), abs=0.5) assert get_json_val(resp_data, "$.data.mark_count") >= get_json_val(test_data, "$.mark_count") assert get_json_val(resp_data, "$.data.clip_count") >= get_json_val(test_data, "$.clip_count") @@ -424,6 +442,14 @@ def test_info_with_results_single_2(client_nc, test_data, caplog) -> None: "link": "https://filmarks.com/people/127871", }, ], + "episodes": [ + { + "episode": 1, + "title": "マイケル", + "id": 3815, + "link": "https://filmarks.com/dramas/88/335/episodes/3815" + }, + ], }, ], ) @@ -452,6 +478,7 @@ def test_info_with_results_single_3(client_nc, test_data, caplog) -> None: for field in fields: assert get_json_val(resp_data, f"$.data.{field}") == get_json_val(test_data, f"$.{field}") + assert get_json_val(test_data, "$.episodes[0]") in get_json_val(resp_data, "$.data.episodes") assert get_json_val(resp_data, "$.data.rating") == pytest.approx(get_json_val(test_data, "$.rating"), abs=0.5) assert get_json_val(resp_data, "$.data.mark_count") >= get_json_val(test_data, "$.mark_count") assert get_json_val(resp_data, "$.data.clip_count") >= get_json_val(test_data, "$.clip_count") @@ -590,6 +617,15 @@ def test_info_with_results_single_3(client_nc, test_data, caplog) -> None: "link": "https://filmarks.com/people/6488" } ], + "episodes": [ + { + "episode": 1, + "title": "第1話", + "outline": 
"「3000万円で、あなたを買います」――。心優しきゲイのおじさんを買ったのは、中3のトーヨコ中学生!?動物飼育員の波多野玄一、50歳。恋愛対象が男性の、いわゆるゲイのおじさん。アパートで動物たちと暮らす玄一は、ファミリーサイズのアイスを一緒に食べてくれる恋人がほしくなってパートナー相談所に通うものの、手応えはサッパリ。「やっぱり、ひとりでコツコツ食べます」と肩を落とす玄一に、相談所の百瀬(渋谷凪咲)が一言、「恋と革命です。『人間は、恋と革命のために生まれてきたのだ』。太宰の言葉です」――。一方、中学校教師の作田索は、ゲイなのに婚姻届を書いてみた。『夫となる人 吉田亮太。夫となる人 作田索』。受理されるはずもなく…。恋にも人生にも冷めきっていて、恋人との別れを決断する索。ひょんなことから索と出会った玄一は、他人事とは思えずに、「だったら家を買うってどうですか?人間は、恋と革命のために生まれてきたんです!家を『かすがい』にして、俺たちの恋愛にだって意味があることを証明しましょう!」。そんな玄一に、索の生徒・楠ほたるが突然、「3000万円あります。家欲しいんですよね。私、あなたを買います」。学校に行かずトーヨコ通いのほたるは、なぜか3000万円を隠し持っていて――!社会の隅っこでつながった3人の奇妙な生活。仲良しの不動産屋・岡部(田中直樹)、索の元恋人・吉田(井之脇海)、オンボロアパートのオーナー・井の頭(坂井真紀)、ほたるのロクデナシな父・市ヶ谷(光石研)、そして謎多き母・楠(麻生久美子)らを巻き込みながら、奇想天外な方向へ…!笑って、泣いて、笑っちゃう、奇妙なホーム&ラブコメディー、開幕!!", + "id": 293920, + "link": "https://filmarks.com/dramas/16808/22629/episodes/293920" + }, + ], }, ], ) @@ -618,6 +654,7 @@ def test_info_with_results_single_4(client_nc, test_data, caplog) -> None: for field in fields: assert get_json_val(resp_data, f"$.data.{field}") == get_json_val(test_data, f"$.{field}") + assert get_json_val(test_data, "$.episodes[0]") in get_json_val(resp_data, "$.data.episodes") assert get_json_val(resp_data, "$.data.rating") == pytest.approx(get_json_val(test_data, "$.rating"), abs=0.5) assert get_json_val(resp_data, "$.data.mark_count") >= get_json_val(test_data, "$.mark_count") assert get_json_val(resp_data, "$.data.clip_count") >= get_json_val(test_data, "$.clip_count") From 4a01c979b1417c659e0822b839b26c4d367eacd4 Mon Sep 17 00:00:00 2001 From: e0406370 Date: Sat, 20 Dec 2025 23:55:20 +0800 Subject: [PATCH 3/3] [chore] remove dead 503 tests - increasing page param to a very large num no longer shows 503 but standard 404 --- tests/anime/test_anime_api.py | 33 ------------------------------ tests/drama/test_drama_api.py | 33 ------------------------------ tests/movie/test_movie_api.py | 38 ----------------------------------- 3 files changed, 104 deletions(-) diff --git a/tests/anime/test_anime_api.py b/tests/anime/test_anime_api.py index a310f68..65d6fa8 100644 --- a/tests/anime/test_anime_api.py +++ b/tests/anime/test_anime_api.py @@ -1,6 +1,4 @@ -from pydantic import Field from requests.exceptions import RequestException -from src.utility.models import ListParams, ReviewParams, SearchParams from tests.conftest import get_json_val import pytest @@ -150,34 +148,3 @@ def test_scrape_error_503_service_unavailable_session(client_nc, mocker, path, c assert resp.status_code == 503 assert get_json_val(resp_data, "$.detail") == "The service is currently unavailable." 
assert "Request to Filmarks failed: 'Testing - 503 Service Unavailable'" in caplog.text - - -@pytest.mark.parametrize("path", [ - "/animes/2592/3304/reviews?page=9999999999999999999", - "/list-anime/trend?page=999999999999999999", - "/list-anime/vod/prime_video?page=999999999999999999", - "/list-anime/year/2020s?page=999999999999999999", - "/list-anime/year/2025?page=999999999999999999", - "/list-anime/year/2019/1?page=999999999999999999", - "/list-anime/year/2019/99", - "/list-anime/company/41?page=999999999999999999", - "/list-anime/tag/駄作?page=999999999999999999", - "/list-anime/person/274563?page=999999999999999999", -]) -def test_scrape_error_503_service_unavailable_filmarks(client_nc, path, caplog) -> None: - class CustomParams(): - page: int = Field(1, gt=0) - client_nc.app.dependency_overrides[SearchParams] = CustomParams - client_nc.app.dependency_overrides[ReviewParams] = CustomParams - client_nc.app.dependency_overrides[ListParams] = CustomParams - - resp = client_nc.get(path) - resp_data = resp.json() - - assert resp.status_code == 503 - assert get_json_val(resp_data, "$.detail") == "The service is currently unavailable." - assert "Filmarks is temporarily unavailable" in caplog.text - - del client_nc.app.dependency_overrides[SearchParams] - del client_nc.app.dependency_overrides[ReviewParams] - del client_nc.app.dependency_overrides[ListParams] diff --git a/tests/drama/test_drama_api.py b/tests/drama/test_drama_api.py index a705712..54df0c7 100644 --- a/tests/drama/test_drama_api.py +++ b/tests/drama/test_drama_api.py @@ -1,6 +1,4 @@ -from pydantic import Field from requests.exceptions import RequestException -from src.utility.models import ListParams, ReviewParams, SearchParams from tests.conftest import get_json_val import pytest @@ -152,34 +150,3 @@ def test_scrape_error_503_service_unavailable_session(client_nc, mocker, path, c assert resp.status_code == 503 assert get_json_val(resp_data, "$.detail") == "The service is currently unavailable." assert "Request to Filmarks failed: 'Testing - 503 Service Unavailable'" in caplog.text - - -@pytest.mark.parametrize("path", [ - "/dramas/6055/8586/reviews?page=9999999999999999999", - "/list-drama/trend?page=999999999999999999", - "/list-drama/vod/prime_video?page=999999999999999999", - "/list-drama/year/2020s?page=999999999999999999", - "/list-drama/year/2025?page=999999999999999999", - "/list-drama/country/144?page=999999999999999999", - "/list-drama/genre/9?page=999999999999999999", - "/list-drama/tag/駄作?page=999999999999999999", - "/list-drama/person/25499?page=999999999999999999", -]) -def test_scrape_error_503_service_unavailable_filmarks(client_nc, path, caplog) -> None: - class CustomParams(): - page: int = Field(1, gt=0) - client_nc.app.dependency_overrides[SearchParams] = CustomParams - client_nc.app.dependency_overrides[ReviewParams] = CustomParams - client_nc.app.dependency_overrides[ListParams] = CustomParams - - resp = client_nc.get(path) - resp_data = resp.json() - - assert resp.status_code == 503 - assert get_json_val(resp_data, "$.detail") == "The service is currently unavailable." 
- assert "Filmarks is temporarily unavailable" in caplog.text - - del client_nc.app.dependency_overrides[SearchParams] - del client_nc.app.dependency_overrides[ReviewParams] - del client_nc.app.dependency_overrides[ListParams] - diff --git a/tests/movie/test_movie_api.py b/tests/movie/test_movie_api.py index e88ccfd..0b176dd 100644 --- a/tests/movie/test_movie_api.py +++ b/tests/movie/test_movie_api.py @@ -1,6 +1,4 @@ -from pydantic import Field from requests.exceptions import RequestException -from src.utility.models import ListParams, ReviewParams, SearchParams from tests.conftest import get_json_val import pytest @@ -191,39 +189,3 @@ def test_scrape_error_503_service_unavailable_session(client_nc, mocker, path, c assert resp.status_code == 503 assert get_json_val(resp_data, "$.detail") == "The service is currently unavailable." assert "Request to Filmarks failed: 'Testing - 503 Service Unavailable'" in caplog.text - - -@pytest.mark.parametrize("path", [ - "/movies/14348/reviews?page=9999999999999999999", - "/list-movie/now?page=999999999999999999", - "/list-movie/coming-soon?page=999999999999999999", - "/list-movie/opening-this-week?page=999999999999999999", - "/list-movie/trend?page=999999999999999999", - "/list-movie/vod/prime_video?page=999999999999999999", - "/list-movie/award/19?page=999999999999999999", - "/list-movie/year/2010s?page=999999999999999999", - "/list-movie/year/2001?page=999999999999999999", - "/list-movie/country/5?page=999999999999999999", - "/list-movie/genre/903?page=999999999999999999", - "/list-movie/distributor/503?page=999999999999999999", - "/list-movie/series/1?page=999999999999999999", - "/list-movie/tag/洋画?page=999999999999999999", - "/list-movie/person/93709?page=999999999999999999", -]) -def test_scrape_error_503_service_unavailable_filmarks(client_nc, path, caplog) -> None: - class CustomParams(): - page: int = Field(1, gt=0) - client_nc.app.dependency_overrides[SearchParams] = CustomParams - client_nc.app.dependency_overrides[ReviewParams] = CustomParams - client_nc.app.dependency_overrides[ListParams] = CustomParams - - resp = client_nc.get(path) - resp_data = resp.json() - - assert resp.status_code == 503 - assert get_json_val(resp_data, "$.detail") == "The service is currently unavailable." - assert "Filmarks is temporarily unavailable" in caplog.text - - del client_nc.app.dependency_overrides[SearchParams] - del client_nc.app.dependency_overrides[ReviewParams] - del client_nc.app.dependency_overrides[ListParams]