Pagination works from beginning to end
This commit is contained in:
parent 8ef89ecc85
commit 3897771ecb
@@ -187,43 +187,33 @@ class BaseCrud(unittest.TestCase):
     # test last page has no next
 
     def test_list_movies_pagination_back_forth(self):
-        return
         response = client.get("/movies/")
         assert response.status_code == 200
-        # assert response.json() == []
-        primary_count = response.json()["count"]
+        nb_movies = response.json()["count"]
 
-        N = 10
-        names = []
-        for _ in range(N):
-            name = rand_name()
-
-            names.append(name)
-            self.create_payload["title"] = name
+        for _ in range(3):
+            self.create_payload["title"] = rand_name()
             response = client.post("/movies/", json=self.create_payload)
             assert response.status_code == 200
 
-        response = client.get("/movies/")
-        nb_movies = response.json()["count"]
+        movies = client.get("/movies/").json()
+        first, *_, last = movies["movies"]
 
         pagenum = 1
-        pagesize = 5
+        pagesize = 2
 
-        first, *_, last = client.get("/movies/").json()["movies"]
-
         while current_movies := client.get(
             f"/movies/?pagenum={pagenum}&pagesize={pagesize}"
         ).json():
-            break
             next_page_num = current_movies.get("next_page")
             assert next_page_num != pagenum
 
             sliced_movies = client.get("/movies/").json()["movies"][
                 (pagenum - 1) * pagesize : pagenum * pagesize
             ]
 
             sliced_titles = [m["title"] for m in sliced_movies]
 
             movies_paginate = current_movies["movies"]
 
             paginate_titles = [m["title"] for m in movies_paginate]
 
             assert sliced_titles == paginate_titles
             if next_page_num is None:
                 assert current_movies["movies"][-1] == last
                 break
             else:
                 assert next_page_num == pagenum + 1
                 pagenum = next_page_num
 
     def test_list_movies_pagination(self):
         response = client.get("/movies/")
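The handler under test is not part of this hunk. For context, a minimal sketch of a /movies/ endpoint consistent with what the assertions expect (a "count" field, a "movies" list cut by 1-based pagenum/pagesize, and "next_page" set to None on the last page) could look like the following; the MOVIES store and the exact parameter defaults are assumptions for illustration, not the project's actual code:

    # Hypothetical sketch -- not the project's real handler. It only honors
    # the response shape the test asserts on.
    from typing import Optional

    from fastapi import FastAPI

    app = FastAPI()
    MOVIES: list = []  # stand-in for the real storage layer

    @app.get("/movies/")
    def list_movies(pagenum: int = 1, pagesize: Optional[int] = None):
        if pagesize is None:
            page = MOVIES  # no pagination requested: return everything
            next_page = None
        else:
            start = (pagenum - 1) * pagesize
            page = MOVIES[start : start + pagesize]
            # a next page exists only if items remain past this slice
            next_page = pagenum + 1 if start + pagesize < len(MOVIES) else None
        return {"count": len(MOVIES), "movies": page, "next_page": next_page}

With pagesize = 2 and three freshly created movies on top of whatever the suite already inserted, the walk covers at least two pages, so both the next_page == pagenum + 1 step and the final next_page is None branch are exercised.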