RED: 404 and schema test fail
This commit is contained in:
parent 73482bea56
commit 1fd9f7a9a6

Makefile | 2
@@ -12,7 +12,7 @@ run_dev:
 	git ls-files | entr -r pipenv run python dev.py

 tdd:
-	git ls-files | entr make test opt='--lf --ff'
+	git ls-files | entr make test opt=$(opt)
 	git ls-files | entr make functionnal_tests

 watch_db:
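The tdd target now forwards whatever is passed in the opt variable instead of hard-coding pytest's --lf --ff flags, so it can presumably be invoked as, e.g., make tdd opt='--lf --ff' to reproduce the previous behaviour, or with any other pytest options for entr to re-run on every file change.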
@@ -0,0 +1,118 @@
from fastapi.testclient import TestClient
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.pool import StaticPool
from sqlalchemy import MetaData

from database import Base
from dev import app, get_db
from models import Movie

import pytest
import crud
import contextlib

import random
import inspect
import unittest

SQLALCHEMY_DATABASE_URL = "sqlite:///:memory:"

engine = create_engine(
    SQLALCHEMY_DATABASE_URL,
    connect_args={"check_same_thread": False},
    poolclass=StaticPool,
)
TestingSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)


Base.metadata.create_all(bind=engine)


def override_get_db():
    try:
        db = TestingSessionLocal()
        yield db
    finally:
        db.close()


app.dependency_overrides[get_db] = override_get_db

client = TestClient(app)


def rand_name():
    import sys

    caller = sys._getframe(1).f_code.co_name
    name = f"{caller}_{random.randint(1, 1000)}"
    return name


def test_get_movie_404_if_not_found():
    response = client.get("/movies/-1")
    assert response.status_code == 404


def test_list_movies():
    response = client.get("/movies/")
    # assert response.json() == []

    N = 10
    names = []
    for _ in range(N):
        name = rand_name()

        names.append(name)
        response = client.post("/movies/", json={"name": name})
        assert response.status_code == 200

    movies = client.get("/movies/")
    movies_by_name = {m["name"]: m for m in movies.json()}
    found = list(movies_by_name[name] for name in names)
    assert all(movies_by_name[name] for name in names)


def test_create_movie_api():
    name = f"rand_{random.randint(1, 1000)}"
    response = client.post("/movies/", json={"name": name})

    assert response.status_code == 200
    movie_id = response.json()["id"]
    assert f"Created {name}" in response.json()["message"]
    response = client.get(f"/movies/{movie_id}")
    assert response.json()["name"] == name


class ApiTestCase(unittest.TestCase):
    def test_payload_content_in_and_out_loopback(self):
        be_the_fun_in_de_funes = {
            "id": 1,
            "title": "La Grande Vadrouille",
            "description": "During World War II, two French civilians and a downed English Bomber Crew set "
            "out from Paris to cross the demarcation line between Nazi-occupied Northern France and the "
            "South. From there they will be able to escape to England. First, they must avoid German troops -"
            "and the consequences of their own blunders.",
            "genres": ["Comedy", "War"],
            "release_date": "1966-12-07",
            "vote_average": 7.7,
            "vote_count": 1123,
        }

        domain_keys = {k for k in be_the_fun_in_de_funes if k not in ["id"]}
        payload = {k: be_the_fun_in_de_funes[k] for k in domain_keys}
        # FIXME
        payload["name"] = payload["title"]
        response = client.post("/movies/", json=payload)

        assert response.status_code == 200
        movie_id = response.json()["id"]

        loopback_fetch = client.get(f"/movies/{movie_id}")
        assert loopback_fetch.status_code == 200
        loopback_payload = loopback_fetch.json()
        # check for keys
        for attribute_name in domain_keys:
            with self.subTest(attribute_name=attribute_name):
                assert attribute_name in loopback_payload
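For orientation, below is a minimal sketch of the endpoint shape these new tests assume. It is not the project's dev.py (which this commit does not touch); the in-memory dict stands in for the real SQLAlchemy/crud layer and only illustrates the contract the assertions rely on. Like the real app at this point, a shape like this would still fail the schema loopback test, which is what makes this a RED commit.

# Hypothetical sketch, not the repository's dev.py: just the API surface the tests expect.
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel

sketch_app = FastAPI()
_movies: dict[int, dict] = {}  # stand-in for the SQLAlchemy-backed storage


class MovieIn(BaseModel):
    name: str


@sketch_app.get("/movies/")
def list_movies():
    # test_list_movies expects a JSON list whose items carry a "name" key
    return list(_movies.values())


@sketch_app.get("/movies/{movie_id}")
def get_movie(movie_id: int):
    # test_get_movie_404_if_not_found expects 404 for an unknown id
    if movie_id not in _movies:
        raise HTTPException(status_code=404, detail="Movie not found")
    return _movies[movie_id]


@sketch_app.post("/movies/")
def create_movie(movie: MovieIn):
    # test_create_movie_api reads "id" and a "Created <name>" message from the response
    movie_id = len(_movies) + 1
    _movies[movie_id] = {"id": movie_id, "name": movie.name}
    return {"id": movie_id, "message": f"Created {movie.name}"}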
@@ -28,14 +28,7 @@ TestingSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
 Base.metadata.create_all(bind=engine)


-def clear_db():
-    # Make this a more generic functional tool for test
-    meta = MetaData()
-    with contextlib.closing(engine.connect()) as con:
-        trans = con.begin()
-        for table in reversed(meta.sorted_tables):
-            con.execute(table.delete())
-        trans.commit()
 client = TestClient(app)


 def override_get_db():
@@ -47,16 +40,24 @@ def override_get_db():
         db.close()


-app.dependency_overrides[get_db] = override_get_db
-
-
+def clear_db():
+    # Make this a more generic functional tool for test
+    meta = MetaData()
+    with contextlib.closing(engine.connect()) as con:
+        trans = con.begin()
+        for table in reversed(meta.sorted_tables):
+            con.execute(table.delete())
+        trans.commit()
+
+
+@contextlib.contextmanager
+def db_context():
+    yield from override_get_db()
+
+
+app.dependency_overrides[get_db] = override_get_db
+
 client = TestClient(app)


 def rand_name():
     import sys

@@ -96,13 +97,3 @@ def test_list_movies():
     movies = client.get("movies")
     movies_by_name = {m["name"]: m for m in movies.json()}
     assert all(movies_by_name[name] for name in names)
-
-
-def test_create_movie_api():
-    name = f"rand_{random.randint(1, 1000)}"
-    response = client.post("/movies/", json={"name": name})
-    assert response.status_code == 200
-    movie_id = response.json()["id"]
-    assert f"Created {name}" in response.json()["message"]
-    response = client.get(f"/movies/{movie_id}")
-    assert response.json()["name"] == name
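The relocated clear_db helper and the new db_context context manager are not called by any test in this commit; a hedged guess at the intended usage, reusing the Movie model and rand_name helper imported in the new test module, might look like:

def test_db_context_yields_a_working_session():
    # Hypothetical usage, not part of this commit: db_context wraps override_get_db
    # so a test can grab a session outside FastAPI's dependency injection.
    name = rand_name()
    with db_context() as db:
        db.add(Movie(name=name))  # assumes Movie exposes a "name" column, as the POST payloads suggest
        db.commit()
    with db_context() as db:
        assert db.query(Movie).filter_by(name=name).one().name == name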