FastAPI: column computers.id does not exist

Here is the code of my main.py in FastAPI:
from typing import List, Union
import datetime

import databases
import sqlalchemy
from fastapi import FastAPI
from pydantic import BaseModel

DATABASE_URL = "postgresql://username:password@localhost/collector"

database = databases.Database(DATABASE_URL)
metadata = sqlalchemy.MetaData()

computers = sqlalchemy.Table(
    "computers",
    metadata,
    sqlalchemy.Column("id", sqlalchemy.Integer, primary_key=True, index=True),
    sqlalchemy.Column("computername", sqlalchemy.String),
    sqlalchemy.Column("computerip", sqlalchemy.String),
    sqlalchemy.Column("computerexternalip", sqlalchemy.String),
    sqlalchemy.Column("time", sqlalchemy.DateTime),
)

engine = sqlalchemy.create_engine(DATABASE_URL)
metadata.create_all(engine)
class ComputerBase(BaseModel):
    computername: str
    computerip: str
    computerexternalip: str
    time: datetime.datetime

class ComputerIn(ComputerBase):
    pass

class Computer(ComputerBase):
    id: int

    class Config:
        orm_mode = True
app = FastAPI()

@app.on_event("startup")
async def startup():
    await database.connect()

@app.on_event("shutdown")
async def shutdown():
    await database.disconnect()

@app.get("/computers/", response_model=List[Computer])
async def read_computers():
    query = computers.select()
    print(query)
    return await database.fetch_all(query)

@app.post("/computers/", response_model=Computer)
async def create_computer(computer: ComputerIn):
    current_time = datetime.datetime.utcnow()
    query = computers.insert().values(
        computername=computer.computername,
        computerip=computer.computerip,
        computerexternalip=computer.computerexternalip,
        time=current_time,
    )
    last_record_id = await database.execute(query)
    return {**computer.dict(), "id": last_record_id}
When I go to https://localhost:8000/computers, I get this error:
asyncpg.exceptions.UndefinedColumnError: column computers.id does not exist
Which I don't understand, since I declare a table named "computers" with an id column at the beginning of my code.
Any idea?
Thank you
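A likely explanation, for what it's worth: metadata.create_all(engine) only creates tables that are missing; it never alters a table that already exists. If an earlier run created a "computers" table without the id column, create_all() silently leaves it unchanged, and the SELECT then fails exactly like this. A minimal, destructive fix sketch, assuming the existing rows are disposable (otherwise add the column with a migration tool such as Alembic):
# Sketch: drop the stale table so create_all() can recreate it with the declared schema.
# WARNING: this deletes any existing data in "computers".
computers.drop(engine, checkfirst=True)
metadata.create_all(engine)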

Related

Can't access my router in fastapi with postman

If I try to access my router (/users/signup)
with Postman (sending to http://127.0.0.1:8000/users/signup), it keeps responding with "detail": "Not Found",
and FastAPI shows
api | INFO: 172.19.0.1:43736 - "GET /users/signup HTTP/1.1" 404 Not Found
Originally I tried to use POST instead of GET, but it doesn't work either way.
Here is the code in my main.py in the app folder and user.py in the routers folder
main.py:
import os
import sys

from .database import init_db

sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
baseurl = os.path.dirname(os.path.abspath(__file__))

from fastapi import FastAPI, APIRouter
from .routers.user import router as user_router
from .routers.article import router as article_router
from fastapi.middleware.cors import CORSMiddleware

router = APIRouter()
router.include_router(user_router, prefix="/users", tags=["users"])
router.include_router(article_router, prefix="/articles", tags=["articles"])

app = FastAPI()

origins = ["http://localhost:3000"]
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

@app.on_event("startup")
async def on_startup():
    await init_db()

@app.get("/")
async def root():
    return {"message": "Welcome Fastapi"}

@app.get("/hello/{name}")
async def say_hello(name: str):
    return {"message": f"Hello {name}"}
user.py:
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session

import app.repository.user as dao
from app.admin.utils import currentTime
from app.database import get_db
from app.schemas.user import UserDTO

router = APIRouter()

@router.get("/signup")
async def signup(user: UserDTO, db: Session = Depends(get_db)):
    print(f"Signup entered at: {currentTime()}")
    print(f"Signup info: {user}")
    result = dao.signup(user, db)
    if result == "":
        result = "failure"
    return {"data": result}

@router.post("/login")
async def login(user: UserDTO, db: Session = Depends(get_db)):
    return_user = dao.login(user, db)
    print(f"Login info: {return_user}")
    return {"data": return_user}

@router.put("/modify/{id}")
async def update(id: str, item: UserDTO, db: Session = Depends(get_db)):
    dao.update(id, item, db)
    return {"data": "success"}

@router.delete("/delete/{id}", tags=['age'])
async def delete(id: str, item: UserDTO, db: Session = Depends(get_db)):
    dao.delete(id, item, db)
    return {"data": "success"}

@router.get("/page/{page}")
async def get_users(page: int, db: Session = Depends(get_db)):
    ls = dao.find_users(page, db)
    return {"data": ls}

@router.get("/email/{id}")
async def get_user(id: str, db: Session = Depends(get_db)):
    dao.find_user(id, db)
    return {"data": "success"}

@router.get("/point/{search}/{page}")
async def get_users_by_point(search: int, page: int, db: Session = Depends(get_db)):
    dao.find_users_by_point(search, page, db)
    return {"data": "success"}
What I was expecting was to send {"user_email": "hong@naver.com", "id": "hong1234", "username": "홍길동", "password": "hong1234", "cpassword": "hong1234"} with Postman, receive data: success, and have the data enrolled in the DB.
I tried both the POST and GET methods and changed the router address, but it didn't work either way.
You need to include the router in your app too:
router = APIRouter()
router.include_router(user_router, prefix="/users", tags=["users"])
router.include_router(article_router, prefix="/articles", tags=["articles"])

app = FastAPI()
app.include_router(router)  # Add this.
Alternatively, remove the main router and include the sub-routers directly:
# Remove router = APIRouter()
app = FastAPI()
app.include_router(user_router, prefix="/users", tags=["users"])
app.include_router(article_router, prefix="/articles", tags=["articles"])
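Once the router is registered, a quick sanity check could look like the sketch below (hedged: it assumes uvicorn is serving on 127.0.0.1:8000 and that the signup route is switched to @router.post, which suits a JSON body better than GET):
import requests

payload = {"user_email": "hong@naver.com", "id": "hong1234", "username": "홍길동",
           "password": "hong1234", "cpassword": "hong1234"}
resp = requests.post("http://127.0.0.1:8000/users/signup", json=payload)
print(resp.status_code, resp.json())  # expect 200 and {"data": ...} instead of 404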

pytest with httpx.AsyncClient cannot find newly created database records

I am trying to set up pytest with httpx.AsyncClient and SQLAlchemy AsyncSession with FastAPI. Everything practically mimics the tests in the FastAPI Fullstack repo, except for the async stuff.
There are no issues with the CRUD unit tests. The issue arises when running API tests using AsyncClient from the httpx lib.
The issue is that any request made by the client only has access to the users (in my case) created before initializing (setting up) the client fixture.
My pytest conftest.py setup is like this:
from typing import Dict, Generator, Callable
import asyncio
from fastapi import FastAPI
import pytest
# from sqlalchemy.orm import Session
from sqlalchemy.ext.asyncio import AsyncSession
from httpx import AsyncClient
import os
import warnings
import sqlalchemy as sa
from alembic.config import Config
from sqlalchemy.ext.asyncio import create_async_engine
from sqlalchemy.orm import sessionmaker

async def get_test_session() -> Generator:
    test_engine = create_async_engine(
        settings.SQLALCHEMY_DATABASE_URI + '_test',
        echo=False,
    )
    # expire_on_commit=False will prevent attributes from being expired
    # after commit.
    async_sess = sessionmaker(
        test_engine, expire_on_commit=False, class_=AsyncSession
    )
    async with async_sess() as sess, sess.begin():
        yield sess

@pytest.fixture(scope="session")
async def async_session() -> Generator:
    test_engine = create_async_engine(
        settings.SQLALCHEMY_DATABASE_URI + '_test',
        echo=False,
        pool_size=20, max_overflow=0
    )
    # expire_on_commit=False will prevent attributes from being expired
    # after commit.
    async_sess = sessionmaker(
        test_engine, expire_on_commit=False, class_=AsyncSession
    )
    yield async_sess

@pytest.fixture(scope="session")
async def insert_initial_data(async_session: Callable):
    async with async_session() as session, session.begin():
        # insert first superuser - basic CRUD ops to insert data in test db
        await insert_first_superuser(session)
        # inserts test.superuser@example.com
        await insert_first_test_user(session)
        # inserts test.user@example.com

@pytest.fixture(scope='session')
def app(insert_initial_data) -> FastAPI:
    return FastAPI()

@pytest.fixture(scope='session')
async def client(app: FastAPI) -> Generator:
    from app.api.deps import get_session
    app.dependency_overrides[get_session] = get_test_session
    async with AsyncClient(
        app=app, base_url="http://test",
    ) as ac:
        yield ac
    # reset dependencies
    app.dependency_overrides = {}
So in this case, only the superuser test.superuser@example.com and the normal user test.user@example.com are available while running the API tests. E.g., the code below is able to fetch the access token just fine:
async def authentication_token_from_email(
    client: AsyncClient, session: AsyncSession,
) -> Dict[str, str]:
    """
    Return a valid token for the user with given email.
    """
    email = 'test.user@example.com'
    password = 'test.user.password'
    user = await crud.user.get_by_email(session, email=email)
    assert user is not None
    data = {"username": email, "password": password}
    response = await client.post(f"{settings.API_V1_STR}/auth/access-token",
                                 data=data)
    auth_token = response.cookies.get('access_token')
    assert auth_token is not None
    return auth_token
But the modified code below doesn't - here I try to insert a new user and then log in to get the access token:
async def authentication_token_from_email(
    client: AsyncClient, session: AsyncSession,
) -> Dict[str, str]:
    """
    Return a valid token for the user with given email.
    If the user doesn't exist it is created first.
    """
    email = random_email()
    password = random_lower_string()
    user = await crud.user.get_by_email(session, email=email)
    if not user:
        user_in_create = UserCreate(email=email,
                                    password=password)
        user = await crud.user.create(session, obj_in=user_in_create)
    else:
        user_in_update = UserUpdate(password=password)
        user = await crud.user.update(session, db_obj=user, obj_in=user_in_update)
    assert user is not None
    # works fine up to this point, user inserted successfully
    # now try to send http request to fetch token, and user is not found in the db
    data = {"username": email, "password": password}
    response = await client.post(f"{settings.API_V1_STR}/auth/access-token",
                                 data=data)
    auth_token = response.cookies.get('access_token')
    # returns None.
    return auth_token
What is going on here? I'd appreciate any help!
Turns out all I needed to do, for a reason I did not understand, was to define the FastAPI dependency override function inside the client fixture:
before
async def get_test_session() -> Generator:
    test_engine = create_async_engine(
        settings.SQLALCHEMY_DATABASE_URI + '_test',
        echo=False,
    )
    # expire_on_commit=False will prevent attributes from being expired
    # after commit.
    async_sess = sessionmaker(
        test_engine, expire_on_commit=False, class_=AsyncSession
    )
    async with async_sess() as sess, sess.begin():
        yield sess

@pytest.fixture(scope='session')
async def client(app: FastAPI) -> Generator:
    from app.api.deps import get_session
    app.dependency_overrides[get_session] = get_test_session
    async with AsyncClient(
        app=app, base_url="http://test",
    ) as ac:
        yield ac
    # reset dependencies
    app.dependency_overrides = {}
after
#pytest.fixture(scope="function")
async def session(async_session) -> Generator:
async with async_session() as sess, sess.begin():
yield sess
#pytest.fixture
async def client(app: FastAPI, session:AsyncSession) -> Generator:
from app.api.deps import get_session
# this needs to be defined inside this fixture
# this is generate that yields session retrieved from `session` fixture
def get_sess():
yield session
app.dependency_overrides[get_session] = get_sess
async with AsyncClient(
app=app, base_url="http://test",
) as ac:
yield ac
app.dependency_overrides = {}
I'd appreciate any explanation of this behavior. Thanks!
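A plausible explanation (not verified against the libraries' internals): the original get_test_session override created a brand-new engine and AsyncSession for every request, i.e. a separate database connection. The test fixtures inserted users through a different session whose transaction (sess.begin()) was still open, and uncommitted rows in one connection are invisible to every other connection. By defining the override inside the client fixture and yielding the very same session object the test uses, the request handlers run on the same connection inside the same transaction, so they can see the freshly inserted users.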

TypeError: 'coroutine' object is not subscriptable in python Quart Framework

from quart import Quart, request, render_template, jsonify
import json
import os, sys
import pandas as pd
import requests
import asyncio
from pylon.model.db_models import RawFiles
from pylon.orm import db

app = Quart(__name__)

@app.route('/upload', methods=['POST'])
async def handle_form():
    f = await request.files['filename']
    f.save(f.filename)
    data = pd.read_csv(f.filename)
    data.to_json("json_data.json")
    data = pd.read_json("json_data.json")
    os.remove("json_data.json")
    os.remove(f.filename)
    print(type(data))
    print(data)
    return ""

@app.route("/")
async def index():
    return await render_template('upload.html')

if __name__ == "__main__":
    app.run(host="bheem11.arch.des.co", port=5043, debug=True)
I am getting the error described in the title. I am working with the Quart framework in Python and hoping for a proper solution. The coroutine error occurs when the @app.route('/upload', methods=['POST']) handler executes.
The line await request.files['filename'] should be (await request.files)['filename']. Without the parentheses, everything to the right of await is evaluated first, which results in an attempt to subscript (the ['filename'] operation) the files attribute. This doesn't work because the files attribute returns a coroutine, which is not subscriptable. There is more on this in the Quart documentation.
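For reference, a minimal corrected handler might look like this (a sketch reusing the question's field name 'filename'):
@app.route('/upload', methods=['POST'])
async def handle_form():
    files = await request.files   # await the coroutine first
    f = files['filename']         # then subscript the resolved mapping
    f.save(f.filename)
    return ""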

Tornado performance issue with MySQL and Redis

I have a Tornado server running with MySQL for the DB and Redis for the cache. I am using web sockets to send and receive data. My code is like this:
Server
import logging
import os.path
import uuid
import sys
import json

import tornadis
import tormysql
import tornado.escape
import tornado.ioloop
import tornado.options
import tornado.web
import tornado.websocket
from tornado import gen
from tornado.concurrent import Future
from tornado.options import define, options

@gen.coroutine
def getFromDB(query):
    with (yield dbPool.Connection()) as conn:
        with conn.cursor() as cursor:
            yield cursor.execute(query)
            datas = cursor.fetchall()
            return datas
    return None

@gen.coroutine
def getFromCache(cmd):
    pipeline = tornadis.Pipeline()
    pipeline.stack_call(cmd)
    with (yield cachePool.connected_client()) as singleClient:
        redisResult = yield singleClient.call(pipeline)
        if isinstance(redisResult, tornadis.TornadisException):
            print("Redis exception: %s" % (redisResult))
        else:
            return redisResult

async def getData(dbQuery, cacheQuery):
    waitDict = {}
    if dbQuery:
        waitDict['db'] = getFromDB(dbQuery)
    if cacheQuery:
        waitDict['cache'] = getFromCache(cacheQuery)
    resultList = []
    if len(waitDict) > 0:
        await gen.multi(waitDict)
        if 'db' in waitDict:
            dbRes = waitDict['db'].result()
            if dbRes:
                for eachResult in dbRes:
                    changeRes = someFunct(eachResult)
                    resultList.append(changeRes)
        if 'cache' in waitDict:
            cacheRes = waitDict['cache'].result()
            if cacheRes:
                for eachResult in cacheRes:
                    changeRes = someFunct(eachResult)
                    resultList.append(changeRes)
    return resultList

class SocketHandler(tornado.websocket.WebSocketHandler):
    SUPPORTED_METHODS = ("GET",)

    def open(self):
        print("Socket open:%s" % (self))

    def on_close(self):
        print("Socket closed:%s" % (self))

    async def on_message(self, inp):
        if requestForData:
            ret = await getData(dbQuery, cacheQuery)
            self.write_message(ret)

class Application(tornado.web.Application):
    def __init__(self):
        handlers = [
            (r"/sock", SocketHandler),
        ]
        super(Application, self).__init__(handlers)

define("port", default=8000, help="run on the given port", type=int)
tornado.options.parse_command_line()
app = Application()
app.listen(options.port)
print("PORT:%s" % (options.port))
tornado.ioloop.IOLoop.current().start()
I am using tornadis for Redis and tormysql for MySQL.
I am running this setup on an Amazon Linux instance, m5.large, with 2 vCPUs and 8 GiB of memory.
Client
I am trying to simulate traffic using web sockets. The code is like this:
import sys
import json
import asyncio
import websockets

async def getData():
    for i in range(100):
        async with websockets.connect(SOCKET_URL, extra_headers=extraHeaders) as websocket:
            for i in range(100):
                await websocket.send("get data")
                reply = await websocket.recv()
                print(reply)

asyncio.get_event_loop().run_until_complete(getData())
I am running multiple instances of the client.
The server runs fine, but it can only handle about 25 connections; after 25 connections, the delay of the server's replies increases. I want the server's replies to be very fast. How do I decrease the response delay? Is there a problem in the code?

Query graphite index.json for a specific sub-tree

I'm querying Graphite's index.json to get all the metrics. Is there an option to pass a root metric and get only a sub-tree? Something like:
http://<my.graphite>/metrics/index.json?query="my.metric.subtree"
That is not supported.
What you can do, however, is call /metrics/find recursively (call it again for each branch encountered).
Something like this:
#!/usr/bin/python
from __future__ import print_function
import requests
import json
import argparse
try:
    from Queue import Queue
except:
    from queue import Queue
from threading import Thread, Lock
import sys
import unicodedata

outLock = Lock()

def output(msg):
    with outLock:
        print(msg)
        sys.stdout.flush()

class Walker(Thread):
    def __init__(self, queue, url, user=None, password=None, seriesFrom=None, depth=None):
        Thread.__init__(self)
        self.queue = queue
        self.url = url
        self.user = user
        self.password = password
        self.seriesFrom = seriesFrom
        self.depth = depth

    def run(self):
        while True:
            branch = self.queue.get()
            try:
                branch[0].encode('ascii')
            except Exception as e:
                with outLock:
                    sys.stderr.write('found branch with invalid characters: ')
                    sys.stderr.write(unicodedata.normalize('NFKD', branch[0]).encode('utf-8', 'xmlcharrefreplace'))
                    sys.stderr.write('\n')
            else:
                if self.depth is not None and branch[1] == self.depth:
                    output(branch[0])
                else:
                    self.walk(branch[0], branch[1])
            self.queue.task_done()

    def walk(self, prefix, depth):
        payload = {
            "query": (prefix + ".*") if prefix else '*',
            "format": "treejson"
        }
        if self.seriesFrom:
            payload['from'] = self.seriesFrom
        auth = None
        if self.user is not None:
            auth = (self.user, self.password)
        r = requests.get(
            self.url + '/metrics/find',
            params=payload,
            auth=auth,
        )
        if r.status_code != 200:
            sys.stderr.write(r.text + '\n')
            raise Exception(
                'Error walking finding series: branch={branch} reason={reason}'
                .format(branch=unicodedata.normalize('NFKD', prefix).encode('ascii', 'replace'), reason=r.reason)
            )
        metrics = r.json()
        for metric in metrics:
            try:
                if metric['leaf']:
                    output(metric['id'])
                else:
                    self.queue.put((metric['id'], depth + 1))
            except Exception as e:
                output(metric)
                raise e

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument("--url", help="Graphite URL", required=True)
    parser.add_argument("--prefix", help="Metrics prefix", required=False, default='')
    parser.add_argument("--user", help="Basic Auth username", required=False)
    parser.add_argument("--password", help="Basic Auth password", required=False)
    parser.add_argument("--concurrency", help="concurrency", default=8, required=False, type=int)
    parser.add_argument("--from", dest='seriesFrom', help="only get series that have been active since this time", required=False)
    parser.add_argument("--depth", type=int, help="maximum depth to traverse. If set, the branches at the depth will be printed", required=False)
    args = parser.parse_args()
    url = args.url
    prefix = args.prefix
    user = args.user
    password = args.password
    concurrency = args.concurrency
    seriesFrom = args.seriesFrom
    depth = args.depth
    queue = Queue()
    for x in range(concurrency):
        worker = Walker(queue, url, user, password, seriesFrom, depth)
        worker.daemon = True
        worker.start()
    queue.put((prefix, 0))
    queue.join()
Note: this code comes from: https://github.com/grafana/cloud-graphite-scripts/blob/master/query/walk_metrics.py
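To answer the original question with this script, pass the sub-tree root as the prefix; a hypothetical invocation (assuming the script is saved as walk_metrics.py) would be:
python walk_metrics.py --url http://my.graphite --prefix my.metric.subtree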
