How to test a flask app using pytest to get the coverage rate up - sqlite

Currently this is my app.py files
# imports - standard imports
import json
import os
import sqlite3
# imports - third party imports
from flask import Flask, Response, jsonify, redirect
from flask import render_template as render
from flask import request, url_for
# Name of the SQLite database file (created in the current working directory).
DATABASE_NAME = "inventory.sqlite"
# setting up Flask instance
app = Flask(__name__)
app.config.from_mapping(
    SECRET_KEY="dev",  # NOTE(review): dev-only secret; override in production
    # NOTE(review): this DATABASE path is configured but the code below
    # connects to DATABASE_NAME directly — confirm which one is intended.
    DATABASE=os.path.join(app.instance_path, "database", DATABASE_NAME),
)
# listing views
# Map of view name -> URL fragment consumed by the navigation templates.
link = {x: x for x in ["location", "product", "movement"]}
link["index"] = "/"
def init_database(db_path=None):
    """Create the inventory schema (products, location, logistics) if absent.

    Args:
        db_path: optional path to the SQLite file. Defaults to the
            module-level DATABASE_NAME so existing callers are unaffected;
            passing an explicit path makes the function testable in isolation.

    NOTE(review): the Flask config sets app.config["DATABASE"] but this
    function historically connected to DATABASE_NAME in the CWD — behavior
    kept as-is, confirm which location is intended.
    """
    if db_path is None:
        db_path = DATABASE_NAME
    db = sqlite3.connect(db_path)
    try:
        cursor = db.cursor()
        # products: unallocated_quantity is back-filled by the trigger below
        cursor.execute(
            """
            CREATE TABLE IF NOT EXISTS
            products(prod_id INTEGER PRIMARY KEY AUTOINCREMENT,
                     prod_name TEXT UNIQUE NOT NULL,
                     prod_quantity INTEGER NOT NULL,
                     unallocated_quantity INTEGER);
            """
        )
        # default unallocated_quantity to prod_quantity when not supplied
        cursor.execute(
            """
            CREATE TRIGGER IF NOT EXISTS default_prod_qty_to_unalloc_qty
            AFTER INSERT ON products
            FOR EACH ROW
            WHEN NEW.unallocated_quantity IS NULL
            BEGIN
                UPDATE products SET unallocated_quantity = NEW.prod_quantity WHERE rowid = NEW.rowid;
            END;
            """
        )
        # locations a product can be stored at
        cursor.execute(
            """
            CREATE TABLE IF NOT EXISTS location(loc_id INTEGER PRIMARY KEY AUTOINCREMENT,
                                                loc_name TEXT UNIQUE NOT NULL);
            """
        )
        # movement log: NULL from_loc_id/to_loc_id model inbound/outbound moves
        cursor.execute(
            """
            CREATE TABLE IF NOT EXISTS logistics(trans_id INTEGER PRIMARY KEY AUTOINCREMENT,
                                                 prod_id INTEGER NOT NULL,
                                                 from_loc_id INTEGER NULL,
                                                 to_loc_id INTEGER NULL,
                                                 prod_quantity INTEGER NOT NULL,
                                                 trans_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
                                                 FOREIGN KEY(prod_id) REFERENCES products(prod_id),
                                                 FOREIGN KEY(from_loc_id) REFERENCES location(loc_id),
                                                 FOREIGN KEY(to_loc_id) REFERENCES location(loc_id));
            """
        )
        db.commit()
    finally:
        # the original leaked this connection on every call
        db.close()
@app.route("/product", methods=["POST", "GET"])  # decorator was garbled to a comment; without it the view is unreachable
def product() -> Response | str:
    """List products and handle adding a new product via the form.

    GET renders the product table; POST validates the form, inserts a new
    product row, then redirects back to this view (Post/Redirect/Get).
    """
    init_database()
    msg = None
    db = sqlite3.connect(DATABASE_NAME)
    try:
        cursor = db.cursor()
        cursor.execute("SELECT * FROM products")
        products = cursor.fetchall()
        if request.method == "POST":
            # .get() avoids a 400/KeyError when a field is missing entirely
            prod_name = request.form.get("prod_name")
            quantity = request.form.get("prod_quantity")
            transaction_allowed = (
                prod_name not in ["", " ", None] and quantity not in ["", " ", None]
            )
            if transaction_allowed:
                try:
                    cursor.execute(
                        "INSERT INTO products (prod_name, prod_quantity) VALUES (?, ?)",
                        (prod_name, quantity),
                    )
                    db.commit()
                except sqlite3.Error as e:
                    msg = f"An error occurred: {e.args[0]}"
                else:
                    msg = f"{prod_name} added successfully"
            if msg:
                print(msg)
            return redirect(url_for("product"))
        return render(
            "product.html",
            link=link,
            products=products,
            transaction_message=msg,
            title="Products Log",
        )
    finally:
        # the original leaked this connection on every request
        db.close()
and this is my test file, test_product.py. I want to test my function so that my coverage on SonarCloud reaches 100%, but the pytest functions below don't seem to have any effect on it. I should mention that I am a complete beginner to this and still learning.
import requests
import app
import pytest
# Tests below hit a live dev server over HTTP, so the Flask app never runs
# in-process — which is why coverage tools report 0% for app.py.
ENDPOINT = "http://127.0.0.1:5000/product"
# NOTE(review): initializes the real inventory.sqlite as an import side
# effect of the test module; prefer a fixture with a temporary database.
app.init_database()
def test_product_GET():
    """GET /product renders the page, exercised in-process for coverage.

    Uses Flask's built-in test client instead of `requests`, so no dev
    server is required and coverage is measured on app.py itself.
    Assumes the @app.route decorator on product() is active.
    """
    client = app.app.test_client()
    response = client.get("/product")
    assert response.status_code == 200
    # assert b"Products Log" in response.data  # enable once templates are on the test path
def test_product_POST_valid():
    """POST with valid fields redirects back to /product (302 followed to 200).

    The stray `#pytest.fixture()` comment was removed — test functions are
    not fixtures, and the commented decorator did nothing.
    """
    client = app.app.test_client()
    response = client.post(
        "/product",
        data={"prod_name": "product1", "prod_quantity": "10"},
        follow_redirects=True,
    )
    assert response.status_code == 200
    # assert b"product1 added successfully" in response.data
def test_product_POST_invalid():
    """POST with empty fields is rejected but still redirects to /product."""
    client = app.app.test_client()
    response = client.post(
        "/product",
        data={"prod_name": "", "prod_quantity": ""},
        follow_redirects=True,
    )
    assert response.status_code == 200
I am not sure how to make this work without using the requests library; as it stands the tests give 0% coverage, and I want this code to be measured on SonarCloud.

Related

SQLite: Why is the INSERT SQL statement not updating the table in the database?

I have a Python Pysimplegui form that is connecting to a SQL Lite database.
The function to create and update a table called fitness_class is:
def createFitnessClassTable(conn):
    """Create the fitness_class table and seed it with three default rows.

    Returns True on success, False if a sqlite error occurred (shown in a
    popup). Note: the caller is responsible for calling conn.commit() —
    without it the inserted rows are never persisted.
    """
    SQL = """CREATE TABLE IF NOT EXISTS fitness_class (
        fitness_class_id integer PRIMARY KEY,
        fitness_class_name text NOT NULL,
        date_and_time text NOT NULL
    );"""
    sql_create = """INSERT OR IGNORE INTO fitness_class(fitness_class_id,fitness_class_name,date_and_time)
    VALUES(?,?,?)"""
    data = [
        (1, 'Cardio', 'Thursday 35pm'),
        (2, 'Pilates', 'Friday 911am'),
        (3, 'Spin', 'Monday 2 4pm')
    ]
    try:
        c = conn.cursor()
        c.execute(SQL)
        c.close()
        cursor = conn.cursor()
        cursor.executemany(sql_create, data)
        cursor.close()
    except sqlite3.Error as e:
        # bug fix: 'Error' was an undefined bare name; sqlite3.Error is
        # the actual exception base raised by the sqlite3 module.
        # print(e)
        sg.Popup(e)
        return False
    return True
When the function is called, this is creating the table and I am not getting any error messages. However, this is not saving the data (from the insert statement) either.
These are the rows related to calling the function
#!/usr/bin/python
import os
import PySimpleGUI as sg
from tkinter import *
import re
import sys
import PySimpleGUI as sg
import sqlite3
sys.path.append(os.path.dirname(__file__))
conn = dbconnect()
createFitnessClassTable(conn=conn)
# BUG: missing parentheses — this looks up the bound method but never
# calls it, so the INSERTed rows are never committed. Should be
# conn.commit() (this is the fix the poster found).
conn.commit
conn.close()
I am confused because I have a similar function to create another table which is working correctly (i.e. creating the table if it doesn't exist and populating it with the data):
def createMembershipTable(conn):
    """Create the membership table and seed it with three default tiers.

    Returns True on success, False if a sqlite error occurred. The caller
    must call conn.commit() to persist the inserted rows.
    """
    SQL = """
    CREATE TABLE IF NOT EXISTS membership (
        membership_type_id integer PRIMARY KEY,
        membership_type text NOT NULL,
        weekly_amount real NOT NULL
    );"""
    sql_create = """INSERT OR IGNORE INTO membership(membership_type_id,membership_type,weekly_amount)
    VALUES(?,?,?)"""
    data = [(1, 'Basic', 10.00),
            (2, 'Regular', 15.00),
            (3, 'Premium', 20.00)
            ]
    try:
        c = conn.cursor()
        c.execute(SQL)
        c.close()
        cursor = conn.cursor()
        cursor.executemany(sql_create, data)
        cursor.close()
    except sqlite3.Error as e:
        # bug fix: 'Error' was an undefined bare name here as well.
        print(e)
        return False
    return True
The lines to call that function:
conn = dbconnect()
createMembershipTable(conn)
conn.commit()
conn.close()
What am I missing? Why would the function createMembershipTable work as expected though the function createFitnessClassTable not work when they are both almost identical?
Just after posting (and 3 hours later), I realized the issue:
It was missing parenthesis after the conn.commit() in the createFitnessClassTable function call.

Adding UniqueKey constraint to a sqlite3 table with Flask-Migration fails with IntrgrityError

So I using sqlite as my test database and have the following classes in my models.py
class User(UserMixin, db.Model):
    """Flask-Login user model backed by the 'users' table."""
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True, index=True)
    # unique login name
    username = db.Column(db.String(40), unique=True, index=True)
    password_hash = db.Column(db.String(256))
    # serialized token derived from username + hash, used as the session id
    alternate_id = db.Column(db.String(100))
    posts = db.relationship('Posts', backref='author', lazy=True)

    def get_id(self):
        # Flask-Login hook: return alternate_id instead of the primary key,
        # so changing the password invalidates existing sessions.
        return str(self.alternate_id)

    def __init__(self, username, password):
        self.username = username
        self.password_hash = generate_password_hash(password)
        self.alternate_id = my_serializer.dumps(
            self.username + self.password_hash)

    def verify_password(self, password):
        # NOTE(review): returns the *string* "True" on success and None
        # otherwise — callers must use truthiness, not `is True`.
        if check_password_hash(self.password_hash, password):
            return "True"
class Posts(db.Model):
    """Blog post; 'title' carries the UNIQUE constraint the migration adds."""
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(100), nullable=False, unique=True)
    description = db.Column(db.String(1500))
    # FK to users.id; paired with the 'author' backref on User.posts
    author_id = db.Column(db.Integer, db.ForeignKey('users.id'))

    def __init__(self, title, description, author_id):
        self.title = title
        self.description = description
        self.author_id = author_id
I added the unique key constraint to column title in my Posts class and then was trying to update the schema using Flask-Migrate.
Initially I was getting the No support for ALTER of constraints in SQLite dialect errors since sqlite3 does not support it through alembic. So I looked the alembic documentation and found that you can actually do such migrations using the batch mode migrations. So I updated my migration script as below.
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Batch mode rebuilds the table via a temporary copy — SQLite's only
    # route for ALTER-style constraint changes. It raises IntegrityError
    # if existing 'title' values are not already unique (the question's
    # resolution: duplicate rows had to be removed first).
    with op.batch_alter_table("posts") as batch_op:
        batch_op.create_unique_constraint('unique_title', ['title'])
    # ### end Alembic commands ###
Now when I try to run flask db upgrade I get the following error
sqlalchemy.exc.IntegrityError: (sqlite3.IntegrityError) UNIQUE constraint failed: _alembic_tmp_posts.title [SQL: 'INSERT INTO
_alembic_tmp_posts (id, title, description, author_id) SELECT posts.id, posts.title, posts.description, posts.author_id \nFROM posts'] (Background on this error at: http://sqlalche.me/e/gkpj`)
I am not able to understand that why IntegrityError exception is being thrown because if I look at the insert statement the number of columns are same.
Does it have something to do with the authors_id column having a foreignkey constraint on it ?
The database table column on which I was adding the unique constraint had duplicate data and that was the reason I was getting the integrity error, I am just surprised why I didn't notice that earlier.
So once I removed one of the duplicate rows, the database upgrade was successful.

Error with connectin to database using sqlite3 with python

When running following code I get error which I posted at the bottom of the post. I followed tutorial on creating databases from here.
These functions worked when creating previous databases though.
I am using jupyter notebook v 3.5.
def create_connection(db_file):
    """Open a SQLite connection to *db_file*.

    Returns the connection object, or None (after printing the error)
    when the connection cannot be established.
    """
    conn = None
    try:
        conn = sqlite3.connect(db_file)
    except sqlite3.Error as e:
        print("Connection error: [%s]" % e)
    return conn
def create_table(conn, create_table_sql):
    """Run a CREATE TABLE statement on *conn*, printing any sqlite error."""
    try:
        conn.cursor().execute(create_table_sql)
    except sqlite3.Error as e:
        print("Connection error while creating table: [%s]" % e)
def sqlTables(db_file):
    """Ensure the table_data table exists in *db_file*.

    Bug fix: the original SQL had a trailing comma after the last column,
    which is a syntax error in SQLite ('near ")": syntax error') — the
    table was never created, causing the later 'no such table' error.
    """
    sql_create_synset_table = ''' CREATE TABLE IF NOT EXISTS table_data (
                                        id TEXT NOT NULL,
                                        status TEXT NOT NULL,
                                        confidence_score INT NOT NULL
                                    ); '''
    conn = create_connection(db_file)
    if conn is not None:
        create_table(conn, sql_create_synset_table)
    else:
        print("Error! cannot create db conn.")
def upload_data(db_file):
    """Insert one row per element of the module-global id/status/conf lists.

    NOTE(review): relies on globals id_list, status_list and conf_list being
    defined elsewhere — presumably parallel lists of equal length; confirm.
    """
    sqlTables(db_file)
    conn = create_connection(db_file)
    cursor = conn.cursor()
    # `with conn:` already commits on success, so the explicit commit
    # below is redundant (but harmless)
    with conn:
        for i in range(len(id_list)):
            s_id = id_list[i]
            status = status_list[i]
            conf = conf_list[i]
            cursor.execute("INSERT INTO table_data(id, status, confidence_score) VALUES(?,?,?)"\
            ,(s_id, status, conf))
    conn.commit()

upload_data("path/to/db/table.db")
Connection error while creating table: [near ")": syntax error]
---> 12 cursor.execute("INSERT INTO table_data(id, status, confidence_score) VALUES(?,?,?)" ,(sset_id, stus, conf))
OperationalError: no such table: table_data

Scrapy exports all results in one row in SQLite

I'm making a basic spider using Scrapy and want to store the data with SQLite. The spider is working fine and saves the data I want, but it writes all data on the same row in the database.
Here's my spider:
def parse(self, response):
    """Yield one ScrapedItem per element matched on the page.

    NOTE(review): .extract() returns a *list* of strings, so each field is
    stored as a list — likely why all values appear jammed together in one
    DB row; extract_first()/get() would yield single strings. Also the
    XPath '#class' looks like a garbled '@class' — confirm.
    """
    for sel in response.xpath('//*[#class="class"]'):
        item = ScrapedItem()
        item['Name'] = sel.xpath('*/header/div//h2/a/text()').extract()
        item['Site'] = sel.xpath('*/header/div/a[1]/text()').extract()
        item['Category'] = sel.xpath('*/header/div/h6[3]/text()').extract()
        yield item
And here is my pipeline:
import sqlite3 as lite
from xyz import settings
from xyz import items
con = None
class Pipeline(object):
    """Scrapy item pipeline that stores one database row per scraped item.

    Fixes over the posted code:
    - INSERT had four '?' placeholders for three values (ProgrammingError).
    - createTables() called dropAgencyTable()/createAgencyTable(), which do
      not exist on this class (AttributeError on instantiation); it now
      calls the dropTable()/createTable() methods actually defined.
    - dropTable() dropped a table named "Agency" while createTable() made
      "Table" — they now target the same table.
    - 'Table' is an SQL keyword, so the identifier is quoted.
    """

    def __init__(self):
        self.setupDBCon()
        self.createTables()

    def process_item(self, item, spider):
        """Store the item and pass it along the pipeline chain."""
        self.storeInfoInDb(item)
        return item

    def storeInfoInDb(self, item):
        # exactly three placeholders for the three columns
        self.cur.execute(
            'INSERT INTO "Table" (Name, Site, Category) VALUES (?, ?, ?)',
            (
                str(item.get('Name', '')),
                str(item.get('Site', '')),
                str(item.get('Category', '')),
            ),
        )
        print(item.get('Name', ''))
        self.con.commit()

    def setupDBCon(self):
        self.con = lite.connect('test.db')
        self.cur = self.con.cursor()

    def __del__(self):
        self.closeDB()

    def createTables(self):
        self.dropTable()
        self.createTable()

    def createTable(self):
        self.cur.execute('CREATE TABLE IF NOT EXISTS "Table" ('
                         'id INTEGER PRIMARY KEY NOT NULL, '
                         'Name TEXT, Site TEXT, Category TEXT)')

    def dropTable(self):
        # drop the same table createTable() builds (fresh run each crawl)
        self.cur.execute('DROP TABLE IF EXISTS "Table"')

    def closeDB(self):
        self.con.close()
How do I save my scraped data in one separate row per scraped item?
Look at this answer of mine. The problem is that your spider extracts all the items, stores them in a list, and then yields that list to the item pipeline, so the pipeline does not receive the information one item at a time. The solution is to use a loop and iterate over the desired rows one by one.

ProgrammingError Thread error in SQLAlchemy

I have a two simple tables in a sqlite db.
from sqlalchemy import MetaData, Table, Column, Integer, ForeignKey, \
create_engine, String
from sqlalchemy.orm import mapper, relationship, sessionmaker, scoped_session
from sqlalchemy.ext.declarative import declarative_base
# Engine/session setup: echo=True logs every emitted SQL statement;
# scoped_session wraps the factory in a thread-local registry so each
# thread gets (and reuses) its own Session.
engine = create_engine('sqlite:///dir_graph.sqlite', echo=True)
session_factory = sessionmaker(bind=engine)
Session = scoped_session(session_factory)
session = Session()
Base = declarative_base()
class NodeType(Base):
    """Lookup table of node categories; one-to-many relationship to Node."""
    __tablename__ = 'nodetype'
    id = Column(Integer, primary_key=True)
    name = Column(String(20), unique=True)
    # backref 'nodetype' lets a Node instance reach its type as node.nodetype
    nodes = relationship('Node', backref='nodetype')

    def __init__(self, name):
        self.name = name

    def __repr__(self):
        return "Nodetype: %s" % self.name
class Node(Base):
    """A named graph node pointing at its NodeType via type_id."""
    __tablename__ = 'node'
    id = Column(Integer, primary_key=True)
    name = Column(String(20), unique=True)
    type_id = Column(Integer,
                     ForeignKey('nodetype.id'))

    def __init__(self, _name, _type_id):
        self.name = _name
        self.type_id = _type_id

# Emit CREATE TABLE statements for all mapped classes (no-op if present).
Base.metadata.create_all(engine)
After the run I interact with the interpreter. e.g. n1= Node('Node1',1) to learn about sqlalchemy. After I did a session.commit() and try another statement e.g. n2 = Node('n2',1) I get this error:
sqlalchemy.exc.ProgrammingError: (ProgrammingError) SQLite objects created in a thread can only be used in that same thread.The object was created in thread id 3932 and this is thread id 5740 None None.
How can I continue a session after I did a commit ?
tnx
SQLite by default prohibits the usage of a single connection in more than one thread.
just add connect_args={'check_same_thread': False} parameter to your engine variable like
engine = create_engine('sqlite:///dir_graph.sqlite', connect_args={'check_same_thread': False}, echo=True)
According to sqlite3.connect:
By default, check_same_thread is True and only the creating thread may
use the connection. If set False, the returned connection may be
shared across multiple threads. When using multiple threads with the
same connection writing operations should be serialized by the user to
avoid data corruption.

Resources