SQLite: Why is the insert SQL statement not updating the table in the database?

I have a Python PySimpleGUI form that connects to a SQLite database.
The function that creates and populates a table called fitness_class is:
def createFitnessClassTable(conn):
    '''
    '''
    SQL = """CREATE TABLE IF NOT EXISTS fitness_class (
                 fitness_class_id integer PRIMARY KEY,
                 fitness_class_name text NOT NULL,
                 date_and_time text NOT NULL
             );"""
    sql_create = """INSERT OR IGNORE INTO fitness_class(fitness_class_id,fitness_class_name,date_and_time)
                    VALUES(?,?,?)"""
    data = [
        (1, 'Cardio', 'Thursday 3-5pm'),
        (2, 'Pilates', 'Friday 9-11am'),
        (3, 'Spin', 'Monday 2-4pm')
    ]
    try:
        c = conn.cursor()
        c.execute(SQL)
        c.close()
        connection = conn.cursor()
        connection.executemany(sql_create, data)
        connection.close()
    except Error as e:
        # print(e)
        sg.Popup(e)
        return False
    return True
When the function is called, it creates the table and I don't get any error messages. However, it isn't saving the data from the insert statement either.
These are the lines that call the function:
#!/usr/bin/python
import os
import PySimpleGUI as sg
from tkinter import *
import re
import sys
import PySimpleGUI as sg
import sqlite3
sys.path.append(os.path.dirname(__file__))
conn = dbconnect()
createFitnessClassTable(conn=conn)
conn.commit
conn.close()
I am confused because I have a similar function to create another table which is working correctly (i.e. creating the table if it doesn't exist and populating it with the data):
def createMembershipTable(conn):
    '''
    '''
    SQL = """
        CREATE TABLE IF NOT EXISTS membership (
            membership_type_id integer PRIMARY KEY,
            membership_type text NOT NULL,
            weekly_amount real NOT NULL
        );"""
    sql_create = """INSERT OR IGNORE INTO membership(membership_type_id,membership_type,weekly_amount)
                    VALUES(?,?,?)"""
    data = [(1, 'Basic', 10.00),
            (2, 'Regular', 15.00),
            (3, 'Premium', 20.00)
    ]
    try:
        c = conn.cursor()
        c.execute(SQL)
        c.close()
        connection = conn.cursor()
        connection.executemany(sql_create, data)
        connection.close()
    except Error as e:
        print(e)
        return False
    return True
The lines to call that function:
conn = dbconnect()
createMembershipTable(conn)
conn.commit()
conn.close()
What am I missing? Why does createMembershipTable work as expected while createFitnessClassTable does not, when the two are almost identical?

Just after posting (and 3 hours later), I realized the issue:
The parentheses were missing on conn.commit in the lines that call createFitnessClassTable, so the commit method was never actually called and the inserted rows were never saved.
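For reference, a minimal sketch of the corrected calling code (assuming dbconnect() returns a sqlite3 connection, as in the original script):

conn = dbconnect()
createFitnessClassTable(conn=conn)
conn.commit()   # parentheses added so the method is actually called
conn.close()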

How to test a flask app using pytest to get the coverage rate up

Currently this is my app.py file:
# imports - standard imports
import json
import os
import sqlite3

# imports - third party imports
from flask import Flask, Response, jsonify, redirect
from flask import render_template as render
from flask import request, url_for

DATABASE_NAME = "inventory.sqlite"

# setting up Flask instance
app = Flask(__name__)
app.config.from_mapping(
    SECRET_KEY="dev",
    DATABASE=os.path.join(app.instance_path, "database", DATABASE_NAME),
)

# listing views
link = {x: x for x in ["location", "product", "movement"]}
link["index"] = "/"


def init_database():
    db = sqlite3.connect(DATABASE_NAME)
    cursor = db.cursor()

    # initialize page content
    cursor.execute(
        """
        CREATE TABLE IF NOT EXISTS
        products(prod_id INTEGER PRIMARY KEY AUTOINCREMENT,
                 prod_name TEXT UNIQUE NOT NULL,
                 prod_quantity INTEGER NOT NULL,
                 unallocated_quantity INTEGER);
        """
    )
    cursor.execute(
        """
        CREATE TRIGGER IF NOT EXISTS default_prod_qty_to_unalloc_qty
        AFTER INSERT ON products
        FOR EACH ROW
        WHEN NEW.unallocated_quantity IS NULL
        BEGIN
            UPDATE products SET unallocated_quantity = NEW.prod_quantity WHERE rowid = NEW.rowid;
        END;
        """
    )
    # initialize page content
    cursor.execute(
        """
        CREATE TABLE IF NOT EXISTS location(loc_id INTEGER PRIMARY KEY AUTOINCREMENT,
                                            loc_name TEXT UNIQUE NOT NULL);
        """
    )
    # initialize page content
    cursor.execute(
        """
        CREATE TABLE IF NOT EXISTS logistics(trans_id INTEGER PRIMARY KEY AUTOINCREMENT,
                                             prod_id INTEGER NOT NULL,
                                             from_loc_id INTEGER NULL,
                                             to_loc_id INTEGER NULL,
                                             prod_quantity INTEGER NOT NULL,
                                             trans_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
                                             FOREIGN KEY(prod_id) REFERENCES products(prod_id),
                                             FOREIGN KEY(from_loc_id) REFERENCES location(loc_id),
                                             FOREIGN KEY(to_loc_id) REFERENCES location(loc_id));
        """
    )
    db.commit()
@app.route("/product", methods=["POST", "GET"])
def product() -> Response | str:
    init_database()
    msg = None
    db = sqlite3.connect(DATABASE_NAME)
    cursor = db.cursor()

    cursor.execute("SELECT * FROM products")
    products = cursor.fetchall()

    if request.method == "POST":
        prod_name = request.form["prod_name"]
        quantity = request.form["prod_quantity"]

        transaction_allowed = False
        if prod_name not in ["", " ", None] and quantity not in ["", " ", None]:
            transaction_allowed = True

        if transaction_allowed:
            try:
                cursor.execute(
                    "INSERT INTO products (prod_name, prod_quantity) VALUES (?, ?)",
                    (prod_name, quantity),
                )
                db.commit()
            except sqlite3.Error as e:
                msg = f"An error occurred: {e.args[0]}"
            else:
                msg = f"{prod_name} added successfully"

        if msg:
            print(msg)
        return redirect(url_for("product"))

    return render(
        "product.html",
        link=link,
        products=products,
        transaction_message=msg,
        title="Products Log",
    )
and this is my test file, test_product.py. I want to test my function so that my coverage on SonarCloud reaches 100%, but the pytest functions below don't seem to help with that. I must say I am very new to this and still learning.
import requests
import app
import pytest

ENDPOINT = "http://127.0.0.1:5000/product"

app.init_database()


def test_product_GET():
    response = requests.get(ENDPOINT)
    assert response.status_code == 200
    # assert "Products Log" in response.text


#pytest.fixture()
def test_product_POST_valid():
    response = requests.post(ENDPOINT, data={"prod_name": "product1", "prod_quantity": "10"}, allow_redirects=True)
    assert response.status_code == 200
    # assert "product1 added successfully" in response.text


#pytest.fixture()
def test_product_POST_invalid():
    response = requests.post(ENDPOINT, data={"prod_name": "", "prod_quantity": ""}, allow_redirects=True)
    assert response.status_code == 200
    # assert "An error occurred" in response.text
I am not sure how to make this work without using requests; as it stands it gives 0% coverage, and I want this code to be covered in SonarCloud.
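One way to get the view code counted towards coverage is to drop requests and use Flask's built-in test client, so the view runs in the same process as pytest. A minimal sketch, assuming the Flask instance and init_database are importable from app.py and that templates/product.html exists:

import pytest

from app import app as flask_app, init_database


@pytest.fixture()
def client():
    # run the app in-process so coverage tools can see the view executing
    init_database()
    flask_app.config["TESTING"] = True
    with flask_app.test_client() as client:
        yield client


def test_product_get(client):
    response = client.get("/product")
    assert response.status_code == 200


def test_product_post_valid(client):
    response = client.post(
        "/product",
        data={"prod_name": "product1", "prod_quantity": "10"},
        follow_redirects=True,
    )
    assert response.status_code == 200

Because the requests are served by the same interpreter that pytest runs in, no separate development server is needed and the lines inside product() show up in the coverage report.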

How can I open a db.sqlite3 file and have a look at its content?

I don't know how to open a db.sqlite3 file in a reader-friendly way.
I would like the data in it to be shown as tables.
Upload your file here and get the tabulated result:
http://inloop.github.io/sqlite-viewer/
Or run a Python script like the one below:
import sqlite3
from sqlite3 import Error

def create_connection(db_file):
    """ create a database connection to the SQLite database specified by db_file
    :param db_file: database file
    :return: Connection object or None """
    conn = None
    try:
        conn = sqlite3.connect(db_file)
    except Error as e:
        print(e)
    return conn

def select_all_tasks(conn):
    """ Query all rows in the tasks table
    :param conn: the Connection object
    :return: """
    cur = conn.cursor()
    cur.execute("SELECT * FROM tasks")
    rows = cur.fetchall()
    for row in rows:
        print(row)

Error with connecting to database using sqlite3 with python

When running the following code I get the error posted at the bottom of the post. I followed a tutorial on creating databases from here.
These functions worked when creating previous databases, though.
I am using Jupyter Notebook v3.5.
def create_connection(db_file):
    try:
        conn = sqlite3.connect(db_file)
        return conn
    except sqlite3.Error as e:
        print("Connection error: [%s]" % e)
        return None

def create_table(conn, create_table_sql):
    try:
        c = conn.cursor()
        c.execute(create_table_sql)
    except sqlite3.Error as e:
        print("Connection error while creating table: [%s]" % e)

def sqlTables(db_file):
    sql_create_synset_table = ''' CREATE TABLE IF NOT EXISTS table_data (
                                      id TEXT NOT NULL,
                                      status TEXT NOT NULL,
                                      confidence_score INT NOT NULL,
                                  ); '''
    conn = create_connection(db_file)
    if conn is not None:
        create_table(conn, sql_create_synset_table)
    else:
        print("Error! cannot create db conn.")

def upload_data(db_file):
    sqlTables(db_file)
    conn = create_connection(db_file)
    cursor = conn.cursor()
    with conn:
        for i in range(len(id_list)):
            s_id = id_list[i]
            status = status_list[i]
            conf = conf_list[i]
            cursor.execute("INSERT INTO table_data(id, status, confidence_score) VALUES(?,?,?)",
                           (s_id, status, conf))
        conn.commit()

upload_data("path/to/db/table.db")
Connection error while creating table: [near ")": syntax error]
---> 12 cursor.execute("INSERT INTO table_data(id, status, confidence_score) VALUES(?,?,?)" ,(sset_id, stus, conf))
OperationalError: no such table: table_data
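As an aside, the "near ')'" syntax error comes from the trailing comma after the last column definition; because the CREATE TABLE statement fails, the later INSERT then reports no such table: table_data. A sketch of the statement without the trailing comma:

sql_create_synset_table = ''' CREATE TABLE IF NOT EXISTS table_data (
                                  id TEXT NOT NULL,
                                  status TEXT NOT NULL,
                                  confidence_score INT NOT NULL
                              ); '''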

Python sqlite3: how to quickly and cleanly interrupt long running query with, e.g., KeyboardInterrupt

Using the sqlite3 module in Python, a long-running query is not quickly interrupted/canceled when a SIGINT (e.g. Control-C) is received. There is an interrupt() method provided by sqlite3, but there are no examples of how to use it.
Is there a simple way to interrupt/cancel a long-running query running via Python/sqlite3?
To illustrate, first generate a test database & table:
import sqlite3
from random import randint

conn = sqlite3.connect("randtable.sqlite", 10.0)  # match the file name used by the query script below
cursor = conn.cursor()
cursor.execute("CREATE TABLE randint (id integer, rand integer)")
for i in range(1000000):
    if i % 1000 == 0:
        print("{0}...".format(i))
    rand = randint(0, 1000000)
    cursor.execute("INSERT INTO randint VALUES ({0},{1})".format(i, rand))
conn.commit()
conn.close()
Then execute a long running Python/sqlite3 script in a terminal and try to interrupt it with Control-C:
from __future__ import print_function
import sqlite3

def main():
    # Long running query (pathological by design)
    statement = '''
        SELECT DISTINCT a.id,a.rand
        FROM randint a
        JOIN randint b ON a.id=b.rand
        JOIN randint c ON a.id=c.rand
        JOIN randint d ON a.id=d.rand
        JOIN randint e ON a.id=e.rand
        JOIN randint f ON a.id=f.rand
        JOIN randint g ON a.id=g.rand
        JOIN randint h ON a.id=h.rand
        ORDER BY a.id limit 10'''
    conn = sqlite3.connect('randtable.sqlite', 10.0)
    cursor = conn.cursor()
    print("Executing query")
    cursor.execute(statement)
    rows = cursor.fetchall()
    print("ROWS:")
    for row in rows:
        print(" ", row)
    conn.close()
    return

if __name__ == "__main__":
    main()
Running the above script in a terminal and then pressing Control-C (or sending SIGINT some other way) will eventually cancel the query and the script, but it can take quite a bit of time, sometimes many minutes. The exact same query running in the sqlite3 command-line tool is canceled almost instantly when Control-C is pressed.
Thanks in advance!
Your answer covers it, but (after letting it slip my mind yesterday - sorry!) I remembered I'd promised to write an answer, so here's another version that demonstrates you can do this without globals. I've also used a threading.Event here instead of a signal to demonstrate that there are a few different ways of signalling a thread that it's time to do something (but for your purposes, stick with signal, because that's perfect for reacting to a Ctrl+C):
import sqlite3
import time
import threading

# Background thread that'll kill our long running query after 1 second
def kill_it(connection, event):
    event.wait()
    time.sleep(1)
    connection.interrupt()

# Make some tables with lots of data so we can make a long running query
def big_query(conn, kill_event):
    print('Making big tables')
    conn.execute(
        "CREATE TABLE foo (i integer primary key, s text);")
    conn.execute(
        "CREATE TABLE bar (j integer primary key, s text);")
    conn.execute(
        "INSERT INTO foo VALUES %s" % ", ".join("(%d, 'foo')" % i for i in range(10000)))
    conn.execute(
        "INSERT INTO bar VALUES %s" % ", ".join("(%d, 'bar')" % i for i in range(10000)))
    kill_event.set()
    print('Running query')
    cur = conn.cursor()
    cur.execute(
        "SELECT * FROM foo, bar")
    print(len(cur.fetchall()))

def main():
    conn = sqlite3.connect('foo.db')
    kill_event = threading.Event()
    kill_thread = threading.Thread(target=kill_it, args=(conn, kill_event))
    kill_thread.start()
    big_query(conn, kill_event)
    kill_thread.join()

if __name__ == '__main__':
    main()
Answering my own question since I think I've worked it out. Below is what I've come up with; any comments on this code would be greatly appreciated.
#!/usr/bin/env python
from __future__ import print_function
import sqlite3
import threading
import signal
import os
import time

conn = None
shutdown = False

def main():
    global conn
    # Long running query (pathological by design)
    statement = '''
        SELECT DISTINCT a.id,a.rand
        FROM randint a
        JOIN randint b ON a.id=b.rand
        JOIN randint c ON a.id=c.rand
        JOIN randint d ON a.id=d.rand
        JOIN randint e ON a.id=e.rand
        JOIN randint f ON a.id=f.rand
        JOIN randint g ON a.id=g.rand
        JOIN randint h ON a.id=h.rand
        ORDER BY a.id limit 10'''
    conn = sqlite3.connect('randtable.sqlite', 10.0)
    cursor = conn.cursor()
    print("Executing query")
    try:
        cursor.execute(statement)
    except Exception as err:
        if str(err) != "interrupted":
            print("Database error: {0}".format(str(err)))
        return None
    rows = cursor.fetchall()
    print("ROWS:")
    for row in rows:
        print(" ", row)
    conn.close()
    conn = None
    return

def interrupt(signum, frame):
    global conn
    global shutdown
    print("Interrupt requested")
    if conn:
        conn.interrupt()

if __name__ == "__main__":
    signal.signal(signal.SIGINT, interrupt)
    mainthread = threading.Thread(target=main)
    mainthread.start()
    while mainthread.isAlive():
        time.sleep(0.2)

Sqlite3: Do I have to attach a database on every connection?

The following piece of code creates two databases:
import sqlite3

db = 'brazil'
conn = sqlite3.connect(db + '.db')
c = conn.cursor()
qCreate = """
    CREATE TABLE states
    (zip_id numeric NOT NULL,
     state_name text NOT NULL,
     CONSTRAINT pk_brazil
         PRIMARY KEY (zip_id))
    """
c.execute(qCreate)
conn.commit()
conn.close()

db = 'rio'
conn = sqlite3.connect(db + '.db')
c = conn.cursor()
qCreate = """CREATE TABLE rio_de_janeiro
    (zip_id numeric NOT NULL,
     beach_name text NOT NULL,
     CONSTRAINT pk_rio
         PRIMARY KEY (zip_id))
    """
c.execute(qCreate)
conn.commit()
conn.close()
The following piece of code attaches the database RIO to the database BRAZIL and prints all the databases (Rio and Brazil).
db = 'brazil'
conn = sqlite3.connect(db+'.db')
c = conn.cursor()
qCreate = """ATTACH DATABASE ? AS competition """
c.execute(qCreate, ('rio.db',))
c.execute("PRAGMA database_list")
data = c.fetchall()
print data
conn.commit()
conn.close()
However, the following piece of code prints only the Brazil database:
db = 'brazil'
conn = sqlite3.connect(db+'.db')
c = conn.cursor()
c.execute("PRAGMA database_list")
data = c.fetchall()
print data
conn.commit()
conn.close()
The attached database is no longer attached.
The SQLite documentation hints at this:
The ATTACH DATABASE statement adds another database file to the current database connection.
Do I have to attach the database every time?
I planned to use attached databases for schemas, but maybe I should try something else?
I am using Python in the Pythonista app on iOS.
Almost all settings you can change in SQLite apply only to the current connection, i.e., are not saved in the database file.
So you have to re-ATTACH any databases whenever you have re-opened the main database.
Using attached databases makes sense only if you must use multiple database files due to some external constraint. In most cases, you should use only a single database.
SQLite does not have schemas. If you want to emulate them with attached databases, you have to live with the limitations of that approach.
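As a minimal sketch of what that looks like in practice (file names follow the question; the helper name is only for illustration), re-attach right after every connect:

import sqlite3

def connect_brazil():
    # ATTACH lasts only for the lifetime of this connection,
    # so it has to be repeated each time the database is opened
    conn = sqlite3.connect('brazil.db')
    conn.execute("ATTACH DATABASE ? AS competition", ('rio.db',))
    return conn

conn = connect_brazil()
# tables in the attached file are addressed with the schema prefix,
# e.g. competition.rio_de_janeiro
c = conn.cursor()
c.execute("PRAGMA database_list")
print(c.fetchall())
conn.close()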
