I have this python script that uses pandas read an excel sheet then to update a sqlite db it works perfectly just wondering if I can speed it up as I am at about 70k lines
"""
Created on Thu Aug 1 14:11:01 2019
@author: Shane Pitts
"""
# -*- coding: utf-8 -*-
import sqlite3
from sqlite3 import Error
import pandas as pd
import shutil
#Takes DB File path then creates connection to it
def create_connection(db_file):
""" create a database connection to a SQLite database """
try:
conn = sqlite3.connect(db_file)
return conn
print(sqlite3.version)
except Error as e:
print(e)
return None
#updates reported table
def update_dupi_reported(conn, dupi, userID, op):
with conn:
cur = conn.cursor()
cur.execute("UPDATE reported SET dupi = :dupi WHERE userID = :userID AND operator = :op",
{'dupi':dupi, 'userID':userID, 'op':op})
#updates blocked table
def update_dupi_blocked(conn, dupi, userID, op):
with conn:
cur = conn.cursor()
cur.execute("UPDATE blocked SET dupi = :dupi WHERE userID = :userID AND operator = :op",
{'dupi':dupi, 'userID':userID, 'op':op})
def Count():
#Creates a dataframe with the Excel sheet information
df = pd.DataFrame()
df = pd.read_excel("/root/Shane_db/Count.xlsx")
#Assigns a variable to the DataBase
database = "/root/Shane_db/db.db"
# create a database connection
conn = create_connection(database)
cur = conn.cursor()
#Runs through the DataFrame once for reported
for i in df.index:
userID = df['userID'][i]
dupi = df['dupi'][i]
op = df['operator'][i]
print(i)
with conn:
update_dupi_reported(conn, dupi, userID, op)
#Runs through a Second time for blocked
for x in df.index:
userID = df['userID'][x]
dupi = df['dupi'][x]
op = df['operator'][x]
print(x)
with conn:
update_dupi_blocked(conn, dupi, userID, op)
shutil.copy("/root/Shane_db/db.db", "/var/www/html/site/db.db")
if __name__ == '__main__':
Count()