I am currently looping through a JSON response and inserting each row one at a time. This is very slow, even for just a few thousand rows. What is the most efficient way to insert the data? Here is my code:
from module import usr, pwd, acct, db, schem, api_key
import requests
import snowflake.connector

end_point = 'users'

def snowflake_connect():
    global cursor, mydb
    mydb = snowflake.connector.connect(
        user=usr,
        password=pwd,
        account=acct,
        database=db,
        schema=schem,
    )

def snowflake_insert(id, activated, name):
    # Opens a fresh connection and runs a single-row INSERT for every record
    global cursor
    snowflake_connect()
    cursor = mydb.cursor()
    sql_insert_query = """INSERT INTO USERS(ID, ACTIVATED, NAME) VALUES (%s, %s, %s)"""
    insert_tuple = (id, activated, name)
    cursor.execute(sql_insert_query, insert_tuple)
    return cursor

def get_users():
    url = 'https://company.pipedrive.com/v1/{}?&api_token={}'.format(end_point, api_key)
    response = requests.request("GET", url).json()
    read_users(response)

def read_users(response):
    for data in response['data']:
        id = data['id']
        activated = data['activated']
        name = data['name']
        snowflake_insert(id, activated, name)

if __name__ == "__main__":
    snowflake_truncate()  # truncate helper, not shown here
    get_users()
    cursor.close()
Use the COPY INTO command. Here's a thread from the Snowflake support forums about this exact process: support.snowflake.net/s/question/0D50Z000090I9u4SAC/…
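A minimal sketch of that approach, reusing the credentials from your code: write the rows to a local CSV, PUT it to the USERS table's internal stage, and load everything with a single COPY INTO. The bulk_load_users helper, the file layout, and the PURGE/FILE_FORMAT options are my assumptions, not from your code, so adjust them to your table.

import csv
import tempfile
import snowflake.connector
from module import usr, pwd, acct, db, schem

def bulk_load_users(rows):
    """Bulk-load a list of (id, activated, name) tuples via PUT + COPY INTO."""
    # 1. Write all rows to a temporary CSV file (path assumes a Unix-style filesystem).
    with tempfile.NamedTemporaryFile("w", suffix=".csv", newline="", delete=False) as f:
        csv.writer(f).writerows(rows)
        local_path = f.name

    mydb = snowflake.connector.connect(
        user=usr, password=pwd, account=acct, database=db, schema=schem
    )
    cursor = mydb.cursor()
    try:
        # 2. Upload the file to the USERS table's internal stage.
        cursor.execute("PUT file://{} @%USERS OVERWRITE = TRUE".format(local_path))
        # 3. Load all staged files into the table in one bulk operation,
        #    removing them from the stage afterwards.
        cursor.execute(
            "COPY INTO USERS (ID, ACTIVATED, NAME) "
            "FROM @%USERS "
            "FILE_FORMAT = (TYPE = CSV FIELD_OPTIONALLY_ENCLOSED_BY = '\"') "
            "PURGE = TRUE"
        )
    finally:
        cursor.close()
        mydb.close()

With this, read_users would collect the tuples into a list and call bulk_load_users(rows) once instead of opening a connection and inserting per row. If you'd rather not stage files, passing your existing INSERT plus a list of tuples to cursor.executemany() should also be far faster than the loop, since the Snowflake connector batches the values into a multi-row INSERT; but for large volumes, COPY INTO is the route the thread above recommends.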