I am pulling data from a SQL Server database and saving it to a variable in JSON format. I would like to create a JSON file directly in Cloud Storage without having to create one on my local machine first. Is that possible?
I was able to create a JSON file on my local machine and upload it, but I would like to skip that step by building the JSON object in the script and writing it straight to a file in Cloud Storage.
# Libraries
import json
import os
import pyodbc
from gcloud import storage
# Path to the service account key file (auth.json next to the script,
# with a hard-coded fallback when __file__ is not defined)
try:
    script_path = os.path.dirname(os.path.abspath(__file__)) + "/auth.json"
except NameError:
    script_path = "C:\\Users\\userl\\Documents\\auth.json"
# Credentials and settings: the env var must point to the key file itself
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = script_path
# Query SQL Server and turn each row into a dict keyed by column name
db = pyodbc.connect("DRIVER={SQL Server};server=10.0.1.1;database=blablabla;uid=xpto;pwd=xxx")
cursor = db.cursor()
SQLview = 'select * from test'
result = cursor.execute(SQLview)
items = [dict(zip([key[0] for key in cursor.description], row)) for row in result]
# Serialize to JSON in memory; note this is a bytes payload, not a file
json_data = json.dumps(items, ensure_ascii=False).encode('utf-8')
# Current flow: dict_obj.json was written to disk beforehand (the step
# I want to eliminate) and is then uploaded. The blob name must be a
# string naming the object in the bucket, not the JSON payload itself.
client = storage.Client(project='big-data')
bucket = client.get_bucket('big-data-bkt')
blob = bucket.blob('dict_obj.json')
with open('dict_obj.json', 'rb') as f:
    blob.upload_from_file(f)
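From reading the client library, I believe Blob.upload_from_string() accepts an in-memory str/bytes payload, which would let me skip the local file entirely. Here is a minimal sketch of what I am aiming for, assuming that method behaves the way I think it does (the project, bucket, object name, and sample rows below are placeholders):

import json
from gcloud import storage

# Placeholder rows standing in for the SQL Server result set
items = [{"id": 1, "name": "test"}]

# Serialize in memory; nothing is written to the local disk
json_data = json.dumps(items, ensure_ascii=False).encode('utf-8')

client = storage.Client(project='big-data')
bucket = client.get_bucket('big-data-bkt')

# 'dict_obj.json' is the object name inside the bucket, not a local path
blob = bucket.blob('dict_obj.json')

# Push the serialized JSON straight to Cloud Storage from memory
blob.upload_from_string(json_data, content_type='application/json')

Is this the right approach, or is there a preferred way to write an in-memory object into a bucket?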