I am trying to convert a JSON file of about 50K lines (~4 MB) into a SQL database. I am using the approach in the code below, but the function crashes before it finishes. I am not good at SQL, so I do not know if there is another approach I should follow for this. I think the crash happens because of the large number of rows, which take a long time to insert one by one. The format of my DTO is something like this:
{"Data":[ {"name":"Variable A","value":0.321721,"timecreated":"2018-1-15T11:10:7.977Z"}, {"name":"Variable B","value":-8.932533,"timecreated":"2018-1-15T11:10:8.17Z"}, {"name":"Variable C","value":-7.068326,"timecreated":"2018-1-15T11:10:8.58Z"}, {"name":"Variable A","value":-3.580420,"timecreated":"2018-1-15T11:10:8.98Z"},
....50K lines
{"name":"Variable C","value":1.549976,"timecreated":"2018-1-15T11:10:7.977Z"}, {"name":"Variable A","value":-8.701625,"timecreated":"2018-1-15T11:10:8.17Z"}]}
public static async Task RunAsync([BlobTrigger("Container/Device/{name}", Connection = "AzureWebJobsStorage")] Stream myBlob, string name, TraceWriter log)
{
    log.Info($"C# Blob trigger Entered\n Name:{name} \n Size: {myBlob.Length} Bytes");

    // Read the whole blob into memory and deserialize it into the DTO
    var sr = new StreamReader(myBlob);
    string strReadBlob = sr.ReadToEnd();
    var dto = JsonConvert.DeserializeObject<Object50KElement>(strReadBlob);

    using (SqlConnection conn = new SqlConnection(cnnString)) // cnnString defined elsewhere
    {
        // One INSERT per element, i.e. ~50K round trips to the database
        foreach (Variable v in dto.Data)
        {
            //Send to SQL
            conn.Execute(XXXXXXX);
        }
    }
}
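To make "another approach" concrete: one thing I have read about but not tried is SqlBulkCopy, which would send all rows to the server in one bulk load instead of ~50K individual commands. A minimal sketch of what I think that would look like (needs System.Data and System.Data.SqlClient; "Measurements" and the column names are placeholders, not my real schema):

// Hypothetical bulk-load version: build the rows locally, then one round trip
var table = new DataTable();
table.Columns.Add("Name", typeof(string));
table.Columns.Add("Value", typeof(double));
table.Columns.Add("TimeCreated", typeof(DateTime));

foreach (Variable v in dto.Data)
    table.Rows.Add(v.name, v.value, DateTime.Parse(v.timecreated)); // parse here since the DTO keeps a string

using (var bulk = new SqlBulkCopy(cnnString))
{
    bulk.DestinationTableName = "Measurements"; // placeholder table name
    await bulk.WriteToServerAsync(table);
}

Would something along these lines be the right direction, or is there a better way to insert this many rows from an Azure Function?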