I've written some scripts to convert paginated Instagram JSON feeds (20 photos per AJAX request) to CSV so we can easily store the photo URLs in our database. Our CMS can automatically convert CSV files into SQL files, either by replacing the table or by appending to it. The problem is that this only works if ALL of the columns match.
It's close to working, but I can't import my generated CSVs because they keep getting an extra empty column where a row should simply end: the final CSV output contains a comma + line break when it should only contain the line break (i.e. no trailing comma).
Encoding is UTF-8 and line breaks are being added with "\n". I've console logged just about every step of the process, and the extra comma only seems to show up at the very last step.
Here's a picture of one of the CSVs I am generating: http://screencast.com/t/dZfqN08A
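In plain text the file comes out roughly like this (shortened, placeholder values); note the extra comma at the end of each line, which is what creates the empty column:

photopath,postid,userid,username,fullname,likes,winner,winnerplace,campaign,creationdate,easydate,photodeleted,
http://…jpg,123…,456…,someuser,Some User,42,0, ,maurice1,1401…,<easydate>,0,
http://…jpg,124…,457…,otheruser,Other User,17,0, ,maurice1,1401…,<easydate>,0,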
Below is all the relevant code:
First I'm using AJAX with a JSONP callback to load Instagram photos based on a hashtag.
The photos are loaded like this:
function loadNext(nextUrl) {
    $.ajax({
        url: url,
        cache: false,
        type: 'POST',
        dataType: "jsonp",
        success: function(object) {
            console.log('loadmore');
            if (object) {
                console.log(object);
                $('.loadmore').fadeOut(500);
                // load the gallery photos
                $.each( object.data, function(home, photo) {
                    photo = '<div class="photo photo-load">' +
                        '<img class="pull-me" src="' + photo.images.low_resolution.url + '" height="380px" width="380px" alt="photo">' +
                        '<div class="dot"></div>' +
                        '<div class="share" >' +
                        '<!-- AddThis Button BEGIN -->' +
                        '<div class="addthis_toolbox addthis_default_style addthis_16x16_style">' +
                        '<a class="addthis_button_twitter"></a>' +
                        '<a class="addthis_button_facebook"></a>' +
                        '</div>' +
                        '<!-- AddThis Button END -->' +
                        '</div>' +
                        '<div class="text-photo">' +
                        '<div class="svg line w-line"></div>' +
                        '<h4 class="left">'+ photo.user.username + '</h4>' +
                        '<h4 class="right share-photo">PARTAGE</h4>' +
                        '</div>' +
                        '<div class="vote w-path-hover">' +
                        '<div class="fb-like" data-href="http://dev.kngfu.com/maurice/" data-layout="box_count" data-action="like" data-show-faces="false" data-share="false"></div>' +
                        '<div class="insta-like">' +
                        '<div class="count-box">' +
                        '<p>'+ photo.likes.count + '</p>' +
                        '</div>' +
                        '<a class="insta-button" title="Pour appuyer votre proposition préférée, rendez-vous sur Instagram." href="http://instagram.com/" ><i class="fa fa-instagram"></i>J aime</a>' +
                        '</div> ' +
                        '<div class="w-path"></div>' +
                        '<div class="base-cross"></div>' +
                        '<h4 class="vote-button">VOTE</h4>' +
                        '</div>' +
                        '</div>';
                    $(photo).appendTo( $( ".gallery" ) );
                });
                url = object.pagination.next_url;
                console.log(url);
            } else {
                console.log("error");
            }
        } // end success func.
    });
}
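(For context, the load-more part itself works fine: a "load more" button just calls loadNext() again, and because url is overwritten with object.pagination.next_url after each request, the next page gets fetched. Roughly like this:)

$('.loadmore').on('click', function () {
    // url has already been updated to pagination.next_url by the previous request
    loadNext(url);
});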
Then, in a separate AJAX call, I convert the same JSON feed to a CSV file using the function below (this function also calls a couple of other functions, so those dependent functions are included after the AJAX call):
function convertJSON (nextUrl) {
    $.ajax({
        url: url,
        cache: false,
        type: 'POST',
        dataType: "jsonp",
        success: function(object) {
            if (object) {
                console.log(object);
                var fromJSON = new Array();
                i = 0;
                $.each( object.data, function(home, photo) {
                    i++;
                    var photopath = photo.images.low_resolution.url;
                    var postID = photo.id;
                    var userID = photo.user.id;
                    var user = photo.user.username;
                    // watch out for those wild fullnames in instagram json
                    var fullname = photo.user.full_name;
                    fullname = fullname.replace(/[^a-z0-9]+|\s+/gmi, " ");
                    //console.log(fullname);
                    var likes = photo.likes.count;
                    var winner = 0;
                    var winnerplace = " ";
                    var campaign = "maurice1";
                    var timestamp = photo.created_time;
                    // easydate field formatting
                    var date = new Date();
                    date.setSeconds( timestamp );
                    var photodeleted = 0;
                    // add new rows to csv
                    var linebreak = "\n";
                    var arrayFromJSON = new Array( linebreak+photopath,
                                                   postID,
                                                   userID,
                                                   user,
                                                   fullname,
                                                   likes,
                                                   winner,
                                                   winnerplace,
                                                   campaign,
                                                   timestamp,
                                                   date,
                                                   photodeleted );
                    fromJSON[i] = arrayFromJSON.join();
                });
                //url = object.pagination.next_url;
                //console.log(url);
                //console.log( fromJSON );
                makeCSV( fromJSON );
            } else {
                console.log("error");
            }
        } // end success func.
    });
}
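When I uncomment console.log( fromJSON ), each entry is a single comma-joined string that starts with the line break and ends with the photodeleted flag, roughly like this (shortened, made-up values):

fromJSON[1];
// -> "\nhttp://…jpg,123…,456…,someuser,Some User,42,0, ,maurice1,1401…,<easydate>,0"

i.e. there is no trailing comma at this point.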
// json to csv converter
function makeCSV (JSONData) {
    //console.log("makeCSV() function was started");
    var data = encodeURIComponent(JSONData);
    var currentTime = new Date().getTime();
    var date = getDate( currentTime );
    //console.log(JSONData);
    var fileName = date;
    var uri = "data:text/csv;charset=utf-8," // sets mime/data type
        + "photopath," // now 12 strings which are the CSV's column titles
        + "postid,"
        + "userid,"
        + "username,"
        + "fullname,"
        + "likes,"
        + "winner,"
        + "winnerplace,"
        + "campaign,"
        + "creationdate,"
        + "easydate,"
        + "photodeleted"
        + data; // finally append our URI encoded data
    console.log(uri);
    // generate a temp <a /> tag that will auto start our download when the function is called
    var link = document.createElement("a");
    link.id = new Date().getTime();
    link.href = uri;
    // link visibility hidden
    link.style = "visibility:hidden";
    link.download = fileName + ".csv";
    // append anchor tag and click
    $("div#hidden").append(link);
    link.click();
    //document.body.removeChild(link);
}
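For reference, here is (roughly) what the console.log(uri) output looks like once the encoded data has been appended (trimmed, URLs shortened). You can see the stray %2C (the URL-encoded comma) sitting right before every %0A (the encoded line break):

data:text/csv;charset=utf-8,photopath,postid,userid,username,fullname,likes,winner,winnerplace,campaign,creationdate,easydate,photodeleted%2C%0Ahttp%3A%2F%2F…jpg%2C…%2C0%2C%0Ahttp%3A%2F%2F…jpg%2C…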
// this function just makes human readable dates for CSV filename and id of our link tag
function getDate() {
    var date = new Date();
    //zero-pad a single zero if needed
    var zp = function (val){
        return (val <= 9 ? '0' + val : '' + val);
    }
    //zero-pad up to two zeroes if needed
    var zp2 = function(val){
        return val <= 99? (val <=9? '00' + val : '0' + val) : ('' + val ) ;
    }
    var d = date.getDate();
    var m = date.getMonth() + 1;
    var y = date.getFullYear();
    var h = date.getHours();
    var min = date.getMinutes();
    var s = date.getSeconds();
    var ms = date.getMilliseconds();
    return '' + y + '-' + zp(m) + '-' + zp(d) + ' ' + zp(h) + 'h' + zp(min) + 'm' + zp(s) + 's';
}
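(So the generated file ends up with a name like the following; the timestamp here is just an example:)

getDate(); // -> "2014-06-05 14h32m07s", so the download is named "2014-06-05 14h32m07s.csv"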
From all the console logging I've done, I can definitely assure you that there is no trailing comma until the final step, where the JSON array data gets URI encoded.
Since this extra column is also included in the header row, I'm wondering if it has to do with this line:
var uri = "data:text/csv;charset=utf-8," // sets mime/data type
I've also tried ISO-8859-1 encoding, but I get the same result.
Does anyone know why this is happening? Any help would be appreciated.