Google Apps Script - Importing CSV in GAS - too slow
I have a script that imports a CSV file. It works, but it's slow in my opinion. Since the CSV file is pretty big, the script exceeds the maximum execution time.
Does anyone have suggestions on how to increase the speed of this code?
FYI: due to the S3 authentication, I cannot use the built-in Sheets import function.
/**
 * Imports a tab-delimited CSV file from S3 into the 'liquidity' sheet.
 *
 * NOTE(review): this version issues one appendRow() call per CSV row,
 * i.e. one Sheets API round trip per row — this is the reason the script
 * is slow and exceeds the execution-time limit on big files.
 *
 * Assumes the globals liquidityBalanceSheet (spreadsheet URL),
 * awsAccessKeyId, awsSecretKey, bucket and file are defined elsewhere,
 * and that the S3 library is installed in the project.
 */
function myFunction() {
  var liquidityBalance = SpreadsheetApp
    .openByUrl(liquidityBalanceSheet)
    .getSheetByName('liquidity');
  var s3 = S3.getInstance(awsAccessKeyId, awsSecretKey);
  var fromS3 = s3.getObject(bucket, file);
  // The file is tab-delimited, so pass '\t' as the delimiter.
  var data = Utilities.parseCsv(fromS3.getDataAsString(), '\t');
  liquidityBalance.clearContents();
  for (var i = 0; i < data.length; i++) {
    var row = [];
    for (var a = 0; a < data[i].length; a++) {
      row.push(data[i][a]);
    }
    // One API call per row — see the batched version below for the fix.
    liquidityBalance.appendRow(row);
  }
}
Hopefully the following code will run faster. It assembles all the data first and then writes it to the sheet in one action, instead of appending rows one at a time. If the code still times out, you will need to process partial amounts of the data, record where you left off, and continue with the next batch on the next run.
/**
 * Faster CSV import: parses the whole file, assembles a 2-D array, and
 * writes it to the sheet with a single setValues() call instead of one
 * appendRow() call per row.
 *
 * Fixes over the posted answer:
 *  - Writes starting at row 1. The original captured getLastRow() before
 *    clearContents() and then wrote at lastRow + 1, leaving the cleared
 *    rows blank above the imported data.
 *  - Pushes the copied `row` (not `thisRow`) so the inner copy loop is
 *    actually used; unused locals (csv, lastRow) removed.
 *  - Guards against an empty file, which would otherwise throw on
 *    data[0].length.
 *
 * Assumes the globals liquidityBalanceSheet, awsAccessKeyId, awsSecretKey,
 * bucket and file are defined elsewhere and the S3 library is installed.
 * Also assumes every parsed row has the same number of columns as row 0 —
 * setValues() requires a rectangular array.
 */
function myFunction() {
  var liquidityBalance = SpreadsheetApp
    .openByUrl(liquidityBalanceSheet)
    .getSheetByName('liquidity');
  var s3 = S3.getInstance(awsAccessKeyId, awsSecretKey);
  var fromS3 = s3.getObject(bucket, file);
  var data = Utilities.parseCsv(fromS3.getDataAsString(), '\t');
  liquidityBalance.clearContents();
  if (data.length === 0) {
    return; // nothing to import
  }

  var numRows = data.length;
  var numCols = data[0].length; // number of elements in each inner array
  var outerArray = [];
  for (var i = 0; i < numRows; i++) {
    var row = []; // reset on every loop
    var thisRow = data[i];
    for (var a = 0; a < numCols; a++) {
      row.push(thisRow[a]);
    }
    outerArray.push(row);
  }
  // Single batched write — one API call for the whole data set.
  liquidityBalance.getRange(1, 1, outerArray.length, numCols).setValues(outerArray);
}
Comments
Post a Comment