Using a cronjob on node for storing a json file, parsing it and update a database
I need to retrieve a file containing boat locations from a website every minute.
Then I parse the file, delete all the boats from my database and re-create them at the new positions indicated in the file.
Here's my code so far - in the server.js of my loopbackjs app:
// assumed requires, not shown in the original snippet: loopback and loopback-boot
var loopback = require('loopback');
var boot = require('loopback-boot');

var CronJob = require('cron').CronJob;
var exec = require("child_process").exec;
var reload = require('require-reload')(require);

var app = module.exports = loopback();

// Bootstrap the application, configure models, datasources and middleware. (loopbackjs mandatory code)
boot(app, __dirname, function (err) {
    if (err) throw err;

    var Boat = app.models.Boat;
    var ais_url_france = 'http://data.aishub.net/ws.php?username=XXXXXXXXX&format=1&output=json&compress=0&latmin=40&latmax=52&lonmin=-5&lonmax=10';

    // start the cronjob on a node env every min
    new CronJob('* * * * *', function () {
        // get the file and save it as a .json
        exec('wget "' + ais_url_france + '" -O aishub_france.json', function (error, stdout, stderr) {
            // I need to use reload here because the file is otherwise cached in memory
            // and only read once after the first download...
            var file = reload('../aishub_france.json');
            if (file[0].ERROR === false) {
                var boats = file[1];
                if (boats.length > 0) {
                    Boat.destroyAll({typeId: 'aishub_boats'}, function (err, info) {
                        for (var i = 0; i < boats.length; i++) {
                            Boat.create({
                                name: boats[i].NAME,
                                latlng: {
                                    lat: boats[i].LATITUDE,
                                    lng: boats[i].LONGITUDE
                                },
                                typeId: 'aishub_boats'
                            });
                        }
                    });
                }
            }
        });
    }, null, true, null);
});
I'm not sure the code here is that great, but here are my troubles:
- The downloaded file is huge (1.3 MB).
- I need to remove and re-add all the boats because they change location and some might appear/disappear (maybe there is a better solution).
- I'm not sure using require-reload is the best solution, but if I don't use it, the file is only read once by the cronjob even though the downloaded file changes each time (see the sketch after this list for one alternative).
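On that last point: plain require caches the JSON as a module, which is why require-reload is needed at all. One alternative is to skip the module system entirely and re-read the file with Node's built-in fs module on every tick; a minimal sketch, where the filename is assumed to match what the wget call above writes to the working directory:

var fs = require('fs');

// Re-read and re-parse the freshly downloaded file on every cron tick,
// bypassing require's module cache entirely.
fs.readFile('aishub_france.json', 'utf8', function (readErr, contents) {
    if (readErr) {
        return console.error('could not read AIS file:', readErr);
    }
    var file = JSON.parse(contents);
    if (file[0].ERROR === false) {
        var boats = file[1];
        // ...same destroyAll/create logic as in the snippet above...
    }
});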
I also think I might have a memory leak, because I've been getting strange stack traces since I added this code. Here is the trace:
33503717 ms: Mark-sweep 1288.0 (1403.6) -> 1288.0 (1404.6) MB, 672.5 / 0.0 ms [allocation failure] [GC in old space requested].
33504750 ms: Mark-sweep 1288.0 (1404.6) -> 1288.0 (1404.6) MB, 847.3 / 0.0 ms [allocation failure] [GC in old space requested].
33505597 ms: Mark-sweep 1288.0 (1404.6) -> 1288.0 (1403.6) MB, 846.4 / 0.0 ms [last resort gc].
33506438 ms: Mark-sweep 1288.0 (1403.6) -> 1288.0 (1403.6) MB, 841.4 / 0.0 ms [last resort gc].
javascript node.js
edited Jan 24 at 13:09 by Julien Rousé
asked Jan 24 at 9:31 by F3L1X79
Are you using any sort of compression for the file download? Secondly, does the file need to be written to disk? Could you stream from the remote location instead and handle it directly in memory? Those are the areas I'd be looking at. – James, Jan 29 at 1:42
Thank you! I'll try to make these changes tomorrow and tell you if it runs smoother. – F3L1X79, Jan 29 at 13:46
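James's suggestion of keeping everything in memory can be sketched with Node's built-in http module, with no download to disk at all (the accepted answer below does the same thing with the request package and a compressed payload). A hedged sketch; the URL and JSON layout are assumed to be the same as in the question:

var http = require('http');

var ais_url_france = 'http://data.aishub.net/ws.php?username=XXXXXXXXX&format=1&output=json&compress=0&latmin=40&latmax=52&lonmin=-5&lonmax=10';

// Stream the AIS response straight into a buffer instead of shelling out to wget.
http.get(ais_url_france, function (res) {
    var chunks = [];
    res.on('data', function (chunk) { chunks.push(chunk); });
    res.on('end', function () {
        var file = JSON.parse(Buffer.concat(chunks).toString('utf8'));
        if (file[0].ERROR === false) {
            var boats = file[1];
            // ...destroyAll/create as in the question...
        }
    });
}).on('error', function (err) {
    console.error('AIS download failed:', err);
});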
1 Answer (accepted)
OK, thank you James!
I was able to get rid of require-reload by using request, and to handle the compression on the fly with adm-zip:
var CronJob = require('cron').CronJob;
var request = require('request');
var AdmZip = require('adm-zip');

var ais_url_france = 'http://data.aishub.net/ws.php?username=XXX&format=1&output=json&compress=1&latmin=40&latmax=52&lonmin=-5&lonmax=10';

new CronJob('* * * * *', function () {
    request({url: ais_url_france, encoding: null}, function (error, response, data) {
        if (!error && response.statusCode === 200) {
            var zip = new AdmZip(data);
            var zipEntries = zip.getEntries();
            if (zipEntries && zipEntries.length) {
                var file = JSON.parse(zip.readAsText(zipEntries[0]));
                if (file[0].ERROR === false) {
                    var boats = file[1];
                    if (boats.length > 0) {
                        Boat.destroyAll(function (err, info) {
                            for (var i = 0; i < boats.length; i++) {
                                Boat.create({
                                    name: boats[i].NAME,
                                    latlng: {
                                        lat: boats[i].LATITUDE,
                                        lng: boats[i].LONGITUDE
                                    }
                                });
                            }
                        });
                    }
                }
            }
        }
    });
}, null, true, null);
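A small robustness note on the snippet above: neither destroyAll nor create reports failures, so a connector error would pass silently and the next minute's run would still wipe the table. A hedged variant of the inner block with basic error logging (Boat and boats as in the answer):

Boat.destroyAll(function (destroyErr) {
    if (destroyErr) {
        return console.error('destroyAll failed:', destroyErr);
    }
    boats.forEach(function (b) {
        Boat.create({
            name: b.NAME,
            latlng: { lat: b.LATITUDE, lng: b.LONGITUDE }
        }, function (createErr) {
            if (createErr) {
                console.error('create failed for boat', b.NAME, createErr);
            }
        });
    });
});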
edited Feb 6 at 15:21
answered Jan 31 at 9:28 by F3L1X79
Glad to hear you've made some progress. I'm noticing you are passing compress=0 to the API - this definitely isn't a flag to determine whether the response should be compressed? – James, Feb 1 at 12:24
Yes it is. But I wasn't able to manage the .gzip in memory after that. Too bad, but at least the memory leak is gone! – F3L1X79, Feb 1 at 12:28
In what sense? If you're happy with what you now have then no worries, but with a 1.3 MB download that's still going to have a significant latency problem (depending on your download speed and the upload speed of the target server). Compression might add extra latency too, though, so it's about weighing up the pros and cons. – James, Feb 1 at 13:52
You are right, I'll definitely check this problem again - but the most important part is solved, thanks to you! The .gzip file is 315 KB. If I get some time soon, I'll update my post here with what causes me trouble. – F3L1X79, Feb 1 at 14:42
That's approximately a 10th of the size of the original file download - sounds like it'd be worth the time to sort. – James, Feb 1 at 15:30
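For the gzip-in-memory problem mentioned in these comments, Node's built-in zlib module can inflate the raw response buffer before JSON.parse, still without touching disk. A minimal sketch, assuming the API really does return a plain gzip stream for the chosen compress value (worth verifying against the AISHub docs); note that request's own gzip: true option only covers HTTP Content-Encoding, not a gzipped payload served as a file:

var request = require('request');
var zlib = require('zlib');

// ais_url_france as declared in the answer above.
request({url: ais_url_france, encoding: null}, function (error, response, body) {
    if (error || response.statusCode !== 200) {
        return console.error('AIS request failed:', error || response.statusCode);
    }
    // body is a Buffer because encoding is null; inflate it in memory.
    zlib.gunzip(body, function (gzErr, inflated) {
        if (gzErr) {
            return console.error('gunzip failed:', gzErr);
        }
        var file = JSON.parse(inflated.toString('utf8'));
        if (file[0].ERROR === false) {
            // ...same destroyAll/create logic as in the answer...
        }
    });
});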