variable probably outside of scope - javascript

Although the code may seem a little heavy at first (it probably is anyway), it is kinda simple. All I want to do is read some data from a JSON file (which is in the following format: {"news":[{}]}) and push some info into an array that I get from another JSON file, which comes from one of newsapi.org's servers. My problem is probably caused by hoisting, but I do not know how to fix it! In the console it spits out: Uncaught TypeError: Cannot read property 'title' of undefined
What follows below is a snippet from my defunct code (I am aware that it looks messy :P).
function appendNews(id) {
    var xhr = new XMLHttpRequest();
    xhr.open("GET", "https://newsapi.org/v2/top-headlines?sources=" + id +
        "&apiKey=8fdd732e76664e06b177a20fb295129c", true);
    xhr.send();
    xhr.addEventListener("readystatechange", processRequest, false);
    xhr.onreadystatechange = processRequest();
    function processRequest() {
        if (xhr.readyState == 4 && xhr.status == 200) {
            var response = JSON.parse(xhr.responseText);
            var container = document.getElementById("newsContainer");
            for (var i = 0; i < response.articles.length; i++) {
                fs.readFile('./data/events/news.json', 'utf8', function(err, data) {
                    if (err) {
                        console.log(err)
                    } else {
                        var file = JSON.parse(data)
                        file.news.push({
                            "title": response.articles[i].title,
                            "description": response.articles[i].description,
                            "url": response.articles[i].url,
                            "urlImg": response.articles[i].urlToImage,
                            "time": response.articles[i].publishedAt,
                            "sourceAndAuthor": response.articles[i].author + " (" +
                                response.articles[i].source.name + ")"
                        })
                        var toStringify = JSON.stringify(file);
                        fs.writeFile('./data/events/news.json', toStringify, 'utf8', function(err) {
                            if (err) {
                                console.log(err);
                            } else {
                            }
                        });
                    }
                });
            }
        }
EDIT: Further down in my code, I use the same variables, and then it works:
var holder = document.createElement("button");
holder.setAttribute("id", "newsHolder");
holder.setAttribute("onclick", "openArticle(" + "'" +
response.articles[i].title + "'" + ")")
var title = document.createElement("div");
title.setAttribute("id", "newsTitle");
title.innerHTML = response.articles[i].title;
var image = document.createElement("img");
image.setAttribute("src",response.articles[i].urlToImage)
image.setAttribute("id", "articleImage");
holder.appendChild(title);
holder.appendChild(image);
container.appendChild(holder);
fadeIn("newsContainer",1,0.01,1);

This is a major issue in your control flow:
for (var i = 0; i < response.articles.length; i++) {
    fs.readFile('./data/events/news.json', 'utf8', function(err, data) {
        ...
        var file = JSON.parse(data)
        ...
        var toStringify = JSON.stringify(file);
        fs.writeFile('./data/events/news.json', toStringify, 'utf8', function(err) {
        });
    });
}
Writing it like this shows a misunderstanding of asynchronous programming: what really ends up happening is a race condition in which the last fs.writeFile() to complete successfully will have appended only one entry from response.articles to your JSON, even if you were to fix the TypeError.
You're expecting the code to operate like this:
i = 1 :  r p w
i = 2 :  r p w
i = 3 :  r p w
...
i = L :  r p w
where r, p, w stand for fs.readFile(), file.news.push() and fs.writeFile() respectively, and L is response.articles.length. But what really happens is this:
i = 1..L :  r r r r r ... r    (the for loop queues every read before a single callback has run)
then        p p p p p ... p    (the pushes run later, in no guaranteed order)
            w w w w w ... w    (the writes run later, in no guaranteed order)
Your pushes and writes will occur in a non-guaranteed order; they will not necessarily be sequential. By the time the callbacks run, the loop has already finished, so you're attempting to access response.articles[i] when i === response.articles.length, and of course there will be no article there.
The first change would be to use
for (let i = 0; i < response.articles.length; i++) {
but that will not fix the race condition explained above; it will just give i block (lexical) scope, so each callback captures its own value of i and at least you won't be accessing out of bounds.
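To see why that change matters on its own, here is a tiny, self-contained sketch (independent of the news code, purely an illustration): callbacks scheduled inside a var loop all see the final value of the counter, while a let loop gives each callback its own binding.
// With var, every callback sees i === 3 (the value after the loop has finished)
for (var i = 0; i < 3; i++) {
    setTimeout(function () { console.log('var i =', i); }, 0); // logs 3, 3, 3
}
// With let, each iteration gets its own binding, so the callbacks see 0, 1, 2
for (let j = 0; j < 3; j++) {
    setTimeout(function () { console.log('let j =', j); }, 0); // logs 0, 1, 2
}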
In order to avoid unnecessary reading and writing, you can simply fs.readFile() once, loop through your articles and push them to file.news in the callback, and then fs.writeFile() after the end of the for loop. That would look something like this:
// add your event listener BEFORE sending
xhr.addEventListener('load', processRequest, false);
xhr.send();
function processRequest() {
    // no need to check readyState and status if you use the `load` event
    fs.readFile(..., (err, data) => {
        // handle error
        if (err) {
            console.log(err);
            return;
        }
        const { articles = [] } = JSON.parse(xhr.responseText);
        const file = JSON.parse(data);
        for (const article of articles) {
            const {
                title,
                description,
                url,
                urlToImage: urlImg,
                publishedAt: time,
                author,
                source: { name }
            } = article;
            file.news.push({
                title,
                description,
                url,
                urlImg,
                time,
                sourceAndAuthor: `${author} (${name})`
            });
        }
        const json = JSON.stringify(file);
        fs.writeFile(..., (err) => {
            if (err) {
                console.log(err);
                return;
            }
        });
    });
}
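If you are on a Node version that has fs.promises (v10+), the same read-once / push-all / write-once idea can be written with async/await. This is only a sketch, reusing the file path and the response shape from the question:
const fs = require('fs').promises;

async function saveArticles(articles) {
    // read the existing news file once, append every article, then write once
    const file = JSON.parse(await fs.readFile('./data/events/news.json', 'utf8'));
    for (const article of articles) {
        file.news.push({
            title: article.title,
            description: article.description,
            url: article.url,
            urlImg: article.urlToImage,
            time: article.publishedAt,
            sourceAndAuthor: article.author + ' (' + article.source.name + ')'
        });
    }
    await fs.writeFile('./data/events/news.json', JSON.stringify(file), 'utf8');
}
It could then be called from the load handler with saveArticles(JSON.parse(xhr.responseText).articles).catch(console.log).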

Related

find specific returned loop data node.js

I have this function:
var arrayOfNumbers = []
var getTexts = new cronJob('45 * * * * *', function() {
    var viewConformationEmails = "select * from clients";
    ibmdb.open(ibmdbconn, function(err, conn) {
        if (err) return console.log(err);
        conn.query(viewConformationEmails, function(err, rows) {
            if (err) {
                console.log(err);
            } else if (!err) {
                console.log("Success")
            }
            for (var i = 0; i < rows.length; i++) {
                // arrayOfNumbers.push(rows[i].NAME)
                arrayOfNumbers.push(rows[i]["PHONE_NUMBER"])
                var stringg = rows[i]["MINUTE"] + " " + rows[i]["HOUR"] + " * " + "* " + "*"
                var textJob = new cronJob(stringg, function() {
                    client.messages.create({
                        to: arrayOfNumbers[i],
                        from: '12055578708',
                        body: 'Your training session just finished.'
                    }, function(err, data) {});
                }, null, true);
            }
            conn.close(function() {
                // console.log("closed the function /login");
            });
        });
    });
}, null, true)
What it does is loop through my DB table clients, and in the loop grab the phone number as well as the minute and hour at which the user needs a text message sent to them. However, what I need it to do is ONLY SEND THE MESSAGE TO THE PHONE_NUMBER that has a time slot at x time.
So if the db returns 5 people, but only 1 of those people has a time slot at 9am, how do I only save that one person, or check for it?
This is a relatively general question about fixing returned loop data, so I am just trying to figure out how to fix this. Any ideas?
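The same loop/closure issue from the first question applies here: by the time each inner cronJob callback fires, the shared i has already moved on, so arrayOfNumbers[i] no longer points at the row it was meant to capture. A minimal sketch of one way to handle it, capturing each row with let/const and only scheduling a job for rows whose stored HOUR/MINUTE match the slot you care about (the column names come from the question; the 9:00 check is purely an illustrative assumption):
conn.query(viewConformationEmails, function(err, rows) {
    if (err) return console.log(err);
    for (let i = 0; i < rows.length; i++) {      // let: each iteration keeps its own row
        const row = rows[i];
        // only schedule a text for people whose slot matches the time we care about
        if (Number(row["HOUR"]) !== 9 || Number(row["MINUTE"]) !== 0) continue;
        const schedule = row["MINUTE"] + " " + row["HOUR"] + " * * *";
        new cronJob(schedule, function() {
            client.messages.create({
                to: row["PHONE_NUMBER"],         // the captured row, not arrayOfNumbers[i]
                from: '12055578708',
                body: 'Your training session just finished.'
            }, function(err, data) {});
        }, null, true);
    }
    conn.close(function() {});
});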

Get Byte Position during Upload Loop

I am working on a function that will write data to a remote server in chunks using a 3rd-party API. Through some help on Stack Overflow I was able to accomplish this, and it is now working as expected. The problem is that I can only get a single 16 KB chunk to write, as I need to advance the pos of where the next bytes are written.
The initial write starts at 0 easily enough. Due to my unfamiliarity with this, though, I am unsure if the next pos should just be 16 or what. If it helps, the API call writeFileChunk() takes 3 parameters: filepath (str), pos (int64), and data (base64 encoded string).
reader.onload = function(evt)
{
    // Get SERVER_ID from URL
    var server_id = getUrlParameter('id');
    $("#upload_status").text('Uploading File...');
    $("#upload_progress").progressbar('value', 0);
    var chunkSize = 16<<10;
    var buffer = evt.target.result;
    var fileSize = buffer.byteLength;
    var segments = Math.ceil(fileSize / chunkSize); // How many segments do we need to divide into for upload
    var count = 0;
    // start the file upload
    (function upload()
    {
        var segSize = Math.min(chunkSize, fileSize - count * chunkSize);
        if (segSize > 0)
        {
            $("#upload_progress").progressbar('value', (count / segments));
            var chunk = new Uint8Array(buffer, count++ * chunkSize, segSize); // get a chunk
            var chunkEncoded = btoa(String.fromCharCode.apply(null, chunk));
            // Send Chunk data to server
            $.ajax({
                type: "POST",
                url: "filemanagerHandler.php",
                data: { 'action': 'writeFileChunk', 'server_id': server_id, 'filepath': filepath, 'pos': 0, 'chunk': chunkEncoded },
                dataType: 'json',
                success: function(data)
                {
                    console.log(data);
                    setTimeout(upload, 100);
                },
                error: function(XMLHttpRequest, textStatus, errorThrown)
                {
                    alert("Status: " + textStatus); alert("Error: " + errorThrown); alert("Message: " + XMLHttpRequest.responseText);
                }
            });
        }
        else
        {
            $("#upload_status").text('Finished!');
            $("#upload_progress").progressbar('value', 100);
            getDirectoryListing(curDirectory);
        }
    })()
};
The current position in the file on the client side is represented by the second argument of this line, specifically its value before the post-increment:
var chunk = new Uint8Array(buffer, count++ * chunkSize, segSize);
In this case, though, count advances (count++) before you can reuse it, so if you need the actual position (below as pos) you can extract it by rewriting the line as:
var pos = count++ * chunkSize; // here chunkSize = 16kb
var chunk = new Uint8Array(buffer, pos, segSize);
Each position update advances by 16 KB, since that is the chunk size. Progress can then be calculated as pos / fileSize * 100. This of course assumes the unencoded buffer size is used.
The only special case is the last chunk, but when there are no more chunks left to read the position should be equal to the file length (fileSize), so it should be pretty straightforward.
When the ajax call returns, the server should be at the same position unless something went wrong (connection dropped, write access changed, disk full, etc.).
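Tying that back to the question's $.ajax call: the extracted pos replaces the hard-coded 0 and can drive the progress bar as well. A small sketch of just the changed lines (everything else in the upload() function stays as in the question):
var pos = count++ * chunkSize;                      // byte offset of this chunk
var chunk = new Uint8Array(buffer, pos, segSize);
var chunkEncoded = btoa(String.fromCharCode.apply(null, chunk));
$("#upload_progress").progressbar('value', pos / fileSize * 100);
$.ajax({
    type: "POST",
    url: "filemanagerHandler.php",
    data: { 'action': 'writeFileChunk', 'server_id': server_id,
            'filepath': filepath, 'pos': pos, 'chunk': chunkEncoded },   // pos instead of 0
    dataType: 'json',
    success: function(data) { setTimeout(upload, 100); }
});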
You can use Filereader API to read the chunks and send it to your remote server.
HTML
<input type="file" id="files" name="file" /> Read bytes:
<span class="readBytesButtons">
<button>Read entire file in chunks</button>
</span>
Javascript
// Post data to your server.
function postChunk(obj) {
    var url = "https://your.remote.server";
    return new Promise(function(resolve, reject) {
        var xhr = new XMLHttpRequest();
        xhr.open('post', url, true);
        xhr.responseType = 'json';
        xhr.onload = function() {
            var status = xhr.status;
            if (status == 200) {
                resolve(xhr.response);
            } else {
                reject(status);
            }
        };
        var params = "";
        // check that obj has the proper keys and create the url parameters
        // (the key names must be quoted strings here, otherwise hasOwnProperty(action)
        //  throws a ReferenceError because action etc. are undefined variables)
        if (obj.hasOwnProperty('action') && obj.hasOwnProperty('server_id') && obj.hasOwnProperty('filepath') && obj.hasOwnProperty('pos') && obj.hasOwnProperty('chunk')) {
            params += "action=" + obj['action'] + "&server_id=" + obj['server_id'] + "&filepath=" + obj['filepath'] + "&pos=" + obj['pos'] + "&chunk=" + obj['chunk'];
        }
        if (params.length > 0) {
            // the body is URL-encoded form data, so say so or the server-side handler won't see it in $_POST
            xhr.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded');
            xhr.send(params);
        } else {
            alert('Error');
        }
    });
}
// add chunk to "obj" object and post it to server
function addChunk(reader, obj, divID) {
    reader.onloadend = function(evt) {
        if (evt.target.readyState == FileReader.DONE) { // DONE == 2
            obj.chunk = evt.target.result;
            console.log(obj);
            document.getElementById(divID).textContent +=
                ['Sending bytes: ', obj.pos * 16000, ' - ', (obj.pos * 16000) + obj.chunk.length,
                 '\n'].join('');
            // post data to server
            postChunk(obj).then(function(data) {
                if (data !== "" && data !== null && typeof data !== "undefined") {
                    // chunk was sent successfully
                    document.getElementById(divID).textContent +=
                        ['Sent bytes: ', obj.pos * 16000, ' - ', (obj.pos * 16000) + obj.chunk.length, '\n'].join('');
                } else {
                    alert('Error! Empty response');
                }
            }, function(status) {
                alert('Resolve Error');
            });
        }
    };
}
// read and send Chunk
function readChunk() {
    var files = document.getElementById('files').files;
    if (!files.length) {
        alert('Please select a file!');
        return;
    }
    var file = files[0];
    var size = parseInt(file.size);
    var chunkSize = 16000;
    var chunks = Math.ceil(size / chunkSize);
    var start, stop = 0;
    var blob = [];
    for (var i = 0; i < chunks; i++) {
        start = i * chunkSize;
        // slice()'s end index is exclusive, so no "-1" here or a byte
        // would be dropped from every chunk
        stop = (i + 1) * chunkSize;
        if (i == (chunks - 1)) {
            stop = size;
        }
        var reader = new FileReader();
        blob = file.slice(start, stop);
        reader.readAsBinaryString(blob);
        var obj = {action: 'writeFileChunk', server_id: 'sid', filepath: 'path', pos: i, chunk: ""};
        var div = document.createElement('div');
        div.id = "bytes" + i;
        document.body.appendChild(div);
        addChunk(reader, obj, div.id);
    }
}
// Check for the various File API support.
if (window.File && window.FileReader && window.FileList && window.Blob) {
console.log(' Great success! All the File APIs are supported.');
} else {
alert('The File APIs are not fully supported in this browser.');
}
document.querySelector('.readBytesButtons').addEventListener('click', function(evt) {
if (evt.target.tagName.toLowerCase() == 'button') {
readChunk();
}
}, false);
You can check this example in this Fiddle
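One thing to watch with this approach: the question's writeFileChunk() API expects the data as a base64-encoded string, while readAsBinaryString() hands back a raw binary string (readAsArrayBuffer() is also the generally recommended replacement these days). A small sketch of how the chunk could instead be read as an ArrayBuffer and base64-encoded before being attached to obj, using the same file/start/stop/obj/postChunk names as in readChunk() above:
var reader = new FileReader();
reader.onloadend = function (evt) {
    if (evt.target.readyState !== FileReader.DONE) return;
    // convert the ArrayBuffer to a binary string, then base64-encode it
    var bytes = new Uint8Array(evt.target.result);
    var binary = '';
    for (var b = 0; b < bytes.length; b++) {
        binary += String.fromCharCode(bytes[b]);
    }
    obj.chunk = btoa(binary);          // base64, as writeFileChunk() expects
    postChunk(obj);
};
reader.readAsArrayBuffer(file.slice(start, stop));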

Wait for loop ending and execute second func

I have 2 functions:
function parseFriends_new(range) {
var xml = window.XMLHttpRequest ? new XMLHttpRequest : new ActiveXObject("Microsoft.XMLHTTP");
var params = "gwt.requested=" + gwtHash + "&refId=search-d-" + Date.now() + "&d.sq=&d.o=" + range + "&d.d=d.in";
xml.open("POST", "/dk?cmd=FriendsSearch", true);
xml.setRequestHeader("Content-type", "application/x-www-form-urlencoded");
xml.setRequestHeader('TKN', TKN);
xml.onreadystatechange = function() {
if (4 == xml.readyState && 200 == xml.status) {
}
}
xml.send(params);
}
function postNoteAndTag(friendsList, repostId, trashText) {...}
I need to execute postNoteAndTag after parseFriends_new
for (var i = 0, range = 0; i < 20; i++, range += 10)
{
parseFriends_new(range);
}
//wait for loop and then execute next code
postNoteAndTag(friendsList, repostId, trashText);
How to do this?
Since parseFriends_new is asynchronous you might need to modify it so that it works with callbacks:
function parseFriends_new(range, done) {
    var xml = window.XMLHttpRequest ? new XMLHttpRequest : new ActiveXObject("Microsoft.XMLHTTP");
    var params = "gwt.requested=" + gwtHash + "&refId=search-d-" + Date.now() + "&d.sq=&d.o=" + range + "&d.d=d.in";
    xml.open("POST", "/dk?cmd=FriendsSearch", true);
    xml.setRequestHeader("Content-type", "application/x-www-form-urlencoded");
    xml.setRequestHeader('TKN', TKN);
    xml.onreadystatechange = function() {
        // onreadystatechange fires several times per request, so only invoke
        // the callback once the request has actually finished (readyState 4)
        if (4 == xml.readyState) {
            if (200 == xml.status) {
                // handle the response here if needed
            }
            // The AJAX request finished => we can invoke the callback.
            // you could also pass it some result if needed
            done();
        }
    }
    xml.send(params);
}
and then:
// We've got 20 AJAX requests to make at total
var count = 20;
for (var i = 0, range = 0; i < count; i++, range += 10) {
parseFriends_new(range, function() {
--count;
if (count <= 0) {
// All AJAX requests have finished executing
// you can now do whatever you intended to do:
postNoteAndTag(friendsList, repostId, trashText);
}
});
}
Bear in mind, though, that hammering your web server with so many requests in a burst is bad practice: it's bad for your server, bad for the client, and bad for the network.
A much better solution is to send a single AJAX request to the server with the whole payload. It's far more efficient to send one large AJAX request than many small ones. Of course, you might need to adapt your server-side script to be able to process such a request.
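For illustration only, a sketch of that single-request idea: collect all the ranges up front, post them in one request, and call postNoteAndTag when it completes. The FriendsSearchBatch command and the d.ranges parameter are made up for this example; the server side would have to be adapted to accept a list of ranges as described above.
// collect every range we would otherwise have requested one by one
var ranges = [];
for (var i = 0, range = 0; i < 20; i++, range += 10) {
    ranges.push(range);
}
var xml = new XMLHttpRequest();
xml.open("POST", "/dk?cmd=FriendsSearchBatch", true);            // hypothetical batch endpoint
xml.setRequestHeader("Content-type", "application/x-www-form-urlencoded");
xml.onreadystatechange = function () {
    if (xml.readyState == 4 && xml.status == 200) {
        // one response for all ranges => safe to continue
        postNoteAndTag(friendsList, repostId, trashText);
    }
};
xml.send("gwt.requested=" + gwtHash + "&d.ranges=" + ranges.join(","));  // d.ranges is a made-up parameter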

Conceptual: Create a counter in an external function

I've written some javascript to successfully download hundreds of files from an external site, using wget at the core.
After downloading all of the files, I would like to do some stuff with them. The issue is that the files aren't of equal size, so the last wget spawned isn't necessarily the last file to finish downloading, meaning I can't really tell when the last file has completed.
I do, however, know how many files there are in total, and the number associated with each wget.
I have 3 js files, [parseproducts.js] ==> [createurl.js] ==> [downloadurl.js]
Using this information, how can I tell when all of the files have been downloaded?
I tried creating a "ticker" function in another file but the function resets itself on each instance, so it doesn't work at all!
Edit: code added. I didn't do this initially because I didn't think people would want to trawl through it! I'm new to programming/JavaScript/Node. Please let me know if there's something I could do better (I'm sure most of it could be more efficient!).
parseproducts.js
var fs = require('fs');
var iset = require('./ticker.js');
var createurl = require('./createurl.js');
var array = [];
filename = 'productlist.txt';
fs.readFile(filename, 'utf8', function(err, data) {
if (err) throw err;
content = data;
parseFile();
});
function parseFile() {
var stringarray = String(content).split(";");
for (var index = 0; index < stringarray.length; ++index) {
createurl(stringarray[index],index,stringarray.length);
console.log(index+'/'+stringarray.length+' sent.');
if (index === 0) {
iset(true,stringarray.length);
} else {
iset (false,stringarray.length);
}
};
};
createurl.js
function create(partnumber,iteration,total) {
var JSdownloadURL = require('./downloadurl.js');
JSdownloadURL(createurl(partnumber),partnumber,iteration,total);
function createurl(partnumber) {
var URL = ('"https://data.icecat.biz/xml_s3/xml_server3.cgi?prod_id='+partnumber+';vendor=hp;lang=en;output=productxml"');
return URL;
};
};
module.exports = create;
downloadurl.js
function downloadurl(URL,partnumber,iteration,total) {
// Dependencies
var fs = require('fs');
var url = require('url');
var http = require('http');
var exec = require('child_process').exec;
var spawn = require('child_process').spawn;
var checkfiles = require('./checkfiles.js');
// App variables
var file_url = URL;
var DOWNLOAD_DIR = './downloads/';
// We will be downloading the files to a directory, so make sure it's there
var mkdir = 'mkdir -p ' + DOWNLOAD_DIR;
var child = exec(mkdir, function(err, stdout, stderr) {
if (err) throw err;
else download_file_wget(file_url);
});
// Function to download file using wget
var download_file_wget = function(file_url) {
// compose the wget command
var wget = 'wget --http-user="MyAccount" --http-password="MyPassword" -P ' + DOWNLOAD_DIR + ' ' + file_url;
// excute wget using child_process' exec function
var child = exec(wget, function(err, stdout, stderr) {
if (err) throw err;
else console.log(iteration+'/'+total+' downloaded. '+partnumber + ' downloaded to ' + DOWNLOAD_DIR);
});
};
};
module.exports = downloadurl;
Failed attempt ticker.js
function iset(bol,total) {
if (bol === true) {
var i = 0;
} else {
var i = 1;
};
counter(i, total);
}
function counter(i,total) {
var n = n + i;
if (n === (total - 1)) {
var checkfiles = require('./checkfiles.js');
checkfiles(total);
} else {
console.log('nothing done');
};
}
module.exports = iset;
Update (in response to answer): This is what my code looks like now. However, I get this error:
child_process.js:945
throw errnoException(process._errno, 'spawn');
^
Error: spawn EMFILE
// Dependencies
var fs = require('fs');
var url = require('url');
var http = require('http');
var exec = require('child_process').exec;
var spawn = require('child_process').spawn;
var checkfiles = require('./checkfiles.js');
function downloadurl(URL,partnumber,iteration,total,clb) {
// App variables
var file_url = URL;
var DOWNLOAD_DIR = './downloads/';
// We will be downloading the files to a directory, so make sure it's there
var mkdir = 'mkdir -p ' + DOWNLOAD_DIR;
var child = exec(mkdir, function(err, stdout, stderr) {
if (err) throw err;
else download_file_wget(file_url);
});
var child = exec(mkdir, function(err, stdout, stderr) {
if (err) {
clb(err);
} else {
var wget = 'wget --http-user="amadman114" --http-password="Chip10" -P ' + DOWNLOAD_DIR + ' ' + file_url;
// excute wget using child_process' exec function
var child = exec(wget, function(err, stdout, stderr) {
if (err) {
clb(err);
} else {
console.log(iteration+'/'+total+' downloaded. '+partnumber + ' downloaded to ' + DOWNLOAD_DIR);
clb(null); // <-- you can pass more args here if you want, like result
// as a general convention callbacks take a form of
// callback(err, res1, res2, ...)
}
});
}
});
};
function clb() {
var LIMIT = 100,
errs = [];
for (var i = 0; i < LIMIT; i++) {
downloadurl(URL,partnumber,iternation,total, function(err) {
if (err) {
errs.push(err);
}
LIMIT--;
if (!LIMIT) {
finalize(errs);
}
});
}
}
function finalize(errs) {
// you can now check for err
//or do whatever stuff to finalize the code
}
module.exports = downloadurl;
OK, so you have this function downloadurl. What you need to do is to pass one more argument to it: the callback. And please, move requirements outside the function and don't define a function in a function unless necessary:
var fs = require('fs');
// other dependencies and constants
function downloadurl(URL, partnumber, iteration, total, clb) { // <-- new arg
    // some code
    var child = exec(mkdir, function(err, stdout, stderr) {
        if (err) {
            clb(err);
        } else {
            var wget = 'wget --http-user="MyAccount" --http-password="MyPassword" -P ' + DOWNLOAD_DIR + ' ' + file_url;
            // excute wget using child_process' exec function
            var child = exec(wget, function(err, stdout, stderr) {
                if (err) {
                    clb(err);
                } else {
                    console.log(iteration + '/' + total + ' downloaded. ' + partnumber + ' downloaded to ' + DOWNLOAD_DIR);
                    clb(null); // <-- you can pass more args here if you want, like result
                               // as a general convention callbacks take a form of
                               // callback(err, res1, res2, ...)
                }
            });
        }
    });
};
This looks nicer, doesn't it? Now when you call that function multiple times you do:
var LIMIT = 100,
errs = [];
for (var i = 0; i < LIMIT; i++) {
downloadurl(..., function(err) {
if (err) {
errs.push(err);
}
LIMIT--;
if (!LIMIT) {
finalize(errs);
}
});
}
function finalize(errs) {
// you can now check for err
//or do whatever stuff to finalize the code
}
That's a general idea. You have to tweak it to your needs (in particular you have to modify the intermediate function to accept a callback as well). Of course there are libraries which will take care of most this for you like kriskowal's Q (Q.all) or caolan's async (async.parallel).
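A rough sketch of what the Promise-based route could look like on a Node version with native Promises, wrapping the callback version of downloadurl from above. It also caps how many wget processes run at once, which is what the Error: spawn EMFILE in the update points to (too many child processes being spawned at the same time). The listOfItems array of {url, partnumber} objects is assumed for the example, not taken from the question:
function downloadurlP(URL, partnumber, iteration, total) {
    return new Promise(function (resolve, reject) {
        downloadurl(URL, partnumber, iteration, total, function (err) {
            if (err) { reject(err); } else { resolve(); }
        });
    });
}
// run the downloads in small sequential batches instead of all at once,
// so the number of spawned wget processes stays bounded
function downloadAll(items, batchSize, offset) {
    offset = offset || 0;
    var batch = items.slice(offset, offset + batchSize);
    if (!batch.length) return Promise.resolve();
    return Promise.all(batch.map(function (it, idx) {
        return downloadurlP(it.url, it.partnumber, offset + idx, items.length);
    })).then(function () {
        return downloadAll(items, batchSize, offset + batchSize);
    });
}
downloadAll(listOfItems, 10)
    .then(function () { /* everything finished - run checkfiles etc. */ })
    .catch(function (err) { console.log(err); });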
Not sure if I have understood the problem correctly, as I don't see the code. I have worked on creating a download engine: I used to make background AJAX calls to download files, and after every successful download or 'onComplete' event I incremented one variable to keep track of the downloaded files. This works provided the user doesn't refresh the page until all the downloads are complete; otherwise the download counter can also be saved in LocalStorage.

Node.js - spawn gzip process

I'm trying to gzip some data using Node.js...
Specifically, I have data in 'buf' and I want to write a gzipped form of this to 'stream'.
Here is my code:
c1.on('data', function(buf){
    var gzip = spawn('gzip', ['-' + (compressionRate-0), '-c', '-']);
    gzip.stdin.write(buf);
    gzip.stdout.on('data', function(data){
        console.log(data);
        stream.write(data, 'binary');
    });
});
The trouble is, it simply won't work! I'm not sure of the exact syntax for spawning processes and piping data to them.
Any help greatly appreciated.
Many thanks in advance,
Edit: here is the original working code where I got the idea from. The project is at: https://github.com/indutny/node.gzip
Can anyone work out how to do this spawning in node.js cos I'm totally stuck!
var spawn = require('child_process').spawn,
Buffer = require('buffer').Buffer;
module.exports = function (data) {
var rate = 8,
enc = 'utf8',
isBuffer = Buffer.isBuffer(data),
args = Array.prototype.slice.call(arguments, 1),
callback;
if (!isBuffer && typeof args[0] === 'string') {
enc = args.shift();
}
if (typeof args[0] === 'number') {
rate = args.shift() - 0;
}
callback = args[0];
var gzip = spawn('gzip', ['-' + (rate - 0), '-c', '-']);
var promise = new process.EventEmitter,
    output = [],
    output_len = 0;
// No need to use buffer if no callback was provided
if (callback) {
gzip.stdout.on('data', function (data) {
output.push(data);
output_len += data.length;
});
gzip.on('exit', function (code) {
var buf = new Buffer(output_len);
for (var a = 0, p = 0; p < output_len; p += output[a++].length) {
output[a].copy(buf, p, 0);
}
callback(code, buf);
});
}
// Promise events
gzip.stdout.on('data', function (data) {
promise.emit('data', data);
});
gzip.on('exit', function (code) {
promise.emit('end');
});
if (isBuffer) {
gzip.stdin.encoding = 'binary';
gzip.stdin.end(data.length ? data : '');
} else {
gzip.stdin.end(data ? data.toString() : '', enc);
}
// Return EventEmitter, so node.gzip can be used for streaming
// (thx #indexzero for that tip)
return promise;
};
Why don't you simply use the gzip node library that you are "inspired from" instead of copying the code?
var gzip = require('gzip');
c1.on('data', function(buf){
    gzip(buf, function(err, data){
        stream.write(data, 'binary');
    });
});
Should work using the library. To install it simply type npm install gzip in your terminal.
Do you need to call the 'end' method on gzip.stdin? I.e.:
gzip.stdin.write(buf);
gzip.stdout.on('data',function(data){
console.log(data);
stream.write(data,'binary');
});
gzip.stdin.end();
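As an aside, on a reasonably modern Node the built-in zlib module can gzip a buffer in-process, with no child process to manage at all. A minimal sketch reusing the c1/stream/compressionRate names from the question:
var zlib = require('zlib');
c1.on('data', function (buf) {
    // compressionRate maps to zlib's level option (1-9), like gzip's -1..-9 flags
    zlib.gzip(buf, { level: compressionRate }, function (err, compressed) {
        if (err) return console.log(err);
        stream.write(compressed);
    });
});
For a long-lived stream it may be cleaner to pipe through zlib.createGzip() once rather than compressing each 'data' chunk separately.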
