We are having a little problem with a functional test with casper.js.
We request the same resource twice, first with the GET method and then with the POST method.
Now, when waiting for the second resource (POST), the wait matches the first resource and goes directly to the "then" function.
We would like to be able to check the HTTP method in the "test" function, so that we can identify the resource properly. For now we use the status code (res.status), but that doesn't fully solve our problem; we really need the HTTP method.
// create new email
this.click(xPath('//div[@id="tab-content"]//a[@class="button create"]'));
// GET
this.waitForResource('/some/resource',
    function then() {
        this.test.assertExists(xPath('//form[@id="email_edit_form"]'), 'Email edit form is there');
        this.fill('form#email_edit_form', {
            'email_entity[email]': 'test.bruce@im.com',
            'email_entity[isMain]': 1
        }, true);
        // POST
        this.waitForResource(
            function test(res) {
                return res.url.search('/some/resource') !== -1 && res.status === 201;
            },
            function then() {
                this.test.assert(true, 'Email creation worked.');
            },
            function timeout() {
                this.test.fail('Email creation did not work.');
            }
        );
    },
    function timeout() {
        this.test.fail('Email address creation form has not been loaded');
    });
Or maybe there is a better way to test this scenario? Since this is a functional test, though, we need to keep all those steps in one test.
You can try to alter the form action URL to add a query string, thereby generating a new resource appended to the stack. It could be done this way:
casper.thenEvaluate(function() {
    var form = __utils__.findOne('#email_edit_form');
    form.setAttribute('action', form.getAttribute('action') + '?plop');
});
That's a hack though, and functional testing should never be achieved that way. Let's hope more information will be added to the response objects in the future.
The res parameter that is passed to the test function has an ID. I created a helper that tests against this ID and blacklists it, so the same resource won't get accepted a second time.
var blackListedResourceIds = [],
    testUniqueResource = function (resourceUrl, statusCode) {
        return function (res) {
            // check if resource was already loaded
            var resourceFound = res.url.search(resourceUrl) !== -1;
            // check status code
            if (statusCode !== undefined) {
                resourceFound = resourceFound && res.status === statusCode;
            }
            // check blacklisting
            if (!resourceFound || blackListedResourceIds[res.id] !== undefined) {
                return false;
            } else {
                blackListedResourceIds[res.id] = true;
                return true;
            }
        };
    };
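With the helper in place, the POST wait from the question could look something like this (a sketch; the URL and status code are just the ones from the example above):
this.waitForResource(
    testUniqueResource('/some/resource', 201),
    function then() {
        this.test.assert(true, 'Email creation worked.');
    },
    function timeout() {
        this.test.fail('Email creation did not work.');
    }
);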
I am working with Express. Inside my controller's file read stream I have this condition check, and whenever it is met I no longer want to keep reading the file, because it means the data is not in the correct format:
if (!(waveType in expectedWave && Number(startTime) >= 0 && Number(endTime) > 0)) {
return res.status(500).send(" file format not supported");
}
Even though I am returning here, I am still getting the error "cannot set headers after they are sent to the client", despite returning the response.
SOLUTION AND EXPLANATION
I found the solution. All over the internet the suggested fix is simply to use return, so I am writing this so that anyone who comes here with the same problem can understand what is actually going on.
The error basically means that a status and message have already been sent on the response, for example with res.status(500).send(error.message). If your code does not end there, traverses further, and hits another res.status().send(), an error is thrown, because the response status cannot be assigned a second time.
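As a minimal, hypothetical illustration (the route and messages are made up):
app.get('/demo', (req, res) => {
    res.status(500).send('first response');  // headers are sent here
    res.status(200).send('second response'); // throws "Cannot set headers after they are sent to the client"
});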
In most cases, returning stops code execution. But in the case of an event handler, another event may be triggered while you are returning from the function, and if that event's handler also sends a response, return cannot save you, as in this code:
const rl = readLine.createInterface({
    input: fs.createReadStream(req.file.path)
});
rl.on("line", line => {
    const cols = line.split(",");
    const waveType = cols[0];
    const startTime = cols[1];
    const endTime = cols[2];
    const tags = cols.slice(3);
    // just to make sure csv has valid data
    if (!(waveType in expectedWave && Number(startTime) >= 0 && Number(endTime) > 0)) {
        return res.status(500).send("File not supported");
    }
and further down the code there is:
rl.on("close", () => {
if (notSupportedFlag) {
notSupportedFlag = false
return res.status(500).send("File data is not supported format");
} else {
let heartRate = Math.floor(frequencyCollector.sumOfFrequency / frequencyCollector.cycleCount);
results.meanFrequency = heartRate;
results.maxFrequency.time += Number(req.body.time);
results.minFrequency.time += Number(req.body.time);
res.status(200).json(results);
return;
}
});
return can't save you here, because when you return that response the close event will still be fired, so the only thing we can do is make sure a single response is sent. This is like the answer posted by @tryanner, but I will add one more thing:
// set this variable before reading, with false as the default/start value
let notSupportedFlag = false;
Then, inside rl.on("line", line => { ... }):
if (!(waveType in expectedWave && Number(startTime) >= 0 && Number(endTime) > 0)) {
    notSupportedFlag = true;
    rl.close();
    rl.removeAllListeners();
}
Then, inside the close handler, we do this:
rl.on("close", () => {
if (notSupportedFlag) {
notSupportedFlag = false
return res.status(500).send("File data is not supported format");
} else {
//do whatever you do in case of successful read
return;
}
});
The problem is that the stream keeps sending a response from the line event every time it reads a line.
So, you need to end the stream.
However, it doesn't work quite like that, because:
Calling rl.close() does not immediately stop other events (including
'line') from being emitted by the InterfaceConstructor instance.
https://nodejs.org/api/readline.html#rlclose
which means that line will be emitted again and cause the same error, so you cannot return the response from the line event; it has to be done somewhere else.
You need to rewrite the code accordingly.
For example, add a variable to track the error manually, close the stream in the line handler, and then check for the error in the close handler, returning the error response if there is one and the success response if not.
// track error
let error;
// set up the reader so it can be closed later
const reader = readLine.createInterface({
    //...
}).on("line", line => {
    //...
    if (!(waveType in expectedWave && Number(startTime) >= 0 && Number(endTime) > 0)) {
        error = 'File data entry is not in supported format';
        // close the stream; the response is sent from the close handler
        reader.close();
        return;
    }
    //...
}).on("close", () => {
    if (error) {
        res.status(500).send(error);
        error = '';
        return;
    }
    //...
I think that when you return inside your readLine handlers, it only returns from those handler functions. Try adding a return in front of the readLine.createInterface call.
Hello Stackoverflow community,
In a nutshell
I am wondering why the insert callback is not being called asynchronously, as the documentation says it should be, given code like this:
Meteor.methods({
    addUpdate: function (text) {
        Updates.insert({
            text: text,
            createdAt: new Date(),
            owner_email: Meteor.user().emails[0].address,
            owner_username: Meteor.user().username
        }, function(e, id) {
            debugger; //<-- executed first with 'e' always undefined
        });
        debugger; //<-- executed after
    }
});
The debugger inside the callback function is hit before the debugger after it. If the function were async, the debugger inside the callback should be hit last, right?
More info
I am very new to Meteor. I am building a small app and experimenting; for now I wanted to confirm what I had understood about some concepts, in this case the insert method, given the following code:
lib/collections/updateCollection.js
Update = function (params, id) {
    params = params || {};
    // define properties for the update model, such as text
    this._text = params.text;
};

Update.prototype = {
    // define some getters and setters, such as doc
    get doc() {
        return {
            createdAt: this.createdAt,
            text: this.text,
            owner_email: this.owner_email,
            owner_username: this.owner_username
        };
    },
    notify: function notify(error, id) {
        var client, notification, status;
        client = Meteor.isClient ? window.Website : false;
        notification = (client && window.Hub.update.addUpdate) || {};
        status = (!error && notification.success) || notification.error;
        if (client) {
            return client.notify(status);
        }
    },
    save: function save(callback) {
        var that;
        that = this;
        callback = callback || this.notify;
        Updates.insert(that.doc, function (error, _id) {
            that._id = _id;
            callback(error, _id); // <-- here is the deal
        });
    }
};
lib/methods/updateService.js
updateService = {
    add: function add(text) {
        var update;
        update = new Update({
            text: text,
            createdAt: new Date(),
            owner_email: Meteor.user().emails[0].address,
            owner_username: Meteor.user().username
        });
        update.save();
    },
    // methods to interact with the Update object
};
lib/methods/main/methods.js
Meteor.methods({
    addUpdate: function (text) {
        updateService.add(text);
    }
});
My expectation here is that when the client does something like Meteor.call('addUpdate', text); and everything is fine, a success message is shown; otherwise error is truthy and an error message is shown. What actually happens is that the callback is always called with error undefined (as if everything were fine), and the callback is not called asynchronously either, it is just called directly.
Even when I turn off the connection, the insertion shows a success message.
Any ideas? Maybe my app structure is making Meteor misbehave? I really do not know. Thanks in advance.
Your code is executing inside a method. On the client, methods run only to simulate what the server will do before the server responds (so that the app feels more responsive). Because the DB changes in that simulation are just mirroring what the server is already doing, they are not sent to the server, and are therefore synchronous. On the server, all code runs inside a Fiber, so it behaves synchronously. (However, Fibers run in parallel just like normal callback-style Node.)
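If the goal is to react to the real server result on the client, one option (a sketch, not taken from the original code) is to use the callback of Meteor.call, which fires with the server's error or result once the round trip completes:
// Client side: this callback runs when the server method has finished,
// so `error` reflects the real server-side outcome rather than the client stub.
Meteor.call('addUpdate', text, function (error, result) {
    if (error) {
        Website.notify(Hub.update.addUpdate.error);   // notification names reused from the question's code
    } else {
        Website.notify(Hub.update.addUpdate.success);
    }
});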
client.post(config.apiUrl+"cart", args, function(data,response) {
if (data.status == 'success') {
for (var key in data.data.cart_items_list) {
if (data.data.cart_items_list.hasOwnProperty(key)) {
data.data.cart_items_list[key].child.prodqty = function () {
client.get(config.apiUrl+"product/"+data.data.cart_items_list[key].child.child_id+"/quantity", function(invdata, invresponse) {
if(invdata.status == 'success'){
console.log(invdata.data);
return invdata.data;
console.log(data.data.cart_items_list[key].child.prodqty);
}
})
}
}
}
console.log(data.data.cart_items_list);
}
})
Above is a piece of code I have written to modify the data I get from an API client call.
The first API call returns JSON data. I am looping through that data to get one value from another API and append it to the parent's JSON data.
The line console.log(data.data.cart_items_list[key].child.prodqty); prints the correct value in my logs, but console.log(data.data.cart_items_list); does not have the newly appended (child.prodqty) value in it.
I am very new to Node.js, so I don't know whether I am doing this correctly.
EDIT:
If I console.log(data.data.cart_items_list); the output has prodqty in it, but it comes out like this: prodqty: [Function]
Right now, data.data.cart_items_list ends up with prodqty: [Function]. When you get the prodqty value, you need to store it somewhere you can pick it up after the loop. In my opinion, I prefer to create a new variable and set it there:
client.post(config.apiUrl+"cart", args, function(data,response) {
var prodqty = {};
if (data.status == 'success') {
for (var key in data.data.cart_items_list) {
if (data.data.cart_items_list.hasOwnProperty(key)) {
data.data.cart_items_list[key].child.prodqty = function () {
client.get(config.apiUrl+"product/"+data.data.cart_items_list[key].child.child_id+"/quantity", function(invdata, invresponse) {
if(invdata.status == 'success'){
console.log(invdata.data);
return invdata.data;
prodqty.key = data.data.cart_items_list[key].child.prodqty; //prodqty associated with his key
console.log(data.data.cart_items_list[key].child.prodqty);
}
})
}
}
}
console.log(JSON.stringify(prodqty));
console.log(data.data.cart_items_list);
}
})
I've added an object, prodqty, and I set each quantity under its key. With this code structure there may still be a problem: the API uses a callback, so when the loop finishes it is possible that the prodqty object has not been populated yet. To resolve that, you need to pass a callback to the second API, or wait some time until the second API finishes:
setTimeout(function(){ console.log(JSON.stringify(prodqty)); }, 3000); // 3 seconds, for example
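Alternatively, instead of a fixed timeout, a pending-request counter can tell you when every inner request has come back (a sketch reusing the client and URLs from the code above):
client.post(config.apiUrl + "cart", args, function (data, response) {
    if (data.status !== 'success') return;
    var list = data.data.cart_items_list;
    var keys = Object.keys(list);
    var pending = keys.length;
    keys.forEach(function (key) {
        client.get(config.apiUrl + "product/" + list[key].child.child_id + "/quantity", function (invdata, invresponse) {
            if (invdata.status === 'success') {
                // store the value itself instead of a function
                list[key].child.prodqty = invdata.data;
            }
            // when the last inner request finishes, the list is complete
            if (--pending === 0) {
                console.log(list);
            }
        });
    });
});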
I've noticed that the size of a requested file affects how long the response takes for Ajax calls. So if I fire 3 Ajax GET requests for files of varying size, they may arrive in any order. What I want to do is guarantee the ordering when I append the files to the DOM.
How can I set up a queue system so that when I fire A1 -> A2 -> A3, I can guarantee that they are appended as A1 -> A2 -> A3, in that order?
For example, suppose A2 arrives before A1. I would want the append to wait for the arrival and loading of A1.
One idea is to create a status checker using a timed callback, like this:
// pseudo-code
function check(ready, func) {
    // check ready somehow
    if (ready) {
        func();
    } else {
        setTimeout(function () {
            check(ready, func);
        }, 1); // check every msec
    }
}
but this seems like a resource-heavy approach, as I fire the same function every millisecond until the resource is loaded.
Is this the right way to solve this problem?
status checker using a 1msec-timed callback - but this seems like a resource heavy way; Is this the right path to complete this problem?
No. You should have a look at Promises. That way, you can easily formulate it like this:
var a1 = getPromiseForAjaxResult(resource1url);
var a2 = getPromiseForAjaxResult(resource2url);
var a3 = getPromiseForAjaxResult(resource3url);
a1.then(function(res) {
    append(res);
    return a2;
}).then(function(res) {
    append(res);
    return a3;
}).then(append);
For example, jQuery's .ajax function implements this.
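If the three requests can run in parallel, jQuery's $.when can also wait for all of them and still append in a fixed order (a sketch; url1, url2 and url3 are placeholders):
// Fire all three requests at once; append in order once all have resolved.
$.when($.ajax(url1), $.ajax(url2), $.ajax(url3)).then(function (r1, r2, r3) {
    // with multiple Deferreds, each argument is an array of [data, textStatus, jqXHR]
    append(r1[0]);
    append(r2[0]);
    append(r3[0]);
});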
You can try something like this:
var resourceData = {};
var resourcesLoaded = 0;

function loadResource(resource, callback) {
    var xhr = new XMLHttpRequest();
    xhr.onload = function() {
        var state = this.readyState;
        var responseCode = this.status;
        if(state == this.DONE && responseCode == 200) {
            callback(resource, this.responseText);
        }
    };
    xhr.open("get", resource, true);
    xhr.send();
}
//Assuming that resources is an array of path names
function loadResources(resources) {
    for(var i = 0; i < resources.length; i++) {
        loadResource(resources[i], function(resource, responseText) {
            //Store the data of the resource in the resourceData map,
            //using the resource name as the key. Then increment the
            //resource counter.
            resourceData[resource] = responseText;
            resourcesLoaded++;
            //If the number of resources that we have loaded is equal
            //to the total number of resources, it means that we have
            //all our resources.
            if(resourcesLoaded === resources.length) {
                //Manipulate the data in the order that you desire.
                //Everything you need is inside resourceData, keyed
                //by the resource url.
                ...
                ...
            }
        });
    }
}
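For the elided section, one possible way to manipulate the data in request order (assuming an append helper that adds the markup to the DOM) is simply to walk the original array:
for (var j = 0; j < resources.length; j++) {
    // resources[] preserves the request order, so the appends happen in that order
    append(resourceData[resources[j]]);
}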
If certain components must be loaded and executed before others (like certain JS files), you can queue up your AJAX requests like so:
function loadResource(resource, callback) {
    var xhr = new XMLHttpRequest();
    xhr.onload = function() {
        var state = this.readyState;
        var responseCode = this.status;
        if(state == this.DONE && responseCode == 200) {
            //Do whatever you need to do with this.responseText
            ...
            ...
            callback();
        }
    };
    xhr.open("get", resource, true);
    xhr.send();
}
function run() {
    var resources = [
        "path/to/some/resource.html",
        "path/to/some/other/resource.html",
        ...
        "http://example.org/path/to/remote/resource.html"
    ];
    //Function that sequentially loads the resources, so that the next resource
    //will not be loaded until first one has finished loading. I accomplish
    //this by calling the function itself in the callback to the loadResource
    //function. This function is not truly recursive since the callback
    //invocation (even though it is the function itself) is an independent call
    //and therefore will not be part of the original callstack.
    function load(i) {
        if (i < resources.length) {
            loadResource(resources[i], function () {
                load(++i);
            });
        }
    }
    load(0);
}
This way, the next file will not be loaded until the previous one has finished loading.
If you cannot use any third-party libraries, you can use my solution. However, your life will probably be much easier if you do what Bergi suggested and use Promises.
There's no need to call check() every millisecond, just run it in the xhr's onreadystatechange. If you provide a bit more of your code, I can explain further.
I would have a queue of functions to execute, where each one checks that the previous result has completed before executing.
var remoteResults = [];

function requestRemoteResource(index, fetchFunction) {
    // the argument fetchFunction is a function that fetches the remote content
    // once the content is ready it calls the passed-in function with the result.
    fetchFunction(
        function(result) {
            // add the remote result to the list of results
            remoteResults[index] = result;
            // write as many results as are ready.
            writeResultsWhenReady(index);
        });
}

function writeResultsWhenReady(index) {
    var i;
    // Execute every ready result, in order
    for(i = 0; i < remoteResults.length; i++) {
        if(!remoteResults[i]) {
            return;
        }
        // Call the function that is the ith result
        // This will modify the dom.
        remoteResults[i]();
        // Blank the result to ensure we don't double execute
        // Store a function so we can do a simple boolean check.
        remoteResults[i] = function(){};
    }
}

requestRemoteResource(0, [Function to fetch the first resource]);
requestRemoteResource(1, [Function to fetch the second resource]);
requestRemoteResource(2, [Function to fetch the third resource]);
Please note that this is currently O(n^2) for simplicity, it would get faster but more complex if you stored an object at every index of remoteResults, which had a hasRendered property. Then you would only scan back until you found a result that had not yet occurred or one that has been rendered.
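A rough sketch of that optimization (the render and hasRendered names are illustrative assumptions, not part of the code above):
var remoteResults = [];

function writeResultsWhenReady(index) {
    // Scan back from the new result: if an earlier slot is still empty,
    // nothing can be rendered yet.
    for (var i = index - 1; i >= 0; i--) {
        if (remoteResults[i] && remoteResults[i].hasRendered) {
            break; // everything before this point is already on the page
        }
        if (!remoteResults[i]) {
            return; // an earlier response is still outstanding
        }
    }
    // Render forward from this index while consecutive results are available.
    for (var j = index; j < remoteResults.length && remoteResults[j]; j++) {
        if (!remoteResults[j].hasRendered) {
            remoteResults[j].render(); // the DOM-modifying step
            remoteResults[j].hasRendered = true;
        }
    }
}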
I'm writing a callback function for the FB.Connect.showPermissionDialog() function which accepts permissions and an optional callback function.
The callback is supposed to be passed null if the user rejects the permissions dialog, but for some reason my script always makes the POST request, even if the permissions request failed.
echo("FB.ensureInit ( function () {
FB.Connect.showPermissionDialog('email,offline_access',
function(accepted) {
if(accepted==null) {alert('failure');} else {
$.post(\"http://www.domain.com/permissions.php\",
{ username:$userID,mode:'accepted'});}
});
});");
Not sure why it's not reading the value of accepted properly. Thanks for the help.
My first guess is that the JavaScript null value is not actually being returned, and some other false-like value is being returned instead.
I'd try changing it to this, to test for all false-like values:
if (!accepted) {alert('failure')} else {
...
In Firebug, do:
console.log(accepted)
and see what you're getting back. It could be undefined, which according to your logic still passes.
Maybe you should change your logic around:
echo("FB.ensureInit ( function () {
FB.Connect.showPermissionDialog('email,offline_access',
function(accepted) {
if(accepted !== null) {
$.post(\"http://www.domain.com/permissions.php\",
{ username:$userID,mode:'accepted'});}
} else {alert('failure');}
});
});
");