I'm writing a webpage with JavaScript that reads data files in text format from the server per user request. Once the text file has been loaded, I need to manipulate the data somewhat.
I have been using XMLHttpRequest for the loading, however, now I see that synchronous requests are "deprecated". I can't start manipulating the data before it's loaded, so what can I do in this case?
Use an asynchronous request (or fetch, see below, which is also asynchronous):
// Performs an asynchronous GET for `path` and invokes `callback` with
// the response text on success, or with `null` when the request fails.
function doGET(path, callback) {
    var xhr = new XMLHttpRequest();
    xhr.onreadystatechange = function() {
        // Ignore intermediate ready states; 4 means the request finished.
        if (xhr.readyState != 4) {
            return;
        }
        if (xhr.status == 200) {
            // Success: hand the body to the caller.
            callback(xhr.responseText);
        } else {
            // Failure: signal it with `null`.
            callback(null);
        }
    };
    xhr.open("GET", path);
    xhr.send();
}
// Callback for doGET: receives the file contents, or null on failure.
function handleFileData(fileData) {
    if (fileData) {
        // Use the file data
    } else {
        // Show error
    }
}

// Kick off the request
doGET("/path/to/file", handleFileData);
Or using promises, which are the more modern way to handle callbacks (but keep reading):
// Promise-based GET: fulfills with the response text on HTTP 200,
// rejects with the XHR object otherwise.
// Fix: the old `callback` parameter was unused (the promise replaces
// it) and has been removed; JS ignores extra arguments, so any caller
// still passing one keeps working.
function doGET(path) {
    return new Promise(function(resolve, reject) {
        var xhr = new XMLHttpRequest();
        xhr.onreadystatechange = function() {
            if (xhr.readyState == 4) {
                // The request is done; did it work?
                if (xhr.status == 200) {
                    // Yes — fulfill with the body text.
                    resolve(xhr.responseText);
                } else {
                    // No — reject with the XHR so callers can inspect it.
                    reject(xhr);
                }
            }
        };
        xhr.open("GET", path);
        xhr.send();
    });
}
// Do the request and handle both outcomes of the promise.
doGET("/path/to/file")
    .then((fileData) => {
        // Use the file data
    })
    .catch((xhr) => {
        // The call failed, look at `xhr` for details
    });
Here in 2019, there's no reason to use XHR wrapped in a promise like that, just use fetch:
// Fetch-based GET: fulfills with the response body text; rejects on a
// network failure or any non-2xx HTTP status.
function doGET(url) {
    return fetch(url).then(response => {
        if (!response.ok) {
            throw new Error("HTTP error " + response.status); // Rejects the promise
        }
        // Bug fix: the original forgot to return the body, so the
        // promise fulfilled with `undefined` instead of the file text.
        return response.text();
    });
}
Since you want to handle a local file, try this:
Make use of XMLHttpRequest
// Reads `file` with a GET request and alerts its contents.
// Bug fix: the original passed `false` (synchronous) to open(), the
// very deprecated mode the question is about; the handler-based flow
// below works identically with an asynchronous request.
function readFile(file)
{
    var f = new XMLHttpRequest();
    // `true` = asynchronous; avoids the deprecated synchronous mode.
    f.open("GET", file, true);
    f.onreadystatechange = function ()
    {
        if (f.readyState === 4)
        {
            // status 0 covers file:// URLs, which report no HTTP status
            if (f.status === 200 || f.status == 0)
            {
                var res = f.responseText;
                alert(res);
            }
        }
    };
    f.send(null);
}
Then you have to call with File:\\
// NOTE(review): the "File:\\" scheme with backslashes is unusual — a
// standard file URL is "file:///your/path"; verify in the target browser.
readFile('File:\\\yourpath');
Related
I am using Async XMLHttpRequest to make an API call. Here's the workflow of my program,
// Question code (as posted). NOTE(review): the `function` keyword is
// missing before first_function, and valueToBeReturned is returned
// before the asynchronous request has completed.
first_function(){
var valueToBeReturned = 0;
makeRequest(myCallback)//function for API call
/*rest of the code*/
console.log("print second");
return valueToBeReturned;
}
// Question code: fires the request, then parses the JSON body and
// forwards it to `callback` once the response arrives.
function makeRequest(callback){
var xhttp = new XMLHttpRequest();
xhttp.open("GET", "my_url", true);
xhttp.send(null);
// Assigning onload after send() is fine in browsers: XHR events fire
// on later event-loop turns, never during send() itself.
xhttp.onload = function() {
if(xhttp.readyState === 4) {
if(xhttp.status === 200) {
// NOTE(review): `response` is an implicit global — declare it with
// var/const to avoid leaking state across calls.
response = JSON.parse(xhttp.responseText);
callback(null, response);
}
}
}
}
// Question code. NOTE(review): it is invoked as callback(null, response),
// so `data` receives null here — the payload is the second argument.
function myCallback(data){
console.log("print first")
}
Now what happens is that every time I run it, the entire code in the first function is executed and then the code in makeRequest is executed. I understand JS is synchronous in nature and everything. But I'm not able to get my work done here, which is: first it makes the API call, then the callback is executed, then the code after makeRequest. What am I doing wrong here?
PS this is not the actual code, just to demonstrate the flow of my program
You need to put callback as a parameter in makeRequest. I'm not sure what that null is there for, though. If you want "print second" to print second, you'll need to execute it after myCallback - maybe insert another callback?
// Runs the request, then `myCallback`, then the remaining code, so the
// intended print order is preserved.
function first_function() {
    var valueToBeReturned = 0;
    // Runs after myCallback once the request has completed.
    function restOfTheCode() {
        /*rest of the code*/
        console.log("print second");
    }
    makeRequest(myCallback, restOfTheCode);
}
// Fires a GET to my_url; on a completed 200 response, parses the JSON
// body and invokes both callbacks with it, in order.
function makeRequest(callback1, callback2) {
    var xhttp = new XMLHttpRequest();
    xhttp.open("GET", "my_url", true);
    xhttp.send(null);
    xhttp.onload = function() {
        var done = xhttp.readyState === 4;
        var ok = xhttp.status === 200;
        if (done && ok) {
            const response = JSON.parse(xhttp.responseText);
            callback1(response);
            callback2(response);
        }
    };
}
// First of the two chained callbacks — just reports that it ran.
function myCallback(data) {
    console.log("print first");
}
But this whole thing would be a whole lot nicer if you used Fetch and Promises instead:
// Fetches my_url and resolves with the parsed JSON body.
// Bug fix: Response has no JSON() method — the correct (lower-case)
// API is response.json().
function makeRequest() {
    return fetch('my_url')
        .then(response => response.json());
}
// consume the request:
makeRequest()
  .then(responseJSON => {
    // do stuff with the responseJSON
  })
  .catch(err => {
    // Added: without a rejection handler a failed request becomes an
    // unhandled promise rejection.
    console.error(err);
  });
I am kinda new to Javascript and have written some Chrome Extension. The code contains a sequence process where one function is being passed to another and will be called when the first is done. To be frank, this is getting complicated for me to even look at.
I will explain with an example that shows the sequence I have in my code:
// Final success handler for the two-step request chain.
function successFunc() {
    console.log("Success!");
}
// Retries the whole chain from step1 after handling the error.
// ("..." is a placeholder as posted, not valid JS on its own.)
function handleErrorFunc(successFunc) {
... stuff to handle error...
step1(successFunc);
}
// First request of the sequence; on HTTP 200 it passes the success
// callback along to step2. ("..." spans are placeholders as posted.)
function step1(successFunc) {
var url = ...
var request = new XMLHttpRequest();
request.open("GET", url, true);
request.send();
request.onerror = function() {
...
}
request.onreadystatechange = function() {
// readyState 4 = request finished; only continue on HTTP 200
if (request.readyState == 4 && request.status == 200) {
...
step2(successFunc)
}
}
}
// Second request; on HTTP 200 it finally invokes the success callback.
// ("..." spans are placeholders as posted.)
function step2(successFunc) {
var url = ...
var request = new XMLHttpRequest();
request.open("GET", url, true);
request.send();
request.onerror = function() {
...
}
request.onreadystatechange = function() {
// readyState 4 = request finished; only continue on HTTP 200
if (request.readyState == 4 && request.status == 200) {
...
successFunc()
}
}
}
Now I call
// Entry point: kick off the two-request sequence.
step1(successFunc);
Is there something to prevent this from happening, some design pattern perhaps?
Many javascript libraries return a Promise from an ajax call. This is a preferable method to using callbacks as it neatens your code considerable.
Instead of the code you currently have, your code would instead look like this.
// Chain the steps; log and stop if step1 rejects.
// Fix: typo "faled" -> "failed" in the logged message.
step1().then(step1Result => step2(step1Result))
    .catch(err => console.error("step1 failed", err));
You can continue to chain this as appropriate
// Per-step error handling: each step carries its own catch.
// Fix: typo "faled" -> "failed" in the step1 message.
step1().then(step1Result => step2(step1Result)
    .then(step2Result => step3(step2Result))
    .catch(err => console.error("step2 failed", err))
)
    .catch(err => console.error("step1 failed", err));
Using the standard XMLHttpRequest does not follow this pattern, but it is straightforward to wrap in a Promise if that's what you want to do — however, it is much easier to use an existing ajax library which supports promises.
I would suggest you start by reading Using Promises guide.
edit: If you want to wrap your calls in an Promise make sure you pass back the response as well as the errors. eg:
// Promise wrapper around an XHR GET.
// Fulfills with the response text on HTTP 200; rejects on a network
// error, and — fixed here — also on a completed request with a non-200
// status. The original left the promise pending forever in that case
// (e.g. on a 404), because onerror does not fire for HTTP errors.
function ajaxGet(url){
    return new Promise((resolve, reject) => {
        var request = new XMLHttpRequest();
        request.open("GET", url, true);
        request.send();
        request.onerror = reject; // will pass back error
        request.onreadystatechange = function() {
            if (request.readyState == XMLHttpRequest.DONE) {
                if (request.status == 200) {
                    resolve(request.responseText);
                } else {
                    reject(new Error("HTTP error " + request.status));
                }
            }
        }
    });
}
Then step1 might be
// step1 reduces to a single promise-returning call on the wrapper.
function step1(){
return ajaxGet("url/for/step1");
}
What you want to look at is promises in Javascript. But before you dive into something that specific I would recommend you get a basic understanding of functional programming in Javascript.
A few links to help you out in this case are given below -
Functional Programming in JS
Promises in JS
A better approach to your problem is to use promises :
// Runs once both promise-based steps have completed successfully.
function successFunc() {
    console.log("Success!");
}
// now returns a promise and does not need successFunc
// First request as a promise.
// Fixes: the original never settled when the request completed with a
// non-200 status (onerror only fires for network failures, so a 404
// left the promise pending forever), and it resolved with undefined —
// it now resolves with the response text, which callers may ignore.
function step1() {
    return new Promise((resolve, reject) => {
        const request = new XMLHttpRequest();
        request.open("GET", 'your/url', true);
        request.send();
        request.onerror = function() {
            reject();
        };
        request.onreadystatechange = function() {
            if (request.readyState == 4) {
                if (request.status == 200) {
                    resolve(request.responseText);
                } else {
                    reject(new Error("HTTP error " + request.status));
                }
            }
        };
    });
}
// now returns a promise and does not need successFunc
// Second request as a promise.
// Fixes: same as step1 — reject on a completed non-200 response
// (previously the promise never settled on HTTP errors) and resolve
// with the response text instead of undefined.
function step2() {
    return new Promise((resolve, reject) => {
        var request = new XMLHttpRequest();
        request.open("GET", 'your/url', true);
        request.send();
        request.onerror = function() {
            reject();
        };
        request.onreadystatechange = function() {
            if (request.readyState == 4) {
                if (request.status == 200) {
                    resolve(request.responseText);
                } else {
                    reject(new Error("HTTP error " + request.status));
                }
            }
        };
    });
}
// Dispatches to the error handling for whichever step failed.
function handleErrorFunc(step) {
    switch (step) {
        case 1:
            // handle step1 errors...
            break;
        case 2:
            // handle step2 errors...
            break;
    }
}
// chain your step calls
// NOTE(review): the inner step2() promise is intentionally not returned
// from the outer .then, so each step keeps its own .catch and the outer
// catch only ever sees step1 failures. Returning it would merge the
// error paths.
step1()
.then(() => {
step2()
.then(() => {
successFunc();
})
.catch(() => {
handleErrorFunc(2);
});
})
.catch(() => {
handleErrorFunc(1);
});
// Question code (as posted).
// NOTE(review): `break` appears inside the onload callback function —
// that is a SyntaxError, because a function body cannot break out of
// the enclosing while loop. Also note open(..., false) requests the
// deprecated synchronous mode while still relying on an onload handler.
while (repoUrlLink != null && count < 90) {
var xmlHttp = new XMLHttpRequest();
xmlHttp.open('GET', repoUrlLink, false);
xmlHttp.setRequestHeader('Authorization', 'Bearer ' + userData.accessToken);
xmlHttp.onload = function () {
if (xmlHttp.status != 200) {
displayNoAccessMessage();
break;
}
var result = JSON.parse(xmlHttp.responseText);
if (result.length == 0) {
displayNoRecordsMessage();
break;
}
var header = xmlHttp.getResponseHeader('link');
if (header) {
//doing something
}
else
repoUrlLink = null;
$.each(result, function (index, eachData) {
// doing something with data
count++;
});
}
xmlHttp.send();
}
Is there any better way to come out of the loop as soon as I display the error? The break statement is not working. Is there any callback which can be useful?
As someone said in the comments, you'll want to use recursion. This is because xmlHttp is an asynchronous operation. When you call send, the browser will send off the request, then continue on with the code, so by the time the onload function is called, it's no longer valid to break out of the loop. Also, being a function, onload is in no position to call break, since the while loop isn't even in the same scope.
// Answer sketch: replace the while loop with asynchronous "recursion" —
// each completed request decides whether to issue the next one.
// NOTE(review): `count` is never incremented in this sketch; the real
// implementation must do that while processing the response.
var count = 0;
var doXmlHttpCall = function() {
var xmlHttp = new XMLHttpRequest();
// additional setup code here
xmlHttp.onload = function() {
var errorFound = false;
// Placeholder condition as posted — not valid JS on its own.
if (/* some error case */) {
errorFound = true;
}
// process request
// Issue the next request only below the cap and with no error seen.
if (count < 90 && !errorFound) {
doXmlHttpCall();
}
}
}
doXmlHttpCall();
Some ideas to refactor the code using promises.
a promisified XMLHttpRequest request function getRepLink that performs one request. It rejects for request errors and HTTP errors (not "200" status).
a promisifed getSetOfRecords function to get a single response, parse it as JSON data and extract the link header value. It rejects iff there are no records.
a promisified process records function which tries to process a given number of records in sets. It fulfills with the number of records processed. It ignores the no records error if some records have already been processed.
// XMLHttp request
// Promisified GET with a bearer token: fulfills with the XHR object on
// HTTP 200, rejects on any other status or on a network error.
// Fixes: the parameter was named repUrlLink but the body read the
// outer repoUrlLink variable instead, and open() requested the
// deprecated synchronous mode (false) despite the promise wrapper —
// the request is now asynchronous.
function getRepoLink(repoUrlLink) {
    return new Promise(function (resolve, reject) {
        var xmlHttp = new XMLHttpRequest();
        xmlHttp.open('GET', repoUrlLink, true);
        xmlHttp.setRequestHeader('Authorization', 'Bearer ' + userData.accessToken);
        xmlHttp.onload = function () {
            if (xmlHttp.status == 200) {
                resolve(xmlHttp);
            }
            else {
                reject(new Error("HTTP error " + xmlHttp.status));
            }
        };
        xmlHttp.onerror = reject;
        xmlHttp.send();
    });
}
// get a set of records
// Sentinel rejection value meaning "the server returned an empty page".
const EndOfRecords = new Error( "End of Records");

// Fetches one page of records for `repUrlLink`. Fulfills with the
// parsed JSON body plus the `link` response header; rejects with
// EndOfRecords when the page is empty.
async function getSetOfRecords(repUrlLink) {
    const xmlHttp = await getRepoLink(repUrlLink);
    const result = JSON.parse(xmlHttp.responseText);
    if (result.length == 0) {
        displayNoRecordsMessage();
        throw EndOfRecords; // reject the returned promise
    }
    const header = xmlHttp.getResponseHeader('link');
    return {result, header}; // fulfill the returned promise
}
// get up to `count` records and process them
// Resolves with the number of records processed, paging while a `link`
// header is present and the cap has not been reached. The EndOfRecords
// rejection is tolerated once some records have already been processed.
// Fix: getSetOfRecords() was called without the URL argument, so every
// page request went out with `undefined` as its URL.
function processRecords(repUrlLink, count) {
    var processed = 0;
    function processSomeMore() {
        return getSetOfRecords(repUrlLink).then(function ({result, header}) {
            $.each(result, function (index, eachData) {
                // do something with data
                processed++;
            });
            if (header) {
                // do something with header
                if (processed < count)
                    return processSomeMore(); // asynchronous "recursion"
            }
            else {
                // do as required if no link header present.
            }
            return processed; // fulfill returned promise
        },
        function (error) {
            // Tolerate "end of records" if we already got something.
            if (error === EndOfRecords && processed > 0) {
                return processed; // got some records
            }
            throw error; // reject returned promise
        });
    }
    return processSomeMore();
}
// Start asynchronous operation
// Pages through up to 90 records, then reports the count (or the error).
processRecords( repUrlLink, 90)
.then( processed => console.log( processed + " records processed"))
.catch( error => console.log( error));
I have a function that loads my html templates asynchronously:
// Loads the template at `path` via XHR and fulfills a Q promise with a
// compiled underscore/lodash template of the response text. Rejects
// with the raw response text on a non-200 status, or with the error
// event on a network failure.
loadTplAsync: function(path) {
return Q.Promise(function(resolve, reject) {
var xhr = new XMLHttpRequest();
xhr.open("GET", path, true);
xhr.onload = () => {
// onload only fires once loading has finished, so the readyState
// check is redundant but harmless.
if (xhr.readyState === 4) {
if (xhr.status === 200) {
resolve(_.template(xhr.responseText));
} else {
reject(xhr.responseText);
}
}
};
xhr.onerror = error => reject(error);
xhr.send(null);
});
}
How to extend this function to cache responses by browser?
Assuming that what you mean by cache is not repeating the same request during the life cycle of that page load, you could store the promise in a variable and return the same promise each time.
The first time a specific path is requested will make a new request, subsequent times only the stored promise will be returned
// Cache of template promises keyed by path, shared across calls.
var promises ={};
// Memoized loader: the first call for a path issues the request; all
// later calls return the same stored promise.
// NOTE(review): rejected promises are cached too, so a transient
// network failure for a path is never retried during this page's
// lifetime — confirm that is acceptable.
loadTplAsync: function(path) {
// create new promise if it doesn't already exist for path instance
if(!promises[path]){
promises[path] = Q.Promise(function(resolve, reject) {
var xhr = new XMLHttpRequest();
xhr.open("GET", path, true);
xhr.onload = () => {
if (xhr.readyState === 4) {
if (xhr.status === 200) {
resolve(_.template(xhr.responseText));
} else {
reject(xhr.responseText);
}
}
};
xhr.onerror = error => reject(error);
xhr.send(null);
});
}
// return the stored promise
return promises[path];
}
Note this is not a persistent cache and new requests would be made on subsequent page loads
I'm trying to chain some API calls before setting the textContent of some spans in my webpage. I can execute the following ajax API calls separately by pasting them into the console, but when I chain them as promises I get getFirstData() is undefined.
// Module-level holders filled in by the two fetch functions below.
var first_data = [],
second_data = [];
// Question code (as posted). NOTE(review): the function itself returns
// undefined — the `return Promise.resolve('1')` only returns from the
// onreadystatechange handler, which is why calling .then() on the
// result fails.
function getFirstData(){
var xhr = new XMLHttpRequest();
var url = "/API/first-data?format=json"
xhr.onreadystatechange = function() {
if (xhr.readyState == 4 && xhr.status == 200) {
first_data = JSON.parse(xhr.responseText);
return Promise.resolve('1');
}
}
xhr.open("GET", url, true);
xhr.send();
}
/*getSecondData is the same, but with a different API url. I'll DRY these
two into one function that takes a url argument when I get it working.*/
// NOTE(review): this throws because getFirstData() returns undefined,
// and undefined has no .then method.
getFirstData().then(getSecondData).then(createPage);
This is between <script> tags just before </body>. So what's wrong with the call to getFirstData() on the last line that causes the interpreter to say it's undefined? For reference, in the network log, getSecondData() is sent and returns just fine.
(Note: I'm specifically trying to do this without JQuery).
The issue occurs because your function is returning undefined (in other words, getting to the end of the function block before it returns) before it ever gets a chance to return Promise.resolve('1').
Your function has to immediately return a Promise object, which becomes pending before eventually resolving inside your AJAX handler.
I'd also add error handling using the provided reject argument, as is standard for Promise objects.
// Promise-wrapped GET that stores the parsed body in `first_data` and
// resolves with '1'; rejects on a completed non-200 response.
// Fix: the original's `else reject(...)` was attached to the combined
// readyState/status check, so the promise was rejected as soon as
// readyState hit 1 — before the request ever finished.
function getFirstData(){
    return new Promise(function(resolve, reject) { // encapsulate code in a promise which returns immediately
        var xhr = new XMLHttpRequest();
        var url = "/echo/json"
        xhr.onreadystatechange = function() {
            if (xhr.readyState != 4) {
                return; // not done yet — keep waiting, don't reject
            }
            if (xhr.status == 200) {
                first_data = JSON.parse(xhr.responseText);
                resolve('1');
            } else {
                reject('There was an error!') // reject the promise if error occurs
            }
        }
        xhr.open("GET", url, true);
        xhr.send();
    });
}
And then catch it in the thenable chain:
// Fix: `Throw` is not a keyword — lower-case `throw` is required.
// Note the .catch sits before the final .then, so createPage still runs
// (with undefined) after a handled error, exactly as in the original chain.
getFirstData()
    .then(getSecondData)
    .catch(function(err){ throw err; }) // rethrow so the failure propagates
    .then(createPage);
See working jsfiddle
getFirstData isn't returning a promise it returns undefined, which is not thenable.
// Correct promise wrapper: resolves with '1' once the data has loaded
// into `first_data`.
// Improvement: also reject on a completed non-200 response — the
// original promise simply never settled on HTTP errors, leaving any
// .then chain hanging forever.
function getFirstData(){
    return new Promise(function(resolve, reject) {
        var xhr = new XMLHttpRequest();
        var url = "/API/first-data?format=json"
        xhr.onreadystatechange = function() {
            if (xhr.readyState == 4) {
                if (xhr.status == 200) {
                    first_data = JSON.parse(xhr.responseText);
                    resolve('1');
                } else {
                    reject(new Error("HTTP error " + xhr.status));
                }
            }
        }
        xhr.open("GET", url, true);
        xhr.send();
    });
}