Suppose I've got a script to load a web page in PhantomJS.
var page = require('webpage').create();
var url = 'http://localhost:3000/hello.html';

page.open(url, function (status) {
    var content = page.content;
    console.log(content);
    phantom.exit();
});
I'd like to know how much time it takes to load the page resources. So I modified my script like this:
var page = require('webpage').create();
var dict = {};

page.onResourceRequested = function (req) {
    dict[req.url] = new Date().getTime();
};

page.onResourceReceived = function (res) {
    if (res.stage == "end") {
        dict[res.url] = new Date().getTime() - dict[res.url];
    }
};

var url = 'http://localhost:3000/hello.html';
page.open(url, function (status) {
    var content = page.content;
    console.log(JSON.stringify(dict));
    phantom.exit();
});
Is there a better way to measure the request time?
The way you're measuring time is totally fine, but a resource doesn't always have a unique URL. Multiple resources can have the same URL, which would produce wrong results in your case. You should be using the id property of a request instead:
page.onResourceRequested = function (req) {
    dict[req.id] = new Date().getTime();
};

page.onResourceReceived = function (res) {
    if (res.stage == "end") {
        dict[res.id] = new Date().getTime() - dict[res.id];
    }
};
Note that there is the netsniff.js script in the examples directory, which already does a lot of this for you: it collects per-resource request and response timings and prints them in HAR format.
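If you'd rather keep your own script, a small variation keeps the URL next to the timing so the output stays readable (just a sketch, reusing your hello.html URL):

var page = require('webpage').create();
var timings = {};

page.onResourceRequested = function (req) {
    // remember when each request started, keyed by its unique id
    timings[req.id] = { url: req.url, start: new Date().getTime() };
};

page.onResourceReceived = function (res) {
    if (res.stage === 'end' && timings[res.id]) {
        timings[res.id].duration = new Date().getTime() - timings[res.id].start;
    }
};

page.open('http://localhost:3000/hello.html', function (status) {
    Object.keys(timings).forEach(function (id) {
        console.log(timings[id].duration + ' ms  ' + timings[id].url);
    });
    phantom.exit();
});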
Related
I have a server that constantly updates a JSON file. The code I set up below (JavaScript) reads the JSON file and shows it to the client, but it always refreshes the page.
I would like to know how I could read my JSON file every time it's updated, without refreshing the web page.
From what I looked up, it seems I'll have to use AJAX to get this done, but I couldn't find much else. Or should I change something about how my JSON file is updated?
This is the index.html code I'm using to get the data from my archive.json file:
<script>
    fetch('archive.json')
        .then(function (response) {
            return response.json();
        })
        .then(function (data) {
            appendData(data);
        })
        .catch(function (err) {
            console.log('error: ' + err);
        });

    function appendData(data) {
        console.log(data.velas.length);
        var mainContainer = document.getElementById("myData");
        for (var i = 0; i < data.velas.length; i++) {
            var div = document.createElement("div");
            div.innerHTML = 'Tempo: ' + data.velas[i].tempo;
            mainContainer.appendChild(div);
        }
    }
</script>
Thanks for any help!
You can try this; I have explained everything in the code comments:
<script>
    const REFRESH_INTERVAL = 5; // 5 seconds

    async function appendData() {
        // make the request
        const response = await fetch('archive.json');
        const data = await response.json();

        // check the data response (== null also matches undefined)
        if (data == null) return;

        // manipulate DOM
        var mainContainer = document.getElementById("myData");
        mainContainer.innerHTML = ''; // clear the previous content of mainContainer
        for (var i = 0; i < data.velas.length; i++) {
            var div = document.createElement("div");
            div.innerHTML = 'Tempo: ' + data.velas[i].tempo;
            mainContainer.appendChild(div);
        }
    }

    setInterval(() => { // send a request to the server every REFRESH_INTERVAL seconds <- you can change it above
        appendData();
    }, REFRESH_INTERVAL * 1000);
</script>
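If you don't want the first render to wait for the first interval tick, you can also call the function once up front:

    appendData(); // initial load; the interval then refreshes every REFRESH_INTERVAL seconds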
I hope I understood your question correctly.
document.querySelector('button').addEventListener('click', () => {
    var xhttp = new XMLHttpRequest();
    xhttp.onreadystatechange = function () {
        if (this.readyState == 4 && this.status == 200) {
            document.querySelector('.get').innerHTML = `${JSON.parse(this.responseText).ip}`;
        }
    };
    xhttp.open("GET", "https://ipapi.co/json/", true);
    xhttp.send();
});
<button>Get</button>
<div class="get">
</div>
Just to acknowledge my mistake: the page was refreshing not because of the JSON or AJAX code, but because I was running it on a local server, so it would refresh anyway.
But adilson's response to the topic, with a timer, completed what I needed:
https://stackoverflow.com/a/68090848/16179486
Thanks for the help guys!
I wasn't in charge of the Apache configuration, so I'm not sure what I can provide in terms of useful conf text, but I'm fairly certain I have narrowed the problem down to the login. EventSource works flawlessly both locally on XAMPP without any login and once you refresh the page after authenticating on the production server, but that first load on the server just will not open a connection. Has anyone seen this problem before? I couldn't find anything on the internet about this after searching for the past few days.
Edit: Some code
Some of the server-side code (which mostly shouldn't be relevant):
header('Content-Type: text/event-stream');
header('Cache-Control: no-cache');
$client_stream = new RedisStream();
$client_stream->poll(1); //The loop, with sleep time as a parameter
The JavaScript:
var xhttpViewSet;
var xhttpSearch;
var view = 'tile';
var search = '';
var seed_url = '/core/seed_view.php';
var stream_url = '/core/stream.php';
var default_class = 'panel-default';
var success_class = 'panel-success';
var warning_class = 'panel-warning';
var danger_class = 'panel-danger';

function UpdateClient(c_name, c_obj) {
    if ((c_element = document.getElementById(c_name)) !== null) {
        c_element.classList.remove('text-muted');
        c_element.classList.remove(default_class);
        c_element.classList.remove(success_class);
        c_element.classList.remove(warning_class);
        c_element.classList.remove(danger_class);
        switch (c_obj['status']) {
            case 0:
                c_obj['status'] = 'OK';
                c_element.classList.add(success_class);
                break;
            case 1:
                c_obj['status'] = 'Warning';
                c_element.classList.add(warning_class);
                break;
            case 2:
                c_obj['status'] = 'Critical';
                c_element.classList.add(danger_class);
                break;
            default:
                c_obj['status'] = 'Unknown';
                c_element.classList.add(danger_class);
                break;
        }
        for (var i in c_obj) {
            var var_nodes = c_element.getElementsByClassName(i);
            if (var_nodes.length > 0) {
                for (var j = var_nodes.length - 1; j >= 0; j--) {
                    var_nodes[j].innerHTML = c_obj[i];
                }
            }
        }
    }
}
function SetView() {
    var view_url = seed_url + '?search=' + search + '&view=' + view;
    xhttpViewSet.open('GET', view_url, true);
    xhttpViewSet.send();
}
var main = function() {
    var container = document.getElementById('content');
    if (window.XMLHttpRequest) {
        xhttpViewSet = new XMLHttpRequest();
        xhttpSearch = new XMLHttpRequest();
    } else {
        xhttpViewSet = new ActiveXObject('Microsoft.XMLHTTP');
        xhttpSearch = new ActiveXObject('Microsoft.XMLHTTP');
    }
    var stream = new EventSource(stream_url);
    stream.onopen = function() {
        console.log('Connection opened.'); // This doesn't fire
    };
    stream.onmessage = function(e) {
        var c_obj = JSON.parse(e.data);
        UpdateClient(c_obj.name, c_obj.value);
    };
    xhttpViewSet.onreadystatechange = function() {
        if (xhttpViewSet.readyState == 4) {
            var resp = xhttpViewSet.responseText;
            if (xhttpViewSet.status == 200 && resp.length > 0) {
                container.innerHTML = resp;
                if (view == 'list') {
                    $('#computer-table').DataTable({
                        "lengthMenu": [[25, 50, 100], [25, 50, 100]]
                    });
                }
            } else {
                container.innerHTML = '<error>No computers matched your search or an error occurred.</error>';
            }
        }
    };
    SetView(); // This successfully does all but make the EventSource connection, and only fails to do that on first load
    document.getElementById('list-view').addEventListener('click', function() {
        view = 'list';
        SetView();
    });
    document.getElementById('tile-view').addEventListener('click', function() {
        view = 'tile';
        SetView();
    });
    document.getElementById('search').addEventListener('keyup', function() {
        search = this.value.toUpperCase();
        SetView();
    });
    document.getElementById('clear-search').addEventListener('click', function() {
        document.getElementById('search').value = '';
        search = '';
        SetView();
    });
};

window.onload = main;
It is a bit hard to know for sure without a lot more information, but based on what you have said so far, I think it is one of:
HEAD/OPTIONS: Some browsers will send a HEAD or OPTIONS HTTP call to a server script before they send the GET or POST. The purpose of sending OPTIONS is to ask which headers are allowed to be sent. It is possible this is happening as part of the login process; that might explain why it works when you reload. See chapter 9 of Data Push Apps with HTML5 SSE (disclaimer: my book) for more details; basically, at the top of your SSE script you need to check the value of $_SERVER["REQUEST_METHOD"], and if it is "OPTIONS", intercept it and say which headers you want to accept. I've used this one before:
header("Access-Control-Allow-Headers: Last-Event-ID,".
" Origin, X-Requested-With, Content-Type, Accept,".
" Authorization");`
CORS: The HTML page URL and the SSE page URL must have identical origins. There are detailed explanations (specific to SSE) in chapter 9 of Data Push Apps with HTML5 SSE (again), or (less specifically) at Wikipedia. If this is the problem, look into adding header("Access-Control-Allow-Origin: *"); to your SSE script.
withCredentials: There is a second parameter to the SSE constructor, and you use it like this: var stream = new EventSource(stream_url, { withCredentials: true }); It says it is okay to send the auth credentials. (Again, chapter 9 of the book goes into more detail; sorry for the repeated plugs!) There is a second step, on the server side: at the top of your PHP SSE script you need to add the following (see also the client-side sketch after this list).
header("Access-Control-Allow-Origin: ".#$_SERVER["HTTP_ORIGIN"]);
header("Access-Control-Allow-Credentials: true");
PHP Sessions locking: This normally causes the opposite problem, which is that the SSE script has locked the PHP session, so no other PHP scripts work. See https://stackoverflow.com/a/30878764/841830 for how to handle it. (It is a good idea to do this anyway, even if it isn't your problem.)
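For completeness, the client-side half of the withCredentials fix is a one-line change to your existing main(); this is a sketch, with stream_url as in your code:

var stream = new EventSource(stream_url, { withCredentials: true });
stream.onopen = function () {
    console.log('Connection opened.');
};
stream.onerror = function () {
    // readyState distinguishes a reconnect attempt (CONNECTING) from a dead connection (CLOSED)
    console.log('Connection error, readyState = ' + stream.readyState);
};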
The problem is, some sites contain the request to test.com/test.aspx and some don't.
If the request exists, it should print the JSON and exit.
If the request does not exist, it should exit too - at the moment, it stays open in this case.
Also, how could I make the code better? Maybe even faster if that's possible?
My JS code:
var Url = "http://www.test.de";
var params = new Array();
var webPage = require('webpage');
var page = webPage.create();
var targetJSON = {};

page.open(Url);

page.onResourceRequested = function (requestData, networkRequest) {
    var match = requestData.url.match(/test.com\/test.aspx/g);
    if (match != null) {
        var targetString = decodeURI(JSON.stringify(requestData.url));
        var klammerauf = targetString.indexOf("{");
        var jsonobjekt = targetString.substr(klammerauf, (targetString.indexOf("}") - klammerauf) + 1);
        targetJSON = (decodeURIComponent(jsonobjekt));
        console.log(targetJSON);
        phantom.exit();
    }
};
I tried to add
} else {
phantom.exit();
}
and
} if (match == null) {
phantom.exit();
}
but nothing solves my problem.
If you want to check whether something doesn't exist, you need to check all things to see that none of them is it, or in first-order logic: ¬∃x P(x) ⇔ ∀x ¬P(x).
You first need to observe all requests to determine whether your intended request was among them. For example like this:
var found = false;

page.onResourceRequested = function (requestData, networkRequest) {
    var match = requestData.url.match(/test.com\/test.aspx/g);
    if (match != null) {
        var targetString = decodeURI(JSON.stringify(requestData.url));
        var klammerauf = targetString.indexOf("{");
        var jsonobjekt = targetString.substr(klammerauf, (targetString.indexOf("}") - klammerauf) + 1);
        targetJSON = (decodeURIComponent(jsonobjekt));
        console.log(targetJSON);
        found = true;
        phantom.exit();
    }
};

page.open(Url, function () {
    setTimeout(function () {
        // only reached when the request was never seen, so this always prints "false"
        console.log("found: " + found);
        phantom.exit();
    }, 1000);
});
I solved this with a global variable that denotes whether the request was found. If it wasn't, you can exit PhantomJS. I wait until the page is loaded, plus an additional waiting time in case there are Ajax requests.
I am building an add-on for Firefox that redirects a request to a new URL if the URL matches some conditions. I've tried this, and it does not work.
I register an observer on http-on-modify-request to process the URL; if the URL matches my condition, I redirect to a new URL.
Here is my code:
var Cc = Components.classes;
var Ci = Components.interfaces;
var Cr = Components.results;
var utils = require('sdk/window/utils'); // for getMostRecentBrowserWindow()
var newUrl = "https://google.com";
var httpRequestObserver;

function isInBlacklist(url) {
    // more conditions will go here; I just use youtube.com to test
    if (url.indexOf('youtube.com') != -1) {
        return true;
    }
    return false;
}

exports.main = function (options, callbacks) {
    // Create observer
    httpRequestObserver = {
        observe: function (subject, topic, data) {
            if (topic == "http-on-modify-request") {
                var httpChannel = subject.QueryInterface(Ci.nsIHttpChannel);
                var uri = httpChannel.URI;
                var domainLoc = uri.host;
                if (isInBlacklist(domainLoc) === true) {
                    httpChannel.cancel(Cr.NS_BINDING_ABORTED);
                    var gBrowser = utils.getMostRecentBrowserWindow().gBrowser;
                    var domWin = httpChannel.notificationCallbacks.getInterface(Ci.nsIDOMWindow);
                    var browser = gBrowser.getBrowserForDocument(domWin.top.document);
                    browser.loadURI(newUrl);
                }
            }
        },
        register: function () {
            var observerService = Cc["@mozilla.org/observer-service;1"].getService(Ci.nsIObserverService);
            observerService.addObserver(this, "http-on-modify-request", false);
        },
        unregister: function () {
            var observerService = Cc["@mozilla.org/observer-service;1"].getService(Ci.nsIObserverService);
            observerService.removeObserver(this, "http-on-modify-request");
        }
    };

    // register observer
    httpRequestObserver.register();
};

exports.onUnload = function (reason) {
    httpRequestObserver.unregister();
};
I am new to Firefox add-on development.
You can redirect a channel by calling nsIHttpChannel.redirectTo.
This is not possible once the channel is opened, but in http-on-modify-request it will work.
So in your code, you can do something like:
var Cu = Components.utils; // to go with your existing Cc/Ci/Cr shorthands
Cu.import("resource://gre/modules/Services.jsm");
// ...
if (condition) {
    httpChannel.redirectTo(
        Services.io.newURI("http://example.org/", null, null));
}
It looks like you might be using the Add-on SDK. In that case, read up on Using Chrome Authority.
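With the SDK, that boils down to requesting the chrome primitives from the chrome module; a minimal sketch:

// Add-on SDK: get chrome authority, then import Services.jsm as shown above
var { Cc, Ci, Cr, Cu } = require("chrome");
Cu.import("resource://gre/modules/Services.jsm");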
You could simply do a
httpChannel.URI.spec = newUrl;
instead of
httpChannel.cancel(Cr.NS_BINDING_ABORTED);
...
browser.loadURI(newUrl);
Not sure how 'safe' it would be in your case, since I'm not exactly sure how other headers in the request (e.g. Cookie) would be manipulated when you change the URL to point to an entirely different domain at this stage.
I am trying to implement a solution where, using PhantomJS, a web location is opened and evaluated and the output is saved to a file for processing, specifically scanning for malicious scripts. I have been able to implement the solution with PhantomJS running once. For example, this works perfectly...
var system = require('system');
var page = require('webpage').create();
var lastReceived = new Date().getTime();
var requestCount = 0;
var responseCount = 0;
var requestIds = [];
var fileSystem = require('fs');
var startTime = new Date().getTime();

page.onResourceReceived = function (response) {
    if (requestIds.indexOf(response.id) !== -1) {
        lastReceived = new Date().getTime();
        responseCount++;
        requestIds[requestIds.indexOf(response.id)] = null;
    }
};

page.onResourceRequested = function (request) {
    if (requestIds.indexOf(request.id) === -1) {
        requestIds.push(request.id);
        requestCount++;
    }
};

page.open('http://adserver.example.com/adserve/;ID=164857;size=300x250;setID=162909;type=iframe', function () {});

var checkComplete = function () {
    // We don't allow it to take longer than 5 seconds but
    // don't return until all requests are finished
    if ((new Date().getTime() - lastReceived > 300 && requestCount === responseCount) || new Date().getTime() - startTime > 5000) {
        clearInterval(checkCompleteInterval);
        console.log(page.content);
        phantom.exit();
    }
};

var checkCompleteInterval = setInterval(checkComplete, 1);
However, I have had immense difficulty trying to create an automated system that doesn't require PhantomJS to be continually restarted, which carries a fair bit of overhead.
I tried using a named pipe to read from and then attempting to open the passed URL, but for some reason it will not open properly. I would love and deeply appreciate any guidance on this.
One thing to mention is that PhantomJS excels at HTTP communication. That's why, for advanced features and better performance, I always use the resource pooling pattern plus the webserver module. This module is still tagged EXPERIMENTAL, but I have always found it quite stable so far.
So I think that in your case it's better to communicate via HTTP than via file I/O.
Here is a very basic example:
var page = require('webpage').create();
var server = require('webserver').create();
var system = require('system');
var host, port;

if (system.args.length !== 2) {
    console.log('Usage: server.js <some port>');
    phantom.exit(1);
} else {
    port = system.args[1];
    var listening = server.listen(port, function (request, response) {
        // a fresh page for each incoming request
        var page = require('webpage').create();
        page.open(request.post.target, function (status) {
            response.write("Hello " + page.title);
            response.close();
        });
    });
    if (!listening) {
        console.log("could not create web server listening on port " + port);
        phantom.exit();
    }

    // test only
    var url = "http://localhost:" + port + "/";
    console.log("SENDING REQUEST TO:");
    console.log(url);
    var data = 'target=http://stackoverflow.com/';
    page.open(url, 'post', data, function (status) {
        if (status !== 'success') {
            console.log('FAIL to load the address');
        } else {
            console.log("GOT REPLY FROM SERVER:");
            console.log(page.content);
        }
        phantom.exit();
    });
}
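Once the self-test block is removed, you can keep the process running and drive it from any HTTP client instead of restarting PhantomJS per URL; for example (assuming you start it on port 8080):

    phantomjs server.js 8080
    curl -d 'target=http://stackoverflow.com/' http://localhost:8080/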