Create a request waiting-list on Node.js - javascript

I'd like to build a Node.js server that responds to requests one at a time.
Basically: by doing fetch('<domain>/request/<id>'), I want the other requests to be queued until the client has received the data. Is that possible?

An npm module like express-queue could work.
var express = require('express');
var queue = require('express-queue');
var app = express();

app.use(queue({ activeLimit: 1 }));

app.use("*", function (req, res, next) {
  setTimeout(function () {
    res.send("OK");
  }, 2000);
});

app.listen(3000);
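To see the queueing in action, you could fire several requests at once; with activeLimit: 1 they should complete roughly 2 seconds apart. A quick client-side sketch, assuming Node 18+ for the global fetch and the server above running on port 3000:

// Fire three concurrent requests; responses arrive about 2 seconds apart
// because the server handles only one request at a time.
const start = Date.now();
for (let i = 0; i < 3; i++) {
  fetch('http://localhost:3000/request/' + i)
    .then(r => r.text())
    .then(body => console.log(body, 'received after', Date.now() - start, 'ms'));
}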

Related

Is this the correct way to share an instance of a Redis client in Express?

Since I have the application routes defined in separate scripts, I need a way to share the same Redis client across all of them. Is this the correct way of doing it?
app.js
const express = require('express');
const app = express();

var redis = require("redis");
var client = redis.createClient();

app.use((req, res, next) => {
  req.redis = client;
  next();
});

// load routes

app.listen(3000, () => console.log('Example app listening on port 3000!'));
... other js file
app.get('/xyz', (req, res) => {
  var redis = req.redis;
  // use redis
});
You're unnecessarily attaching your client instance to every request, regardless of whether it's needed.
I use a pretty standard controller module which you can then require() as needed; example below.
var redis = require('redis')
  .createClient(...);

redis.on('connect', function () {
  console.log('Redis server online.');
});

module.exports = redis;
Then whenever you want to access it:
var redis = require('./app/controllers/redis');
// redis => Redis client instance
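For example, a route module can then use the shared client directly. A minimal sketch, assuming the callback-style node_redis API from the question; the route, key name, and module path are illustrative:

// routes/xyz.js
var redis = require('./app/controllers/redis');

module.exports = function (app) {
  app.get('/xyz', function (req, res) {
    // 'somekey' is just a placeholder key
    redis.get('somekey', function (err, value) {
      if (err) return res.status(500).send(err.message);
      res.json({ somekey: value });
    });
  });
};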

How to get json-server, when used as a module, to delay responses?

json-server allows one to configure responses to be delayed via the command line:
json-server --port 4000 --delay 1000 db.json
How does one do this when using json-server as a module? The following doesn't work:
const jsonServer = require('json-server');
var server = jsonServer.create();

server.use(jsonServer.defaults());
server.use(jsonServer.router("db.json"));
server.use(function (req, res, next) {
  setTimeout(next, 1000);
});

server.listen(4000);
It just ignores the setTimeout function completely and doesn't execute it.
The order is important: middlewares should be registered before the router. If you move your timeout middleware before server.use(jsonServer.router("db.json")), it should work.
Here is my working example:
var path = require('path');
var jsonServer = require('json-server');

var app = jsonServer.create();
var router = jsonServer.router(path.join(__dirname, '../../test/api/dev.json'));
var middlewares = jsonServer.defaults();

// Delay middleware registered before the router, so it runs first.
app.use(function (req, res, next) {
  setTimeout(next, 10000);
});
app.use(middlewares);
app.use(router);

var server = app.listen(3000, function () {
  console.log('JSON Server is running on localhost:3000');
  done(); // callback from the test harness this example was taken from
});
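Applied to the snippet from the question, the reordering would look roughly like this (same 1000 ms delay, with the timeout middleware registered before the router):

const jsonServer = require('json-server');
var server = jsonServer.create();

server.use(jsonServer.defaults());
// The delay middleware now runs before the router handles the request.
server.use(function (req, res, next) {
  setTimeout(next, 1000);
});
server.use(jsonServer.router("db.json"));

server.listen(4000);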

How to respond to a Telegram bot webhook request? The same requests keep coming repeatedly

I am trying to make a Telegram bot (for learning purposes) with Node.js, using the official Telegram Bot API. I set a webhook to Heroku.
I am able to reply to the request, but after some time the same request comes again. Is it normal to get the same request, or did I not respond to the incoming request? When I call the getWebhookInfo method it shows a pending_update_count, but my code did respond to every request coming from the webhook.
I use this to reply to the incoming requests:
var express = require('express');
var bodyParser = require('body-parser');
var app = express();
var config = require('./lib/config');
var request = require('request');

var port = process.env.PORT || 3000;
var reply_url = "https://api.telegram.org/bot" + config.bot_token;

app.use(bodyParser.json());

app.get('/', function (req, res) {
  res.send("Working");
  request({
    url: "https://api.telegram.org/bot" + config.bot_token + '/getMe',
    json: true
  }, (err, res, body) => {
    console.log(body);
  });
});

app.post('/' + config.bot_token, (req, res) => {
  var body = req.body;
  console.log(body);
  console.log(body.message.entities);
  request.post((reply_url + '/sendMessage'), {
    form: {
      chat_id: body.message.chat.id,
      text: "POST REPLY SUCCESS",
      reply_to_message_id: body.message.message_id
    }
  });
});

app.listen(port, () => {
  console.log("Server is Started at - " + port);
});
Try adding next to the callback of the GET handler, function (req, res, next), and call next() after you do res.status(201).send('Working').
The same applies to the other POST route ('/' + config.bot_token): in the success and error callbacks of the /sendMessage call, call res.status().send() and then next().
Always calling next() where appropriate is standard practice when working with Express middleware.
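A minimal sketch of the POST handler along those lines, assuming the same request library and config module from the question; the key point is that the acknowledgement response is what stops Telegram from re-sending the update:

app.post('/' + config.bot_token, (req, res) => {
  var body = req.body;
  request.post(reply_url + '/sendMessage', {
    form: {
      chat_id: body.message.chat.id,
      text: "POST REPLY SUCCESS",
      reply_to_message_id: body.message.message_id
    }
  }, (err) => {
    if (err) console.error(err);
    // Acknowledge the update so Telegram stops retrying it.
    res.sendStatus(200);
  });
});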

cookie doesn't get sent on first request

I've got the following code in my server.js Express application:
var express = require('express');
var fallback = require('express-history-api-fallback');
var compress = require('compression');
var favicon = require('serve-favicon');
var prerenderio = require('prerender-node');
var config = require('getconfig');

var app = express();

app.use(function (req, res, next) {
  if (config.environment !== 'local') {
    res.cookie('config', JSON.stringify(config), { secure: true });
  } else {
    res.cookie('config', JSON.stringify(config));
  }
  next();
});

app.get('/versioncheck', function (req, res) {
  return res.json({ version: config.version });
});

app.use(compress());
app.use(prerenderio.set('prerenderToken', config.prerender_token).set('protocol', 'https'));
app.use(express.static(__dirname, { maxAge: 31536000 }));
app.use(favicon(__dirname + '/favicon.ico'));
app.use(fallback('index.html', { root: __dirname }));

const PORT = process.env.PORT || 1010;
app.listen(PORT, function () {
  console.log('Server started on port %s', PORT);
});
The first middleware I'm setting up with Express is quite simple: it sends down a cookie to the client with some config information. The issue I'm facing is that this cookie doesn't get sent down to the client on the first request; for all subsequent requests it works fine. What am I missing?
I had a similar problem some time ago.
At the start of the first request, the cookie does not exist on the client yet.
The cookie is set by the response to that request, not before it.
So only at the start of the second request does the browser send back the cookie that was set by request 1.
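A minimal sketch illustrating this, assuming cookie-parser is installed; the logged value is undefined on the first request and populated from the second request onwards:

var express = require('express');
var cookieParser = require('cookie-parser');
var app = express();

app.use(cookieParser());
app.use(function (req, res, next) {
  // First request: the browser has no cookie yet, so this logs undefined.
  // From the second request on, the cookie set by the previous response comes back.
  console.log('incoming config cookie:', req.cookies.config);
  res.cookie('config', JSON.stringify({ version: '1.0.0' }));
  next();
});

app.get('/', function (req, res) {
  res.send('OK');
});

app.listen(3000);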

Ignore header validation for HTTP requests in Node

I am building a proxy server which is supposed to forward data from a Shoutcast server to the client. Using request or even Node's http module, this fails due to a missing HTTP header:
{ [Error: Parse Error] bytesParsed: 0, code: 'HPE_INVALID_CONSTANT' }
The URL in question is: http://stream6.jungletrain.net:8000
Doing a header request with curl I was able to verify this:
$ curl -I http://stream6.jungletrain.net:8000
curl: (52) Empty reply from server
Yet the stream is working fine as tested with curl stream6.jungletrain.net:8000.
Is there a way to disable the header verification in request or Node's http? This is the code I am testing it on:
var express = require('express');
var request = require('request');
var app = express();

app.get('/', function (req, res) {
  request('http://stream6.jungletrain.net:8000').pipe(res);
});

var server = app.listen(3000, function () {
  console.log('Server started');
});
I am aware this can be achieved by rolling an implementation with net; there is also icecast-stack, but from what I've seen it only implements half of the Stream interfaces properly.
Using the icecast package, I was able to get this working both by using the on('data') event and by piping it to the Express response:
var express = require('express');
var icecast = require('icecast');

var app = express();
var url = 'http://stream6.jungletrain.net:8000';

app.get('/', function (req, res) {
  icecast.get(url, function (icecastRes) {
    console.error(icecastRes.headers);
    icecastRes.on('metadata', function (metadata) {
      var parsed = icecast.parse(metadata);
      console.error(parsed);
    });
    icecastRes.on('data', function (chunk) {
      console.log(chunk);
    });
  });
});

var server = app.listen(3000, function () {
  console.log('Server started');
});
Or simply:
app.get('/', function (req, res) {
  icecast.get(url).pipe(res);
});
Also of note:
It appears the icecast package has been superseded by https://www.npmjs.com/package/icy
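Assuming icy mirrors the icecast API (as its documentation suggests), the piping variant would look roughly like this:

var express = require('express');
var icy = require('icy');

var app = express();
var url = 'http://stream6.jungletrain.net:8000';

app.get('/', function (req, res) {
  // icy.get handles the non-standard ICY response and hands back a readable stream.
  icy.get(url, function (icyRes) {
    icyRes.pipe(res);
  });
});

app.listen(3000);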
