How to cancel previous express request, so that new request gets executed?

I have this endpoint. This API takes a long time to return a response.
app.get('/api/execute_script', (req, res) => {
  // code here
})
I have the following endpoint, which should kill the process:
app.get('/api/kill-process', (req, res) => {
  // code here
})
But the second API doesn't get executed until the first one has responded. How can I cancel the previous API request so that the second request gets executed?

You can use an EventEmitter to kill the other process; all you need is a session/user/process identifier.
const EventEmitter = require('events');
const emitter = new EventEmitter();

app.get('/api/execute_script', async (req, res, next) => {
  const eventName = `kill-${req.user.id}`; // User/session identifier
  const proc = someProcess();
  const listener = () => {
    // or whatever you have to kill/destroy/abort the process
    proc.abort();
  };
  try {
    emitter.once(eventName, listener);
    await proc;
    // only respond if the process was not aborted
    res.send('process done');
  } catch (e) {
    // The process should reject if it was aborted
    if (e.code === 'aborted') {
      // Or whatever status code fits a cancelled request
      return res.status(499).send('Aborted');
    }
    // any other process error goes to the error handler
    next(e);
  } finally {
    // cleanup
    emitter.removeListener(eventName, listener);
  }
});
app.get('/api/kill-process', (req, res) => {
  const eventName = `kill-${req.user.id}`;
  // .emit returns true if the emitter has a listener attached
  const killed = emitter.emit(eventName);
  res.send({ killed });
});
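This assumes someProcess() returns something awaitable that also exposes an abort() method and rejects with e.code === 'aborted' when aborted. A minimal sketch of what that could look like, using a spawned child process (the script name and signal are placeholders, not part of the original answer):

const { spawn } = require('child_process');

function someProcess() {
  // Hypothetical long-running work in a child process
  const child = spawn('node', ['long-script.js']);
  let aborted = false;

  const promise = new Promise((resolve, reject) => {
    child.on('error', reject);
    child.on('exit', (code) => {
      if (aborted) {
        const err = new Error('Process aborted');
        err.code = 'aborted'; // matches the check in the catch block above
        return reject(err);
      }
      if (code === 0) return resolve('done');
      reject(new Error(`Process exited with code ${code}`));
    });
  });

  // Expose abort() on the returned promise so the route handler can both await and abort it
  promise.abort = () => {
    aborted = true;
    child.kill('SIGTERM');
  };

  return promise;
}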

fetch().then().catch(); doesn't catch `Uncaught (in promise) DOMException: The user aborted a request.`

I have an interface on an ESP32.
I am trying to have a Connected variable show up in the interface to signal whether the user is connected to it or not.
I am using the following method to constantly GET the configuration from the server in order to update the interface.
// get json
async function get_json(api_path, options = {}) {
  // const url = api_path;
  // console.log(ROOT_URL);
  // console.log(api_path);
  const { timeout = 8000 } = options;
  const controller = new AbortController();
  const timeoutID = setTimeout(() => controller.abort(), timeout);
  const response = await fetch(api_path, {
    ...options,
    signal: controller.signal,
  });
  clearTimeout(timeoutID);
  return response.json();
}
async function getSettings() {
  get_json("/api/settings/get", {
    timeout: 5000,
  }).then((json_data) => {
    // do something with the data
    connected = true;
  }).catch(function (error) {
    // request timeout
    console.log(error);
    connected = false;
    console.log(error.name === "AbortError");
  });
}
Everything works dandy except the catch part.
Let's say the user changes the IP of the ESP. The ESP restarts and reconfigures itself to use the new IP address, but the user stays on the same page because connected is still set to true. In the console I get Uncaught (in promise) DOMException: The user aborted a request. for each call to getSettings(), because I can't write a proper catch.
Basically, after the IP is changed, getSettings() tries to GET from the wrong address, so it's normal for it to throw some sort of error. I should catch that error, set the Connected variable to false, and update it in the interface so that the user can navigate to the IP address they have just entered.
Edit:
This is how I update connected in the interface:
// check if connected to ESP
function connectedStatus() {
  let conn = document.getElementById("connected");
  conn.innerHTML = connected ? "Yes" : "No";
}
setInterval(connectedStatus, 500);
"Doesn't get_json() return a PROMISE?" - All async function does return promise, But your getSettings function doesn't wait for get_json to become resolved.
let delay = ms => new Promise(ok => setTimeout(ok, ms));

async function asyncFn() {
  delay(2000).then(() => {
    console.log("wait! 2 secs...")
  });
  console.log("thanks for waiting")
}

asyncFn()
As you can see, "thanks for waiting" prints first.
So our guess is that getSettings does modify the connected state, just not immediately but later; your getSettings itself resolves immediately, and you didn't wait for the state (connected) to change.
So you need to wait for the change:
let delay = ms => new Promise(ok => setTimeout(ok, ms));
let state;

async function asyncFn() {
  await delay(2000).then(() => {
    state = true
    console.log("wait! 2 secs...")
  });
  console.log("thanks for waiting")
}

asyncFn().then(() => {
  console.log("state:", state)
})
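Applied to your code, that would look something like this (a sketch reusing your get_json, connected, and endpoint; awaiting the request keeps the abort inside the catch instead of surfacing as an unhandled rejection):

async function getSettings() {
  try {
    const json_data = await get_json("/api/settings/get", { timeout: 5000 });
    // do something with the data
    connected = true;
  } catch (error) {
    // AbortError from the timeout, or a network error if the host is unreachable
    console.log(error);
    console.log(error.name === "AbortError");
    connected = false;
  }
}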

Typescript: Web worker cannot create an object from an imported class

I'm attempting to create a new object from a class, in a Web worker. (The worker code is in Worker.ts, which generates worker.bundle.js. Wrapper.ts runs on the main thread and creates the worker, using worker.bundle.js.)
I am importing the class into my Web worker and, when the message event fires, I check to see if the message data has a specific property. If it does, I create a new object from my class.
But, when I run the code, I never hit the debugger breakpoint. If I comment out the object creation, I will hit the debugger breakpoint anytime the evaluated condition is true.
Trying to troubleshoot, I added error handling in a separate addEventListener call, with nothing but a debugger statement. That breakpoint is never hit, regardless of whether I comment or un-comment the object creation.
Nothing I've read anywhere says (or even suggests) that you can't use a class to create an object in a Web worker. Yet my worker fails silently every time I try to do so. Is there something special about class usage in a Web worker that causes different behavior than working in the main browser thread?
Thanks!
Worker.ts
import { MyTempClass } from '../../LibraryB/MyTempClass';

addEventListener('message', function (e) {
  if (typeof e.data.id !== 'undefined') {
    const { id, payload } = e.data;
    debugger;
    // The ^^^ debugger will not be hit, if I uncomment the next line...
    //let test = new MyTempClass('https://somesite.org');
    const err = null;
    const msg = {
      id,
      err,
      payload: 'result'
    };
    // @ts-ignore
    this.postMessage(msg);
  }
});

addEventListener('error', function (e) {
  debugger;
  // ^^^ this breakpoint is never reached. So, apparently, there aren't any errors being thrown.
});
Wrapper.ts
interface IWorkerMessage {
  id: number;
  payload: any;
}

const resolves = {};
const rejects = {};
let globalMsgId = 0;

// Activate the worker, returning a promise
const sendMsg = (payload: any, worker: Worker): Promise<any> => {
  const msgId = globalMsgId++;
  const msg: IWorkerMessage = {
    id: msgId,
    payload
  };
  return new Promise(function (resolve, reject) {
    // save callbacks for later
    resolves[msgId] = resolve;
    rejects[msgId] = reject;
    worker.postMessage(msg);
  });
};

// Handle incoming result
const handleMsg = (msg: MessageEvent): void => {
  if (Object.prototype.hasOwnProperty.call(msg.data, 'payload')) {
    const { id, err, payload } = msg.data;
    //debugger;
    if (payload) {
      const resolve = resolves[id];
      if (resolve) {
        resolve(payload);
      }
    } else {
      // error condition
      const reject = rejects[id];
      if (reject) {
        if (err) {
          reject(err);
        } else {
          reject('Got nothing');
        }
      }
    }
    // purge used callbacks
    delete resolves[id];
    delete rejects[id];
  }
};

// Wrapper class
class Wrapper {
  worker: Worker;
  constructor() {
    this.worker = new Worker('./Worker.bundle.js');
    this.worker.onmessage = handleMsg;
  }
  call = (payload: any) => {
    return sendMsg(payload, this.worker);
  };
}

export default Wrapper;
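For reference, a hypothetical usage of this wrapper from the main thread (the payload and logging are illustrative, not part of the original code). Attaching worker.onerror can also help here, since an exception thrown while the worker bundle is being evaluated can otherwise look like a silent failure:

import Wrapper from './Wrapper';

const wrapper = new Wrapper();

// Surface worker-side errors on the main thread; an exception thrown while
// the worker bundle is evaluated (e.g. from a failing import) can show up
// here even when the worker's own 'error' listener never fires.
wrapper.worker.onerror = (e) => console.error('worker error:', e.message);

wrapper.call({ url: 'https://somesite.org' })
  .then((result) => console.log('worker replied:', result))
  .catch((err) => console.error('worker failed:', err));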

How do I correctly test asynchronous stream error event handling with mocha/sinon/chai?

I am testing an async method that returns some data from a web request using the native https.request() method in NodeJS. I am using mocha, chai, and sinon with the relevant extensions for each.
The method I'm testing essentially wraps the boilerplate https.request() code provided in the NodeJS docs in a Promise and resolves when the response 'end' event is received or rejects if the request 'error' event is received. The bits relevant to discussion look like this:
async fetch(...) {
  // setup
  return new Promise((resolve, reject) => {
    const req = https.request(url, opts, (res) => {
      // handle response events
    });
    req.on('error', (e) => {
      logger.error(e); // <-- this is what I want to verify
      reject(e);
    });
    req.end();
  });
}
As indicated in the comment, what I want to test is that if the request error event is emitted, the error gets logged correctly. This is what I'm currently doing to achieve this:
it('should log request error events', async () => {
  const sut = new MyService();
  const err = new Error('boom');
  const req = new EventEmitter();
  req.end = sinon.fake();
  const res = new EventEmitter();
  sinon.stub(logger, 'error');
  sinon.stub(https, 'request').callsFake((url, opt, cb) => {
    cb(res);
    return req;
  });
  try {
    const response = sut.fetch(...);
    req.emit('error', err);
    await response;
  } catch (e) {}
  logger.error.should.have.been.calledOnceWith(err);
});
This feels like a hack, but I can't figure out how to do this correctly using the normal patterns. The main problem is I need to emit the error event after the method is called but before the promise is fulfilled, and I can't see how to do that if I am returning the promise as you normally would with mocha.
I should have thought of this, but @Bergi's comment about using setTimeout() in the fake gave me an idea and I now have this working with the preferred syntax:
it('should log request error events', () => {
  const sut = new MyService();
  const err = new Error('boom');
  const req = new EventEmitter();
  req.end = sinon.fake();
  const res = new EventEmitter();
  sinon.stub(logger, 'error');
  sinon.stub(https, 'request').callsFake((url, opt, cb) => {
    cb(res);
    setTimeout(() => { req.emit('error', err); });
    return req;
  });
  return sut.fetch(...).should.eventually.be.rejectedWith(err)
    .then(() => {
      logger.error.should.have.been.calledOnceWith(err);
    });
});
I don't like adding any delays in unit tests unless I'm specifically testing delayed functionality, so I used setTimeout() with 0 delay just to push the emit call to the end of the message queue. By moving it to the fake I was able to just use promise chaining to test the call to the logger method.

Why does socket.io-client not connect inside of an RXJS subscription?

I am trying to initiate a socket.io connection upon receiving a certain event in an rxjs observable. The socket.io connection starts fine inside startEventStream if it's outside of the subscribe, but when placed in the subscribe, even though the event is firing, the callback inside socket is never called.
function startEventStream(
  stateLoaded$: Observable<LoginEvent>
): Observable<AoEvent> {
  const socket = require('socket.io-client')('http://localhost:8003')
  const ret = new Subject<AoEvent>()
  const merged = merge(stateLoaded$, ret)
  const session = '895e17a0-6c2b-11ea-8d86-45f581e4b250'
  const token =
    'f3ccdd81c2ece391891cba4f7d4eb8466d3d44675dd70f11e21190ae13dfdf69'
  merged.subscribe({
    next(val) {
      process.nextTick(() => {
        if (val.type == 'state-loaded') {
          console.log('we should be connecting') // this prints
          socket.on('connect', function() {
            console.log('connected') // this doesn't print
            ret.next({ type: 'socket-connected' })
            socket.emit('authentication', {
              session,
              token
            })
          })
        }
      })
    }
  })
  return ret
}
I got it working by passing { autoConnect: false } into the initialization of the socket and doing socket.on() inside the subscribe, then connecting there. I think the connect event was getting discarded before the subscription function was called, and so the callback was never firing. Starting the connection inside solves the problem.
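A minimal sketch of that fix applied to the function above (keeping the same session/token constants and event names; note that socket.io-client spells the option autoConnect):

function startEventStream(
  stateLoaded$: Observable<LoginEvent>
): Observable<AoEvent> {
  // Create the socket but don't connect until the handler is registered
  const socket = require('socket.io-client')('http://localhost:8003', {
    autoConnect: false
  })
  const ret = new Subject<AoEvent>()
  const merged = merge(stateLoaded$, ret)
  const session = '895e17a0-6c2b-11ea-8d86-45f581e4b250'
  const token =
    'f3ccdd81c2ece391891cba4f7d4eb8466d3d44675dd70f11e21190ae13dfdf69'
  merged.subscribe({
    next(val) {
      if (val.type == 'state-loaded') {
        // Register the listener first, then connect, so the 'connect'
        // event cannot fire before anything is listening for it
        socket.on('connect', function () {
          ret.next({ type: 'socket-connected' })
          socket.emit('authentication', { session, token })
        })
        socket.connect()
      }
    }
  })
  return ret
}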

Why MessageChannel blocks forever and the client never gets message?

I have a problem where my MessageChannel does not deliver a message from a service worker to a client. Why does the MessageChannel block forever, and why does the client never get the message?
In the service worker's fetch event I create a MessageChannel and then call postMessage, but the client's addEventListener('message', ...) handler never fires. Why?
For the service worker I created a handler like this:
self.addEventListener("fetch", async (event) => {
// Exit early if we don't have access to the client.
// Eg, if it's cross-origin.
if (!event.clientId) return null;
// eslint-disable-next-line
const client = await clients.get(event.clientId);
const response = await sendMessageToClient(client, { url: event.request.url }, 5000);
})
async function sendMessageToClient(client, message, timeoutAfter) {
  // Exit early if we don't get the client.
  // Eg, if it closed.
  if (!client) return null;
  // This wraps the message posting/response in a promise, which will resolve if the response doesn't
  // contain an error, and reject with the error if it does.
  return await new Promise((resolve, reject) => {
    const messageChannel = new MessageChannel();
    messageChannel.port1.onmessage = function (event) {
      if (event.data && event.data.error) {
        reject(event.data.error);
      } else {
        resolve(event.data);
      }
    };
    // This sends the message data as well as transferring messageChannel.port2 to the client.
    // The client can then use the transferred port to reply via postMessage(), which
    // will in turn trigger the onmessage handler on messageChannel.port1.
    // See https://html.spec.whatwg.org/multipage/workers.html#dom-worker-postmessage
    client.postMessage(message, [messageChannel.port2]);
    // Set up the timeout
    setTimeout(() => {
      messageChannel.port1.close();
      messageChannel.port2.close();
      reject('Promise timed out after ' + timeoutAfter + ' ms');
    }, timeoutAfter);
  });
}
On the client I expect to get the message here:
// Set up a listener for messages posted from the service worker.
// The service worker is set to post a message to a specific client only,
// so you should see this message event fire once.
// You can force it to fire again by visiting this page in an Incognito window.
navigator.serviceWorker.addEventListener('message', function (event) {
  console.log(event.data);
});
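For reference, the reply path described in the comments inside sendMessageToClient would mean the client answering through the transferred port, something like this (a sketch; the echoed payload is illustrative):

navigator.serviceWorker.addEventListener('message', function (event) {
  console.log(event.data);
  // Reply through the transferred MessageChannel port so that
  // messageChannel.port1.onmessage fires back in the service worker.
  if (event.ports && event.ports[0]) {
    event.ports[0].postMessage({ received: event.data.url });
  }
});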
But I never get the message on the client, and if there is no timeout it will wait forever.
Update:
What I have tried:
window.addEventListener('message', function (event) { console.log('window.addEventListener', event.data) });
window.onmessage = function (event) { console.log('window.onmessage', event.data) };
navigator.serviceWorker.addEventListener('message', function (event) { console.log('navigator.serviceWorker.addEventListener', event.data) });
navigator.serviceWorker.onmessage = function (event) { console.log('navigator.serviceWorker.onmessage', event.data) };
// navigator.serviceWorker.controller.addEventListener('message', function (event) { console.log('navigator.serviceWorker.contoller.addEventListener', event.data) });
// navigator.serviceWorker.controller.onmessage = function (event) { console.log('navigator.serviceWorker.contoller.onmessage', event.data) };
navigator.serviceWorker.ready.then(function (registration) {
  console.log('navigator.serviceWorker.ready');
  registration.active.addEventListener('message', function (event) { console.log('registration.active.addEventListener', event.data) });
  registration.active.onmessage = function (event) { console.log('registration.active.onmessage', event.data) };
});

navigator.serviceWorker.register("sw.js")
  .then(function (registration) {
    // Registration was successful
    console.log("ServiceWorker registration successful with scope: ", registration.scope);
  })
  .catch(function (error) {
    console.error("Service Worker Error", error);
  });
and the only logs I got were:
Navigated to http://localhost:3000/
navigator.serviceWorker.ready
ServiceWorker registration successful with scope: http://localhost:3000/
