How to use nock to intercept requests regardless of body - javascript

I'm trying to use nock in my tests to intercept the request calls I'm making from the native https module in Node.js. I'm using Promise.all to make two requests to the external server. I want my tests to intercept the calls and check some of the form fields to make sure they're filled in as I want.
My class is set up as below (I've kept in the most relevant parts of the code):
const archiver = require('archiver');
const { generateKeyPairSync } = require('crypto');
const FormData = require('form-data');
const fs = require('fs');
const https = require('https');

class Platform {
  constructor() {
    this.FILESTORE_USERNAME = process.env.FILESTORE_USERNAME;
    this.FILESTORE_PASSWORD = process.env.FILESTORE_PASSWORD;
  }

  store(serviceName) {
    const { publicKey, privateKey } = this._generateKeys();
    return Promise.all([this._postKey(publicKey), this._postKey(privateKey)])
      .then(() => {
        return this._zipKeys(publicKey, privateKey, serviceName);
      })
      .catch((err) => {
        throw err;
      });
  }

  _postKey(key) {
    const options = this._getOptions();
    const keyName = (key.search(/(PUBLIC)/) !== -1) ? 'publicKey' : 'privateKey';
    const form = new FormData();
    form.append('file', key);
    form.append('Name', keyName);
    form.append('MimeMajor', 'application');
    form.append('MimeMinor', 'x-pem-file');
    form.append('Extension', (keyName == 'publicKey') ? 'pub' : '');
    form.append('FileClass', 'MFS::File');
    options.headers = form.getHeaders();
    options.headers.Authorization = 'Basic ' + Buffer.from(this.FILESTORE_USERNAME + ':' + this.FILESTORE_PASSWORD).toString('base64');
    return new Promise((resolve, reject) => {
      let post = https.request(options, (res) => {
        let data = '';
        if (res.statusCode < 200 || res.statusCode > 299) {
          reject(new Error('File Storage API returned a status code outside of acceptable range: ' + res.statusCode));
        } else {
          res.setEncoding('utf8');
          res.on('data', (chunk) => {
            data += chunk;
          });
          res.on('error', (err) => {
            reject(err);
          });
          res.on('end', () => {
            if (data) {
              resolve(JSON.parse(data));
            } else {
              resolve();
            }
          });
        }
      });
      post.on('error', (err) => {
        reject(err);
      });
      form.pipe(post);
      post.end();
    });
  }

  _getOptions() {
    return {
      hostname: 'api.example.com',
      path: '/media/files/',
      method: 'POST',
    };
  }
}

module.exports = Platform;
And then my testing code looks like the following. I'm using mocha, sinon, chai, sinon-chai and nock.
const Platform = require('/usr/src/app/api/Services/Platform');
const crypto = require('crypto');
const fs = require('fs');
const nock = require('nock');
const yauzl = require('yauzl');

describe('Platform', function() {
  let platform;

  beforeEach(() => {
    platform = new Platform();
  });

  afterEach(() => {
    const list = fs.readdirSync('/usr/src/app/api/Services/data/');
    list.forEach((file) => {
      fs.unlink('/usr/src/app/api/Services/data/' + file, (err) => {
        if (err) throw err;
      });
    });
    nock.cleanAll();
  });

  after(() => {
    nock.restore();
  });

  describe('store', function() {
    it('should post each generated key to an external storage place', async function() {
      this.timeout(5000);
      // const stub = sinon.stub(platform, '_postKey').resolves();
      const scope = nock('https://api.example.com')
        .persist()
        .post('/media/files/', (body) => {
          // console.log(body);
        })
        .reply(200);
      let serviceName = 'test';
      let actual = await platform.store(serviceName)
        .catch((err) => {
          (() => { throw err; }).should.not.throw();
        });
      console.log(scope);
      // expect(stub.callCount).to.equal(2);
      expect(actual).to.be.a('string');
      expect(actual).to.include(serviceName + '.zip');
      // stub.reset();
    });
  });
});
The problem I am coming across is this error that is thrown when running my tests:
AssertionError: expected [Function] to not throw an error but 'Error:
Nock: No match for request {\n "method": "POST",\n "url":
"https://api.example.com/media/files/",\n "headers": {\n
"content-type": "multipart/form-data;
boundary=--------------------------363749230271182821931703",\n
"authorization": "Basic abcdef1224u38454857483hfjdhjgtuyrwyt="\n },\n
"body":
"----------------------------363749230271182821931703\r\nContent-Disposition: form-data; name=\"file\"\r\n\r\n-----BEGIN PUBLIC
KEY-----\nMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAq+QnVOYVjbrHIlAEsEoF\nZ4sTvqiB3sJGwecNhmgrUp9U8oqgoB50aW6VMsL71ATRyq9b3vMQKpjbU3R2RcOF\na6mlaBtBjxDGu2nEpGX++mtPCdD9HV7idvWgJ3XS0vGaCM//8ukY+VLBc1IB8CHC\nVj+8YOD5Y9TbdpwXR+0zCaiHwwd8MHIo1kBmQulIL7Avtjh55OmQZZtjO525lbqa\nWUZ24quDp38he2GjLDeTzHm9z1RjYJG6hS+Ui0s2xRUs6VAr7KFtiJmmjxPS9/vZ\nwQyFcz/R7AJKoEH8p7NE7nn/onbybJy+SWRxjXVH8afHkVoC65BiNoMiEzk1rIsx\ns92woHnq227JzYwFYcLD0W+TYjtGCB8+ks+QRIiV0pFJ3ja5VFIxjn9MxLntWcf2\nhsiYrmfJlqmpW1DMfZrtt41cJUFQwt7CpN72aix7btmd/q0syh6VVlQEHq/0nDky\nItv7dqyqZc9NNOMqK9/kXWhbq5cwS21mm+kTGas5KSdeIR0LH7uVtivB+LKum14e\nRDGascZcXZIVTbOeCxA6BD7LyaJPzXmlMy4spXlhGoDYyVRhpvv2K03Mg7ybiB4X\nEL1oJtiCFkRX5LtRJv0PCRJjaa3UvfyIuz8bHK4ANxIZqcwZwER+g02gw8iqNfMa\nDWXpfMJUU8TQuLGWZQaGJc8CAwEAAQ==\n-----END
PUBLIC
KEY-----\n\r\n----------------------------363749230271182821931703\r\nContent-Disposition: form-data;
name=\"Name\"\r\n\r\npublicKey\r\n----------------------------363749230271182821931703\r\nContent-Disposition: form-data;
name=\"MimeMajor\"\r\n\r\napplication\r\n----------------------------363749230271182821931703\r\nContent-Disposition: form-data;
name=\"MimeMinor\"\r\n\r\nx-pem-file\r\n----------------------------363749230271182821931703\r\nContent-Disposition: form-data;
name=\"Extension\"\r\n\r\npub\r\n----------------------------363749230271182821931703\r\nContent-Disposition: form-data;
name=\"FileClass\"\r\n\r\nMFS::File\r\n----------------------------363749230271182821931703--\r\n"\n}'
was thrown
I take it this is because nock expects me to fake out the request body in order to get a correct match? Is there a way of just matching requests made to that address, regardless of the body, so I can run my own checks on the form fields?

When the post method of a Nock Scope is passed a second argument, it is used to match against the body of the request.
Docs for specifying the request body
In your test, you're passing a function as the second argument but not returning true, so Nock does not consider it a match.
From the docs:
Function: nock will evaluate the function providing the request body object as first argument. Return true if it should be considered a match.
Since your goal is to assert on form fields of the request, your best approach is to leave the function there, do your assertions where the // console.log(body); line is, and add return true; at the end of the function.
You could also return true or false depending on whether the form fields match your assertions, but in my experience that makes the error output from the test convoluted. My preference is to use standard chai expect() calls and let the assertions bubble up errors before Nock continues with request matching.
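For example, a minimal sketch of what that interceptor could look like in your test (the field strings are taken from what your _postKey method appends; since the request is multipart, body arrives as the raw string):

const scope = nock('https://api.example.com')
  .persist()
  .post('/media/files/', (body) => {
    // body is the raw multipart payload, so assert on the fragments you care about
    expect(body).to.include('name="MimeMajor"');
    expect(body).to.include('x-pem-file');
    expect(body).to.include('MFS::File');
    return true; // always report a match so nock replies regardless of the body
  })
  .reply(200);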

Related

Jest Mocking Nested Singleton Dependency

I am fairly new to Jest and am struggling to understand the correct way of testing code that uses nested dependencies like MongoDB.
Here is my file hierarchy
src/getOrder/index.js <- code I want to test
src/singletons/index.js <- a singleton that will be created and used by getOrder/index.js
My getOrder/index.js looks something like this
const { getSuper, catchError } = require('../singletons');
module.exports = async function (context, req)
{
let response = {}
if (req.body.guid)
{
try
{
response = await getOrder(context, req);
}
catch (err)
{
response = catchError(context, err)
}
}
else
{
response.data = 'Missing Payload'
response.status = 400;
}
context.res =
{
status: response.status,
headers: { 'Content-Type': 'application/json' },
body: response
}
}
async function getOrder(context, req)
{
//get API singleton
let sd = await getSuper()
//get order
let res = await sd.get(`/orders/${req.body.guid}`);
//return
return { 'status': res.status, 'data': res.data };
}
And then my singletons/index.js looks like this
const axios = require('axios');
const https = require('https');
const MongoClient = require('mongodb').MongoClient;
const DateTime = require('luxon').DateTime
const dbOptions =
{
useUnifiedTopology: false,
useNewUrlParser: true
};
//singleton variables
//mongo db connection
let db;
//super connection
let sd = {};
async function getDb()
{
//establish db connection if one isn't present
if (!db)
{
const client = new MongoClient(process.env.dbUri, dbOptions);
db = await (await client.connect()).db(process.env.dbName)
}
return db;
}
async function getSuper()
{
if (sd.exp && sd.exp > DateTime.local().toString().substr(0, 19))
{
return sd.instance;
}
else
{
//get db connection
let db = await getDb()
//get token
let token = await db.collection('secrets').findOne({ 'name': process.env.SDAccessToken })
//set exp time
sd.exp = token.exp;
if (!sd.instance)
{
//create axios instance
sd.instance = axios.create({
baseURL: process.env.SDApiUrl,
httpsAgent: new https.Agent({ keepAlive: true }),
headers: { 'Content-Type': 'application/json' }
})
//set token
sd.instance.defaults.headers.common['Authorization'] = `Bearer ${token.value}`
}
else
{
//update token
sd.instance.defaults.headers.common['Authorization'] = `Bearer ${token.value}`
}
return sd.instance;
}
}
function catchError(context, err)
{
let response = {}
if (err.response && err.response.data)
{
response.status = err.response.status
response.data = err.response.data
context.log(response)
}
else
{
context.log(err)
response.status = 500
response.data = err
}
return response;
}
module.exports =
{
getDb,
getSuper,
catchError
}
Notice how, when the sd singleton is initialized, it also uses the db singleton (it calls getDb).
So I am unsure how to mock either of these from my test file. I am trying to use @shelf/jest-mongodb to mock my database; however, I am quite unsure how to turn that into a singleton behind a mocked getDb function that lives somewhere outside of the test (so it can be reused in other tests, etc.).
Essentially I figured out that as long as you have the mocking package I was using (@shelf/jest-mongodb) configured in the jest.config.js file and the jest-mongodb-config.js file, it will mock the db functionality wherever it is initialized, whether that's directly in your tests or five nested submodules deep in your project folder.
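For reference, a minimal configuration sketch along the lines of the package's documented preset (the MongoDB version and db name below are placeholders to adapt):

// jest.config.js
module.exports = {
  preset: '@shelf/jest-mongodb'
};

// jest-mongodb-config.js
module.exports = {
  mongodbMemoryServerOptions: {
    instance: { dbName: 'jest' },
    binary: { version: '4.0.3', skipMD5: true },
    autoStart: false
  }
};

With the preset in place, the in-memory server's connection string is exposed to your tests (via process.env.MONGO_URL in recent versions), so a mocked getDb can point at it no matter how deep in the module tree it is called from.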

Can "ERROR Unhandled Promise Rejection" be a problem in aws lambda?

Can this, at some point in the future, damage the flow it belongs to?
I have a Lambda that works behind an API Gateway WebSocket endpoint.
It simply takes a clientId and a message payload, queries all connections in DynamoDB for that clientId (multi-device realtime dashboard frontend), and updates all interested users.
It works fine when tested through "wscat" on the command line, but it is buggy in a real-world browser using the JS WebSocket API or the C# WebSocket API.
Does this exception have anything to do with it?
const AWS = require("aws-sdk");
let dynamo = new AWS.DynamoDB.DocumentClient();
require("aws-sdk/clients/apigatewaymanagementapi");
const ORDERS_TABLE = "ordersTable";
const successfullResponse = {
statusCode: 200,
body: "everything is alright"
};
module.exports.sendMessageHandler = (event, context, callback) => {
console.log(event);
sendMessageToAllConnectedClientDevices(event)
.then(data => {
console.log("sucesso", data);
callback(null, successfullResponse);
})
.catch(err => {
console.log("erro: ", err);
callback(null, JSON.stringify(err));
});
};
const sendMessageToAllConnectedClientDevices = async event => {
try {
const body = JSON.parse(event.body);
const { clientId } = body;
console.log(
"handler.sendMessageToAllConnectedClientDevices.clientId: ",
clientId
);
const connectionIds = await getConnectionIds(clientId);
return await Promise.all(
connectionIds.Items.map(connectionId => {
send(event, connectionId.connectionId);
})
);
} catch (error) {
console.log("erro sendMessageToAllConnectedClientDevices");
return error;
}
};
const getConnectionIds = async clientId => {
console.log("handler.getConnectionIds.clientId: ", clientId);
const params = {
TableName: ORDERS_TABLE,
// IndexName: "client_gsi",
FilterExpression: "clientId = :cliend_id",
// KeyConditionExpression: "clientId = :cliend_id",
ProjectionExpression: "connectionId",
ExpressionAttributeValues: {
":cliend_id": clientId
}
};
console.log("handler.getConnectionIds.params: ", JSON.stringify(params));
const data = await dynamo.scan(params).promise();
return data;
};
const send = async (event, connectionId) => {
const body = JSON.parse(event.body);
const postData = body.data;
const endpoint =
event.requestContext.domainName + "/" + event.requestContext.stage;
const apigwManagementApi = new AWS.ApiGatewayManagementApi({
apiVersion: "2018-11-29",
endpoint: endpoint
});
const params = {
ConnectionId: connectionId,
Data: postData
};
return await apigwManagementApi.postToConnection(params).promise();
};
ERROR Unhandled Promise Rejection
I think the problem is with API Gateway; check how you are handling the information passed through to the Lambda function (the browser sends some extra information compared to the command-line call).
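As an illustrative sketch (not a confirmed fix), you could log and guard the incoming payload so that a differently shaped browser message fails loudly instead of rejecting deep inside the handler:

const sendMessageToAllConnectedClientDevices = async event => {
  // compare what wscat sends vs. what the browser sends
  console.log("raw body from client:", event.body);
  let body;
  try {
    body = JSON.parse(event.body);
  } catch (err) {
    throw new Error("Request body is not valid JSON: " + event.body);
  }
  if (!body || !body.clientId) {
    throw new Error("Missing clientId in payload");
  }
  // ...continue with body.clientId as before...
};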

Always throws registration failed error while subscribing push notifications

I am working on a solution with which I can send desktop push notifications to subscribed clients.
I have created a basic solution where, whenever the user clicks a button, I ask them whether they want to allow notifications for my app or not.
I am getting a "Registration failed - permission denied" error whenever I click the button for the first time.
Because of that, I am not able to get the required endpoint to save on the backend.
Here is my code
index.html
<html>
<head>
<title>PUSH NOT</title>
<script src="index.js"></script>
</head>
<body>
<button onclick="main()">Ask Permission</button>
</body>
</html>
index.js
const check = () => {
if (!("serviceWorker" in navigator)) {
throw new Error("No Service Worker support!");
} else {
console.log("service worker supported")
}
if (!("PushManager" in window)) {
throw new Error("No Push API Support!");
} else {
console.log("PushManager worker supported")
}
};
const registerServiceWorker = async () => {
const swRegistration = await navigator.serviceWorker.register("/service.js?"+Math.random());
return swRegistration;
};
const requestNotificationPermission = async () => {
const permission = await window.Notification.requestPermission();
// value of permission can be 'granted', 'default', 'denied'
// granted: user has accepted the request
// default: user has dismissed the notification permission popup by clicking on x
// denied: user has denied the request.
if (permission !== "granted") {
throw new Error("Permission not granted for Notification");
}
};
const main = async () => {
check();
const swRegistration = await registerServiceWorker();
const permission = await requestNotificationPermission();
};
// main(); we will not call main in the beginning.
service.js
// urlB64ToUint8Array is a magic function that will encode the base64 public key
// to Array buffer which is needed by the subscription option
const urlB64ToUint8Array = base64String => {
const padding = "=".repeat((4 - (base64String.length % 4)) % 4);
const base64 = (base64String + padding)
.replace(/\-/g, "+")
.replace(/_/g, "/");
const rawData = atob(base64);
const outputArray = new Uint8Array(rawData.length);
for (let i = 0; i < rawData.length; ++i) {
outputArray[i] = rawData.charCodeAt(i);
}
return outputArray;
};
const saveSubscription = async subscription => {
console.log("Save Sub")
const SERVER_URL = "http://localhost:4000/save-subscription";
const response = await fetch(SERVER_URL, {
method: "post",
headers: {
"Content-Type": "application/json"
},
body: JSON.stringify(subscription)
});
return response.json();
};
self.addEventListener("activate", async () => {
try {
const applicationServerKey = urlB64ToUint8Array(
"BFPtpIVOcn2y25il322-bHQIqXXm-OACBtFLdo0EnzGfs-jIGXgAzjY6vNapPb4MM1Z1WuTBUo0wcIpQznLhVGM"
);
const options = { applicationServerKey, userVisibleOnly: true };
const subscription = await self.registration.pushManager.subscribe(options);
console.log(JSON.stringify(subscription))
const response = await saveSubscription(subscription);
} catch (err) {
console.log(err.code)
console.log(err.message)
console.log(err.name)
console.log('Error', err)
}
});
self.addEventListener("push", function(event) {
if (event.data) {
console.log("Push event!! ", event.data.text());
} else {
console.log("Push event but no data");
}
});
I have also created a bit of a backend:
const express = require("express");
const cors = require("cors");
const bodyParser = require("body-parser");
const webpush = require('web-push')
const app = express();
app.use(cors());
app.use(bodyParser.json());
const port = 4000;
app.get("/", (req, res) => res.send("Hello World!"));
const dummyDb = { subscription: null }; //dummy in memory store
const saveToDatabase = async subscription => {
// Since this is a demo app, I am going to save this in a dummy in memory store. Do not do this in your apps.
// Here you should be writing your db logic to save it.
dummyDb.subscription = subscription;
};
// The new /save-subscription endpoint
app.post("/save-subscription", async (req, res) => {
const subscription = req.body;
await saveToDatabase(subscription); //Method to save the subscription to Database
res.json({ message: "success" });
});
const vapidKeys = {
publicKey:
'BFPtpIVOcn2y25il322-bHQIqXXm-OACBtFLdo0EnzGfs-jIGXgAzjY6vNapPb4MM1Z1WuTBUo0wcIpQznLhVGM',
privateKey: 'mHSKS-uwqAiaiOgt4NMbzYUb7bseXydmKObi4v4bN6U',
}
webpush.setVapidDetails(
'mailto:janakprajapati90@email.com',
vapidKeys.publicKey,
vapidKeys.privateKey
)
const sendNotification = (subscription, dataToSend='') => {
webpush.sendNotification(subscription, dataToSend)
}
app.get('/send-notification', (req, res) => {
const subscription = {endpoint:"https://fcm.googleapis.com/fcm/send/dLjyDYvI8yo:APA91bErM4sn_wRIW6xCievhRZeJcIxTiH4r_oa58JG9PHUaHwX7hQlhMqp32xEKUrMFJpBTi14DeOlECrTsYduvHTTnb8lHVUv3DkS1FOT41hMK6zwMvlRvgWU_QDDS_GBYIMRbzjhg",expirationTime:null,keys:{"p256dh":"BE6kUQ4WTx6v8H-wtChgKAxh3hTiZhpfi4DqACBgNRoJHt44XymOWFkQTvRPnS_S9kmcOoDSgOVD4Wo8qDQzsS0",auth:"CfO4rOsisyA6axdxeFgI_g"}} //get subscription from your databse here.
const message = 'Hello World'
sendNotification(subscription, message)
res.json({ message: 'message sent' })
})
app.listen(port, () => console.log(`Example app listening on port ${port}!`));
Please help me
Try the following code:
index.js
const check = () => {
if (!("serviceWorker" in navigator)) {
throw new Error("No Service Worker support!");
} else {
console.log("service worker supported")
}
if (!("PushManager" in window)) {
throw new Error("No Push API Support!");
} else {
console.log("PushManager worker supported")
}
};
const saveSubscription = async subscription => {
console.log("Save Sub")
const SERVER_URL = "http://localhost:4000/save-subscription";
const response = await fetch(SERVER_URL, {
method: "post",
headers: {
"Content-Type": "application/json"
},
body: JSON.stringify(subscription)
});
return response.json();
};
const urlB64ToUint8Array = base64String => {
const padding = "=".repeat((4 - (base64String.length % 4)) % 4);
const base64 = (base64String + padding)
.replace(/\-/g, "+")
.replace(/_/g, "/");
const rawData = atob(base64);
const outputArray = new Uint8Array(rawData.length);
for (let i = 0; i < rawData.length; ++i) {
outputArray[i] = rawData.charCodeAt(i);
}
return outputArray;
};
const registerServiceWorker = async () => {
return navigator.serviceWorker.register("service.js?"+Math.random()).then((swRegistration) => {
console.log(swRegistration);
return swRegistration;
});
};
const requestNotificationPermission = async (swRegistration) => {
return window.Notification.requestPermission().then(() => {
const applicationServerKey = urlB64ToUint8Array(
"BFPtpIVOcn2y25il322-bHQIqXXm-OACBtFLdo0EnzGfs-jIGXgAzjY6vNapPb4MM1Z1WuTBUo0wcIpQznLhVGM"
);
const options = { applicationServerKey, userVisibleOnly: true };
return swRegistration.pushManager.subscribe(options).then((pushSubscription) => {
console.log(pushSubscription);
return pushSubscription;
});
});
};
const main = async () => {
check();
const swRegistration = await registerServiceWorker();
const subscription = await requestNotificationPermission(swRegistration);
// saveSubscription(subscription);
};
service.js
self.addEventListener("push", function(event) {
if (event.data) {
console.log("Push event!! ", event.data.text());
} else {
console.log("Push event but no data");
}
});
I can think of three reasons that the permission is denied:
1) Your site is not on HTTPS (including localhost that is not on HTTPS); as far as I know, Chrome's default behaviour is to block notifications on HTTP sites. If that's the case, click on the info icon near the URL, then click on Site settings, then change Notifications to Ask.
2) If you are on Safari: Safari uses the deprecated interface of requestPermission, that is to say the value is not returned through a promise but through a callback, so instead of
Notification.requestPermission().then(res => console.log(res))
it is
Notification.requestPermission(res => console.log(res))
3) Your browser settings are blocking notification requests globally. To ensure that this is not your problem, run the following code in the console (on a secure HTTPS site):
Notification.requestPermission().then(res => console.log(res))
If you receive the permission prompt, then the problem is something else; if you don't, make sure that the browser is not blocking notification requests.
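If you need to handle both API shapes in one place, a small feature-detection sketch (purely illustrative) could look like this:

// Resolve with the permission result whether the browser implements the
// promise-based or the older callback-based requestPermission.
function askNotificationPermission() {
  return new Promise((resolve, reject) => {
    const maybePromise = Notification.requestPermission(resolve);
    if (maybePromise) {
      maybePromise.then(resolve, reject);
    }
  });
}

askNotificationPermission().then(result => console.log(result));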

try to resize the stream image with sharp Node.js

I am trying to resize the width and height of an image stream coming from the user to the server with the sharp function, but nothing happens to the image. It keeps its original size. How should I use the sharp function so that I can make the image smaller or bigger?
Please help me.
This is what my code looks like:
'use strict';
const builder = require('botbuilder');
const restify = require('restify');
const utils = require('./utils.js');
const request = require('request');
const sharp = require('sharp');
const fs = require('fs');
const resizeImage = require('resize-image');
// Create chat connector for communicating with the Bot Framework Service
const connector = new builder.ChatConnector({
appId: process.env.MICROSOFT_APP_ID,
appPassword: process.env.MICROSOFT_APP_PASSWORD
});
// Setup Restify Server
const server = restify.createServer();
server.listen(process.env.port || process.env.PORT || 3978, () => {
console.log(`${server.name} listening to ${server.url}`);
});
// Listen for messages from users
server.post('/api/messages', connector.listen());
const bot = new builder.UniversalBot(connector);
// default dialog
//resize the images
//Sends greeting message when the bot is first added to a conversation
bot.on('conversationUpdate', function (message) {
if (message.membersAdded) {
message.membersAdded.forEach(function (identity) {
if (identity.id === message.address.bot.id) {
var reply = new builder.Message()
.address(message.address)
.text('Hi, please send a screenshot for the error');
bot.send(reply);
}
});
}
}
);
bot.dialog('/', function(session) {
if(utils.hasImageAttachment(session)){
//--others
var stream = utils.getImageStreamFromMessage(session.message);
***//resize the image stream
sharp('stream')
.resize(100, 100)
.toFile('stream', function(err) {
// output.jpg is a 200 pixels wide and 200 pixels high image
// containing a scaled and cropped version of input.jpg
});
//***
const params = {
'language': 'en',
'detectOrientation': 'true',
};
const options = {
uri: uriBase,
qs: params,
body: stream ,
headers: {
'Content-Type': 'application/octet-stream',
'Ocp-Apim-Subscription-Key' : subscriptionKey
}
};
request.post(options, (error, response, body) => {
if (error) {
console.log('Error: ', error);
return;
}
const obj = JSON.parse(body);
console.log(obj);
//------------ get the texts from json as string
if(obj.regions =="" ){
session.send('OOOOPS I CANNOT READ ANYTHING IN THISE IMAGE :(');
}else{
let buf = ''
if(obj && obj.regions) {
obj.regions.forEach((a, b, c) => {
if(a && a.lines) {
a.lines.forEach((p, q, r) => {
if(p && p.words) {
p.words.forEach((x, y, z) => {
buf += ` ${x.text} `
})
}
})
}
})
}
session.send(buf);
}
});
} else {
session.send('nothing');
}
});
Thanks
According to the Sharp docs for toFile(), this function returns a promise when no callback is provided.
So there is no I/O blocking while toFile() executes; the following code keeps running, which in your snippet is request.post, and at that point the image may not have been modified yet.
You can either use promise-style code flow, like:
sharp('stream')
  .resize(100, 100)
  .toFile('stream')
  .then((info) => {
    // do request post
  })
  .catch((err) => {
    // handle error
  });
or put the request code inside the callback function of toFile(), like:
sharp('stream')
  .resize(100, 100)
  .toFile('stream', function (err, info) {
    // do request post
  });
Your use of sharp('stream') doesn't work because the function is looking for a string as its input and you are trying to feed it a stream. Per the docs, you need to read from the readableStream and then process the image.
I tested the example below (locally) and it runs. As is, it will save the image file on the server in the same location as the app.js file. The commented-out ".pipe(stream)" creates a writable stream you could then access at a later point if that is what you need; in that case, you wouldn't use .toFile().
Hope this helps!
bot.dialog('/', function (session) {
if (utils.hasImageAttachment(session)) {
//--others
var stream = utils.getImageStreamFromMessage(session.message);
var transformer = sharp()
.resize(100)
.jpeg()
.toFile('image.jpg', function (err) {
if (err)
console.log(err);
})
.on('info', function (err, info) {
session.send('Image height is ' + info.height);
});
stream.pipe(transformer); //.pipe(stream);
const params = {
'language': 'en',
'detectOrientation': 'true',
};
const options = {
uri: "https://smba.trafficmanager.net/apis",
qs: params,
body: stream,
headers: {
'Content-Type': 'application/octet-stream',
'Ocp-Apim-Subscription-Key': ""
}
};
request.post(options, (error, response, body) => {
if (error) {
console.log('Error: ', error);
return;
}
console.log(body);
const obj = JSON.parse(body);
console.log(body);
//------------ get the texts from json as string
if (obj.regions == "") {
session.send('OOOOPS I CANNOT READ ANYTHING IN THISE IMAGE :(');
} else {
let buf = ''
if (obj && obj.regions) {
obj.regions.forEach((a, b, c) => {
if (a && a.lines) {
a.lines.forEach((p, q, r) => {
if (p && p.words) {
p.words.forEach((x, y, z) => {
buf += ` ${x.text} `
})
}
})
}
})
}
session.send(buf);
}
});
} else {
session.send('nothing');
}
});
I have used Sharp in the way below, which works perfectly fine in my case.
sharp('stream')
.png()
.resize(100, 100)
.toBuffer((err, buffer, info) => {
if (err)
console.log(err);
if (buffer) {
return buffer;
}
});
Sharp's toFile() saves the output to a file, so you give it a file name as an argument; toBuffer() instead returns the output as a Buffer object.
Hope it helps!
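Tying this back to the bot code in the question, here is a minimal sketch (assuming stream is the readable image stream from utils.getImageStreamFromMessage) that pipes the stream through sharp and waits for the resized buffer before posting it:

// sharp() with no input argument returns a transform stream we can pipe into
const resizeToBuffer = (inputStream) =>
  new Promise((resolve, reject) => {
    const transformer = sharp().resize(100, 100).png();
    inputStream.pipe(transformer);
    transformer.toBuffer((err, buffer) => (err ? reject(err) : resolve(buffer)));
  });

// usage: const resized = await resizeToBuffer(stream); then send `resized` as the request body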

node.js check if a remote URL exists

How do I check to see if a URL exists without pulling it down? I use the following code, but it downloads the whole file. I just need to check that it exists.
app.get('/api/v1/urlCheck/', function (req,res) {
var url=req.query['url'];
var request = require('request');
request.get(url, {timeout: 30000, json:false}, function (error, result) {
res.send(result.body);
});
});
Appreciate any help!
Try this:
var http = require('http'),
options = {method: 'HEAD', host: 'stackoverflow.com', port: 80, path: '/'},
req = http.request(options, function(r) {
console.log(JSON.stringify(r.headers));
});
req.end();
2021 update
Use url-exist:
import urlExist from 'url-exist';
const exists = await urlExist('https://google.com');
// Handle result
console.log(exists);
2020 update
request has now been deprecated which has brought down url-exists with it. Use url-exist instead.
const urlExist = require("url-exist");
(async () => {
const exists = await urlExist("https://google.com");
// Handle result
console.log(exists)
})();
If you (for some reason) need to use it synchronously, you can use url-exist-sync.
2019 update
Since 2017, request and callback-style functions (from url-exists) have fallen out of use.
However, there is a fix. Swap url-exists for url-exist.
So instead of using:
const urlExists = require("url-exists")
urlExists("https://google.com", (_, exists) => {
// Handle result
console.log(exists)
})
Use this:
const urlExist = require("url-exist");
(async () => {
const exists = await urlExist("https://google.com");
// Handle result
console.log(exists)
})();
Original answer (2017)
If you have access to the request package, you can try this:
const request = require("request")
const urlExists = url => new Promise((resolve, reject) => request.head(url).on("response", res => resolve(res.statusCode.toString()[0] === "2")))
urlExists("https://google.com").then(exists => console.log(exists)) // true
Most of this logic is already provided by url-exists.
Thanks! Here it is, encapsulated in a function (updated on 5/30/17 with the require outside):
var http = require('http'),
  url = require('url');

exports.checkUrlExists = function (Url, callback) {
  var options = {
    method: 'HEAD',
    host: url.parse(Url).host,
    port: 80,
    path: url.parse(Url).pathname
  };
  var req = http.request(options, function (r) {
    callback(r.statusCode == 200);
  });
  req.end();
}
It's very quick (I get about 50 ms, but it will depend on your connection and the server speed). Note that it's also quite basic, i.e. it won't handle redirects very well...
Simply use the url-exists npm package to test whether a URL exists or not:
var urlExists = require('url-exists');
urlExists('https://www.google.com', function(err, exists) {
console.log(exists); // true
});
urlExists('https://www.fakeurl.notreal', function(err, exists) {
console.log(exists); // false
});
It seems a lot of people have recommended a library, but url-exist pulls in a data-fetching dependency, so here is a clone of it using only native Node modules:
const http = require('http');
const { parse, URL } = require('url');
// https://github.com/sindresorhus/is-url-superb/blob/main/index.js
function isUrl(str) {
if (typeof str !== 'string') {
return false;
}
const trimmedStr = str.trim();
if (trimmedStr.includes(' ')) {
return false;
}
try {
new URL(str); // eslint-disable-line no-new
return true;
} catch {
return false;
}
}
// https://github.com/Richienb/url-exist/blob/master/index.js
function urlExists(url) {
return new Promise((resolve) => {
if (!isUrl(url)) {
resolve(false);
}
const options = {
method: 'HEAD',
host: parse(url).host,
path: parse(url).pathname,
port: 80,
};
const req = http.request(options, (res) => {
resolve(res.statusCode < 400 || res.statusCode >= 500);
});
req.end();
});
}
urlExists(
'https://stackoverflow.com/questions/26007187/node-js-check-if-a-remote-url-exists'
).then(console.log);
This might also appeal to those who'd rather not install a dependency for a very simple purpose.
Requiring modules inside functions is the wrong way to do it in Node.
The following ES6 method supports all correct HTTP statuses and, of course, returns an error if you have a bad host like fff.kkk:
const http = require('http');

function checkUrlExists(host, cb) {
  http.request({ method: 'HEAD', host, port: 80, path: '/' }, (r) => {
    cb(null, r.statusCode >= 200 && r.statusCode < 400);
  }).on('error', cb).end();
}
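A usage sketch (the host below is just an example):

checkUrlExists('stackoverflow.com', (err, exists) => {
  if (err) return console.error('lookup failed:', err.message);
  console.log(exists); // true when the host answers with a 2xx/3xx status
});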
Take a look at the url-exists npm package https://www.npmjs.com/package/url-exists
Setting up:
$ npm install url-exists
Usage:
const urlExists = require('url-exists');
urlExists('https://www.google.com', function(err, exists) {
console.log(exists); // true
});
urlExists('https://www.fakeurl.notreal', function(err, exists) {
console.log(exists); // false
});
You can also promisify it to take advantage of await and async:
const util = require('util');
const urlExists = util.promisify(require('url-exists'));
let isExists = await urlExists('https://www.google.com'); // true
isExists = await urlExists('https://www.fakeurl.notreal'); // false
Happy coding!
Using the other responses as reference, here's a promisified version which also works with https uris (for node 6+):
const http = require('http');
const https = require('https');
const url = require('url');
const request = (opts = {}, cb) => {
const requester = opts.protocol === 'https:' ? https : http;
return requester.request(opts, cb);
};
module.exports = target => new Promise((resolve, reject) => {
let uri;
try {
uri = url.parse(target);
} catch (err) {
reject(new Error(`Invalid url ${target}`));
}
const options = {
method: 'HEAD',
host: uri.host,
protocol: uri.protocol,
port: uri.port,
path: uri.path,
timeout: 5 * 1000,
};
const req = request(options, (res) => {
const { statusCode } = res;
if (statusCode >= 200 && statusCode < 300) {
resolve(target);
} else {
reject(new Error(`Url ${target} not found.`));
}
});
req.on('error', reject);
req.end();
});
It can be used like this:
const urlExists = require('./url-exists')
urlExists('https://www.google.com')
.then(() => {
console.log('Google exists!');
})
.catch(() => {
console.error('Invalid url :(');
});
I see in your code that you are already using the request library, so just:
const request = require('request');
request.head('http://...', (error, res) => {
const exists = !error && res.statusCode === 200;
});
If you're using axios, you can make a HEAD request like this:
const checkUrl = async (url) => {
  try {
    await axios.head(url);
    return true;
  } catch (error) {
    if (error.response && error.response.status >= 400) {
      return false;
    }
  }
};
You may want to customise the status code range for your requirements; e.g. 401 (Unauthorized) could still mean the URL exists but you don't have access to it.
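For instance, a hedged variant (names are illustrative) that treats auth-protected URLs as existing:

const checkUrlLoose = async (url) => {
  try {
    await axios.head(url);
    return true;
  } catch (error) {
    const status = error.response ? error.response.status : 0;
    // 401/403 usually mean the resource exists but access is restricted
    return status === 401 || status === 403;
  }
};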
my awaitable async ES6 solution, doing a HEAD request:
// options for the http request
let options = {
host: 'google.de',
//port: 80, optional
//path: '/' optional
}
const http = require('http');
// creating a promise (all promises can be awaited)
let isOk = await new Promise(resolve => {
// trigger the request ('HEAD' or 'GET' - you should check if you get the expected result for a HEAD request first (curl))
// then trigger the callback
http.request({method:'HEAD', host:options.host, port:options.port, path: options.path}, result =>
resolve(result.statusCode >= 200 && result.statusCode < 400)
).on('error', resolve).end();
});
// check if the result was NOT ok
if (!isOk)
console.error('could not get: ' + options.host);
else
console.info('url exists: ' + options.host);
Currently the request module is deprecated, as @schlicki pointed out. One of the alternatives in the link he posted is got:
const got = require('got');
(async () => {
try {
const response = await got('https://www.nodesource.com/');
console.log(response.body);
//=> '<!doctype html> ...'
} catch (error) {
console.log(error.response.body);
//=> 'Internal server error ...'
}
})();
But with this method, you will get the whole HTML page in response.body. In addition, got may have many more functionalities than you need. That's why I wanted to add another alternative I found to the list. As I was using the portscanner library, I could use it for the same aim without downloading the content of the website. You may need to use port 443 as well if the website works over HTTPS.
var portscanner = require('portscanner')
// Checks the status of a single port
portscanner.checkPortStatus(80, 'www.google.es', function(error, status) {
// Status is 'open' if currently in use or 'closed' if available
console.log(status)
})
Anyway, the closest approach is the url-exist module, as @Richie Bendall explains in his post. I just wanted to add another alternative.
danwarfel's answer got me some of the way there but it's still not quite right: it leaks memory, doesn't follow redirects, doesn't support https (likely what you want) and doesn't actually answer the question - it just logs headers! Here's my version:
import * as https from "https";
// Return true if the URL is found and returns 200. Returns false if there are
// network errors or the status code is not 200. It will throw an exception
// for configuration errors (e.g. malformed URLs).
//
// Note this only supports https, not http.
//
async function isUrlFound(url: string, maxRedirects = 20): Promise<boolean> {
const [statusCode, location] = await new Promise<[number?, string?]>(
(resolve, _reject) => {
const req = https.request(
url,
{
method: "HEAD",
},
response => {
// This is necessary to avoid memory leaks.
response.on("readable", () => response.read());
resolve([response.statusCode, response.headers["location"]]);
},
);
req.on("error", _err => resolve([undefined, undefined]));
req.end();
},
);
if (
statusCode !== undefined &&
statusCode >= 300 &&
statusCode < 400 &&
location !== undefined &&
maxRedirects > 0
) {
return isUrlFound(location, maxRedirects - 1);
}
return statusCode === 200;
}
Minimally tested but it seems to work.
