I am trying to resize the width and height of an image streamed from the user to the server with the sharp function, but nothing happens to the image. It keeps its original size. How should I use the sharp function so that I can make the image smaller or bigger?
Please help me
This is what my code looks like:
'use strict';
const builder = require('botbuilder');
const restify = require('restify');
const utils = require('./utils.js');
const request = require('request');
const sharp = require('sharp');
const fs = require('fs');
const resizeImage = require('resize-image');
// Create chat connector for communicating with the Bot Framework Service
const connector = new builder.ChatConnector({
appId: process.env.MICROSOFT_APP_ID,
appPassword: process.env.MICROSOFT_APP_PASSWORD
});
// Setup Restify Server
const server = restify.createServer();
server.listen(process.env.port || process.env.PORT || 3978, () => {
console.log(`${server.name} listening to ${server.url}`);
});
// Listen for messages from users
server.post('/api/messages', connector.listen());
const bot = new builder.UniversalBot(connector);
// default dialog
//resize the images
//Sends greeting message when the bot is first added to a conversation
bot.on('conversationUpdate', function (message) {
if (message.membersAdded) {
message.membersAdded.forEach(function (identity) {
if (identity.id === message.address.bot.id) {
var reply = new builder.Message()
.address(message.address)
.text('Hi, please send a screenshot for the error');
bot.send(reply);
}
});
}
}
);
bot.dialog('/', function(session) {
if(utils.hasImageAttachment(session)){
//--others
var stream = utils.getImageStreamFromMessage(session.message);
//resize the image stream
sharp('stream')
    .resize(100, 100)
    .toFile('stream', function(err) {
        // writes a 100 pixels wide and 100 pixels high image
        // containing a scaled and cropped version of the input
    });
const params = {
'language': 'en',
'detectOrientation': 'true',
};
const options = {
uri: uriBase,
qs: params,
body: stream ,
headers: {
'Content-Type': 'application/octet-stream',
'Ocp-Apim-Subscription-Key' : subscriptionKey
}
};
request.post(options, (error, response, body) => {
if (error) {
console.log('Error: ', error);
return;
}
const obj = JSON.parse(body);
console.log(obj);
//------------ get the texts from json as string
if(obj.regions =="" ){
session.send('OOOOPS I CANNOT READ ANYTHING IN THIS IMAGE :(');
}else{
let buf = ''
if(obj && obj.regions) {
obj.regions.forEach((a, b, c) => {
if(a && a.lines) {
a.lines.forEach((p, q, r) => {
if(p && p.words) {
p.words.forEach((x, y, z) => {
buf += ` ${x.text} `
})
}
})
}
})
}
session.send(buf);
}
});
} else {
session.send('nothing');
}
});
Thanks
According to the Sharp docs for toFile(), this function returns a promise when no callback is provided.
So there is no I/O blocking while toFile() executes; the following code, the request.post in your snippet, keeps running at a point when the image may not have been modified yet.
You can either use a promise-style code flow, like:
sharp('stream')
.resize(100, 100)
.toFile('stream')
.then((info)=>{
//do request post
})
or put the request code inside the callback function of toFile(), like:
sharp('stream')
.resize(100, 100)
.toFile('stream', function(err, info){
//do request post
})
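On a Node version with async/await you can also just await the promise that toFile() returns, so the post only fires after the resize finishes. A sketch (keeping the same 'stream' placeholder input, which the next answer explains is itself a problem):
async function resizeThenPost() {
    try {
        const info = await sharp('stream') // placeholder input, see the answer below
            .resize(100, 100)
            .toFile('output.jpg');
        // do request post here, after the resize has completed
        console.log(`resized to ${info.width}x${info.height}`);
    } catch (err) {
        console.log(err);
    }
}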
Your use of sharp('stream') doesn't work because sharp treats a string argument as a file path, so it is looking for a file literally named 'stream' rather than reading your stream variable. Per the docs, you need to read from the readableStream and then process the image.
I tested the example below locally and it runs. As is, it will save the image file on the server in the location of the app.js file. The commented-out ".pipe(stream)" creates a writableStream you could then access at a later point if that is what you need. In that case, you wouldn't use .toFile().
Hope this helps!
bot.dialog('/', function (session) {
if (utils.hasImageAttachment(session)) {
//--others
var stream = utils.getImageStreamFromMessage(session.message);
var transformer = sharp()
.resize(100)
.jpeg()
.toFile('image.jpg', function (err) {
if (err)
console.log(err);
})
.on('info', function (info) { // the 'info' event passes the info object as its only argument
session.send('Image height is ' + info.height);
});
stream.pipe(transformer); //.pipe(stream);
const params = {
'language': 'en',
'detectOrientation': 'true',
};
const options = {
uri: "https://smba.trafficmanager.net/apis",
qs: params,
body: stream,
headers: {
'Content-Type': 'application/octet-stream',
'Ocp-Apim-Subscription-Key': ""
}
};
request.post(options, (error, response, body) => {
if (error) {
console.log('Error: ', error);
return;
}
console.log(body);
const obj = JSON.parse(body);
console.log(obj);
//------------ get the texts from json as string
if (obj.regions == "") {
session.send('OOOOPS I CANNOT READ ANYTHING IN THIS IMAGE :(');
} else {
let buf = ''
if (obj && obj.regions) {
obj.regions.forEach((a, b, c) => {
if (a && a.lines) {
a.lines.forEach((p, q, r) => {
if (p && p.words) {
p.words.forEach((x, y, z) => {
buf += ` ${x.text} `
})
}
})
}
})
}
session.send(buf);
}
});
} else {
session.send('nothing');
}
});
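Note that the snippet above still sends the original stream as the request body. If the OCR call should receive the resized image instead, one option (a sketch only, reusing uriBase, subscriptionKey and the utils helpers from the question) is to collect sharp's output into a buffer and post that:
var stream = utils.getImageStreamFromMessage(session.message);
var transformer = sharp().resize(100, 100).jpeg();
stream.pipe(transformer);
// toBuffer() hands over the resized image bytes once sharp has finished
transformer.toBuffer(function (err, buffer) {
    if (err) {
        console.log(err);
        return;
    }
    const options = {
        uri: uriBase, // from the question's config
        qs: { 'language': 'en', 'detectOrientation': 'true' },
        body: buffer, // the resized image, not the raw stream
        headers: {
            'Content-Type': 'application/octet-stream',
            'Ocp-Apim-Subscription-Key': subscriptionKey // from the question's config
        }
    };
    request.post(options, (error, response, body) => {
        // parse and walk regions/lines/words as in the original code
    });
});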
I have used Sharp in the way shown below in my case, and it works perfectly fine.
sharp('stream')
.png()
.resize(100, 100)
.toBuffer((err, buffer, info) => {
if (err)
console.log(err);
if (buffer) {
return buffer;
}
});
Sharp's toFile() saves the output to a file, so you give a file name as its argument; toBuffer() instead returns the image data in a buffer object.
Hope it helps!
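One caveat with the snippet above: returning the buffer from inside the toBuffer() callback doesn't deliver it anywhere, so consume the result in the callback or use the promise form instead. A sketch, assuming input is a Buffer or a file path rather than the literal string 'stream':
sharp(input) // input: a Buffer or file path (a string is treated as a path)
    .png()
    .resize(100, 100)
    .toBuffer() // returns a promise when no callback is given
    .then((buffer) => {
        // use the resized buffer here, e.g. as a request body
    })
    .catch((err) => console.log(err));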
I've written an application in node.js consisting of a server and a client for storing/uploading files.
For reproduction purposes, here's a proof of concept using a null write stream in the server and a random read stream in the client.
Using node.js 12.19.0 on Ubuntu 18.04. The client depends on node-fetch v2.6.1.
The issue I have is that after 60 seconds the connection is reset, and I haven't found a way to make this work.
Any ideas are appreciated.
Thank you.
testServer.js
// -- DevNull Start --
var util = require('util')
, stream = require('stream')
, Writable = stream.Writable
, setImmediate = setImmediate || function (fn) { setTimeout(fn, 0) }
;
util.inherits(DevNull, Writable);
function DevNull (opts) {
if (!(this instanceof DevNull)) return new DevNull(opts);
opts = opts || {};
Writable.call(this, opts);
}
DevNull.prototype._write = function (chunk, encoding, cb) {
setImmediate(cb);
}
// -- DevNull End --
const http = require('http');
const server = http.createServer();
server.on('request', async (req, res) => {
try {
req.socket.on('end', function() {
console.log('SOCKET END: other end of the socket sends a FIN packet');
});
req.socket.on('timeout', function() {
console.log('SOCKET TIMEOUT');
});
req.socket.on('error', function(error) {
console.log('SOCKET ERROR: ' + JSON.stringify(error));
});
req.socket.on('close', function(had_error) {
console.log('SOCKET CLOSED. IT WAS ERROR: ' + had_error);
});
const writeStream = DevNull();
const promise = new Promise((resolve, reject) => {
req.on('end', resolve);
req.on('error', reject);
});
req.pipe(writeStream);
await promise;
res.writeHead(200);
res.end('OK');
} catch (err) {
res.writeHead(500);
res.end(err.message);
}
});
server.listen(8081)
.on('listening', () => { console.log('Listening on port', server.address().port); });
testClient.js
// -- RandomStream Start --
var crypto = require('crypto');
var stream = require('stream');
var util = require('util');
var Readable = stream.Readable;
function RandomStream(length, options) {
// allow calling with or without new
if (!(this instanceof RandomStream)) {
return new RandomStream(length, options);
}
// init Readable
Readable.call(this, options);
// save the length to generate
this.lenToGenerate = length;
}
util.inherits(RandomStream, Readable);
RandomStream.prototype._read = function (size) {
if (!size) size = 1024; // default size
var ready = true;
while (ready) { // only cont while push returns true
if (size > this.lenToGenerate) { // only this left
size = this.lenToGenerate;
}
if (size) {
ready = this.push(crypto.randomBytes(size));
this.lenToGenerate -= size;
}
// when done, push null and exit loop
if (!this.lenToGenerate) {
this.push(null);
ready = false;
}
}
};
// -- RandomStream End --
const fetch = require('node-fetch');
const runSuccess = async () => { // Runs in ~35 seconds
const t = Date.now();
try {
const resp = await fetch('http://localhost:8081/test', {
method: 'PUT',
body: new RandomStream(256e6) // new RandomStream(1024e6)
});
const data = await resp.text();
console.log(Date.now() - t, data);
} catch (err) {
console.warn(Date.now() - t, err);
}
};
const runFail = async () => { // Fails after 60 seconds
const t = Date.now();
try {
const resp = await fetch('http://localhost:8081/test', {
method: 'PUT',
body: new RandomStream(1024e6)
});
const data = await resp.text();
console.log(Date.now() - t, data);
} catch (err) {
console.warn(Date.now() - t, err);
}
};
// runSuccess().then(() => process.exit(0));
runFail().then(() => process.exit(0));
I tried (unsuccessfully) to reproduce what you are seeing based on your code example: the success call does not complete in ~35 seconds for me, nor is the error thrown after 60 seconds.
However, that being said, I think what is happening here is that your client is terminating the request.
You can increase the timeout by adding an http.Agent to the fetch PUT call and setting a timeout on that agent.
const http = require('http');
...
const runFail = async () => { // Fails after 60 seconds
const t = Date.now();
try {
const resp = await fetch('http://localhost:8081/test', {
method: 'PUT',
body: new RandomStream(1024e6),
agent: new http.Agent({ keepAlive: true, timeout: 300000 })
});
const data = await resp.text();
console.log(Date.now() - t, data);
} catch (err) {
console.warn(Date.now() - t, err);
}
};
See the node-fetch docs for adding a custom http(s) agent.
See the Node.js docs for the options available when creating an http(s) agent.
This turned out to be a bug in Node.js.
Discussion here: https://github.com/nodejs/node/issues/35661
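Until a fixed Node release is available, a possible server-side mitigation (an assumption on my part, not something confirmed in the issue thread) is to relax the http.Server timeout knobs so the socket isn't torn down mid-upload:
const http = require('http');
const server = http.createServer(/* same request handler as above */);
// All three settings exist on http.Server; 0 disables the corresponding limit.
server.setTimeout(0);          // per-socket inactivity timeout
server.keepAliveTimeout = 0;   // timeout for idle keep-alive sockets
server.headersTimeout = 0;     // time allowed to receive the request headers
server.listen(8081);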
So I am writing a Lambda that will take in some form data via a straight POST through API Gateway (testing with Postman for now) and then send that image to S3 for storage. Every time I run it, the image uploaded to S3 is corrupted and won't open properly. I have seen people having to decode/encode the incoming data, but I feel like I have tried everything using Buffer.from. I am only looking to store either .png or .jpg files. The code below does not reflect my attempts at Base64 encoding/decoding, since they all failed. Here is what I have so far -
Sample Request in postman
{
image: (uploaded .jpg/.png),
metadata: {tag: 'iPhone'}
}
Lambda
const AWS = require('aws-sdk')
const multipart = require('aws-lambda-multipart-parser')
const s3 = new AWS.S3();
exports.handler = async (event) => {
const form = multipart.parse(event, false)
const s3_response = await upload_s3(form)
return {
statusCode: '200',
body: JSON.stringify({ data: s3_response })
}
};
const upload_s3 = async (form) => {
const uniqueId = Math.random().toString(36).substr(2, 9);
const key = `${uniqueId}_${form.image.filename}`
const request = {
Bucket: 'bucket-name',
Key: key,
Body: form.image.content,
ContentType: form.image.contentType,
}
try {
const data = await s3.putObject(request).promise()
return data
} catch (e) {
console.log('Error uploading to S3: ', e)
return e
}
}
EDIT:
I am now attempting to save the image into the /tmp directory and then use a read stream to upload it to S3. Here is some code for that:
s3 upload function
const AWS = require('aws-sdk')
const fs = require('fs')
const s3 = new AWS.S3()
module.exports = {
upload: (file) => {
return new Promise((resolve, reject) => {
const key = `${Date.now()}.${file.extension}`
const bodyStream = fs.createReadStream(file.path)
const params = {
Bucket: process.env.S3_BucketName,
Key: key,
Body: bodyStream,
ContentType: file.type
}
s3.upload(params, (err, data) => {
if (err) {
return reject(err)
}
return resolve(data)
}
)
})
}
}
form parser function
const busboy = require('busboy')
module.exports = {
parse: (req, temp) => {
const ctype = req.headers['Content-Type'] || req.headers['content-type']
let parsed_file = {}
return new Promise((resolve) => {
try {
const bb = new busboy({
headers: { 'content-type': ctype },
limits: {
fileSize: 31457280,
files: 1,
}
})
bb.on('file', function (fieldname, file, filename, encoding, mimetype) {
const stream = temp.createWriteStream()
const ext = filename.split('.')[1]
console.log('parser -- ext ', ext)
parsed_file = { name: filename, path: stream.path, f: file, type: mimetype, extension: ext }
file.pipe(stream)
}).on('finish', () => {
resolve(parsed_file)
}).on('error', err => {
console.error(err)
resolve({ err: 'Form data is invalid: parsing error' })
})
if (req.end) {
req.pipe(bb)
} else {
bb.write(req.body, req.isBase64Encoded ? 'base64' : 'binary')
}
return bb.end()
} catch (e) {
console.error(e)
return resolve({ err: 'Form data is invalid: parsing error' })
}
})
}
}
handler
const form_parser = require('./form-parser').parse
const s3_upload = require('./s3-upload').upload
const temp = require('temp')
exports.handler = async (event, context) => {
temp.track()
const parsed_file = await form_parser(event, temp)
console.log('index -- parsed form', parsed_file)
const result = await s3_upload(parsed_file)
console.log('index -- s3 result', result)
temp.cleanup()
return {
statusCode: '200',
body: JSON.stringify(result)
}
}
The edited code above is a combination of other code and a GitHub repo I found that is trying to achieve the same result. Even with this solution the file is still corrupted.
Figured out this issue. The code works perfectly fine - it was an issue with API Gateway. You need to go into the API Gateway settings, set the Binary Media Type to multipart/form-data, and then re-deploy the API. Hope this helps someone else who is banging their head against the wall trying to send images via form data to a Lambda.
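For anyone scripting the same fix rather than clicking through the console, the binary media type can also be added with the aws-sdk; a sketch (your-rest-api-id is a placeholder, and the / in the media type must be escaped as ~1 in the patch path):
const AWS = require('aws-sdk')
const apigateway = new AWS.APIGateway()
// Add multipart/form-data to the API's binary media types,
// then create a new deployment for the change to take effect.
apigateway.updateRestApi({
    restApiId: 'your-rest-api-id', // placeholder
    patchOperations: [
        { op: 'add', path: '/binaryMediaTypes/multipart~1form-data' }
    ]
}).promise()
    .then(() => console.log('binary media type added'))
    .catch((e) => console.log('Error updating API: ', e))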
I'm trying to use nock in my tests to intercept the request calls I'm making from the native https module in Node.js. I'm using Promise.all to make two requests to the external server. I want my tests to intercept the calls and check some of the form fields to make sure they're filled in as I want.
I have my class set up below (I've kept only the most relevant parts of the code):
const archiver = require('archiver');
const { generateKeyPairSync } = require('crypto');
const FormData = require('form-data');
const fs = require('fs');
const https = require('https');
class Platform {
constructor() {
this.FILESTORE_USERNAME = process.env.FILESTORE_USERNAME;
this.FILESTORE_PASSWORD = process.env.FILESTORE_PASSWORD;
}
store(serviceName) {
const { publicKey, privateKey } = this._generateKeys();
return Promise.all([this._postKey(publicKey), this._postKey(privateKey)])
.then(() => {
return this._zipKeys(publicKey, privateKey, serviceName);
})
.catch((err) => {
throw err;
});
}
_postKey(key) {
const options = this._getOptions();
const keyName = (key.search(/(PUBLIC)/) !== -1) ? 'publicKey' : 'privateKey';
const form = new FormData();
form.append('file', key);
form.append('Name', keyName);
form.append('MimeMajor', 'application');
form.append('MimeMinor', 'x-pem-file');
form.append('Extension', (keyName == 'publicKey') ? 'pub' : '');
form.append('FileClass', 'MFS::File');
options.headers = form.getHeaders();
options.headers.Authorization = 'Basic ' + Buffer.from(this.FILESTORE_USERNAME + ':' + this.FILESTORE_PASSWORD).toString('base64');
return new Promise((resolve, reject) => {
let post = https.request(options, (res) => {
let data = '';
if (res.statusCode < 200 || res.statusCode > 299) {
reject(new Error('File Storage API returned a status code outside of acceptable range: ' + res.statusCode));
} else {
res.setEncoding('utf8');
res.on('data', (chunk) => {
data += chunk;
});
res.on('error', (err) => {
reject(err);
});
res.on('end', () => {
if (data) {
resolve(JSON.parse(data));
} else {
resolve();
}
});
}
});
post.on('error', (err) => {
reject(err);
});
form.pipe(post);
post.end();
});
}
_getOptions() {
return {
hostname: 'api.example.com',
path: '/media/files/',
method: 'POST',
};
}
}
module.exports = Platform;
And then, my testing code looks like the below. I'm using mocha, sinon, chai, sinon-chai and nock.
const Platform = require('/usr/src/app/api/Services/Platform');
const crypto = require('crypto');
const fs = require('fs');
const nock = require('nock');
const yauzl = require('yauzl');
describe('Platform', function() {
let platform;
beforeEach(() => {
platform = new Platform();
});
afterEach(() => {
const list = fs.readdirSync('/usr/src/app/api/Services/data/');
list.forEach((file) => {
fs.unlink('/usr/src/app/api/Services/data/' + file, (err) => {
if (err) throw err;
});
});
nock.cleanAll();
});
after(() => {
nock.restore();
});
describe('store', function() {
it('should post each generated key to an external storage place', async function() {
this.timeout(5000);
// const stub = sinon.stub(platform, '_postKey').resolves();
const scope = nock('https://api.example.com')
.persist()
.post('/media/files/', (body) => {
// console.log(body);
})
.reply(200);
let serviceName = 'test';
let actual = await platform.store(serviceName)
.catch((err) => {
(() => { throw err; }).should.not.throw();
});
console.log(scope);
// expect(stub.callCount).to.equal(2);
expect(actual).to.be.a('string');
expect(actual).to.include(serviceName + '.zip');
// stub.reset();
});
});
});
The problem I am coming across is this error that is thrown when running my tests:
AssertionError: expected [Function] to not throw an error but 'Error:
Nock: No match for request {\n "method": "POST",\n "url":
"https://api.example.com/media/files/",\n "headers": {\n
"content-type": "multipart/form-data;
boundary=--------------------------363749230271182821931703",\n
"authorization": "Basic abcdef1224u38454857483hfjdhjgtuyrwyt="\n },\n
"body":
"----------------------------363749230271182821931703\r\nContent-Disposition: form-data; name=\"file\"\r\n\r\n-----BEGIN PUBLIC
KEY-----\nMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAq+QnVOYVjbrHIlAEsEoF\nZ4sTvqiB3sJGwecNhmgrUp9U8oqgoB50aW6VMsL71ATRyq9b3vMQKpjbU3R2RcOF\na6mlaBtBjxDGu2nEpGX++mtPCdD9HV7idvWgJ3XS0vGaCM//8ukY+VLBc1IB8CHC\nVj+8YOD5Y9TbdpwXR+0zCaiHwwd8MHIo1kBmQulIL7Avtjh55OmQZZtjO525lbqa\nWUZ24quDp38he2GjLDeTzHm9z1RjYJG6hS+Ui0s2xRUs6VAr7KFtiJmmjxPS9/vZ\nwQyFcz/R7AJKoEH8p7NE7nn/onbybJy+SWRxjXVH8afHkVoC65BiNoMiEzk1rIsx\ns92woHnq227JzYwFYcLD0W+TYjtGCB8+ks+QRIiV0pFJ3ja5VFIxjn9MxLntWcf2\nhsiYrmfJlqmpW1DMfZrtt41cJUFQwt7CpN72aix7btmd/q0syh6VVlQEHq/0nDky\nItv7dqyqZc9NNOMqK9/kXWhbq5cwS21mm+kTGas5KSdeIR0LH7uVtivB+LKum14e\nRDGascZcXZIVTbOeCxA6BD7LyaJPzXmlMy4spXlhGoDYyVRhpvv2K03Mg7ybiB4X\nEL1oJtiCFkRX5LtRJv0PCRJjaa3UvfyIuz8bHK4ANxIZqcwZwER+g02gw8iqNfMa\nDWXpfMJUU8TQuLGWZQaGJc8CAwEAAQ==\n-----END
PUBLIC
KEY-----\n\r\n----------------------------363749230271182821931703\r\nContent-Disposition: form-data;
name=\"Name\"\r\n\r\npublicKey\r\n----------------------------363749230271182821931703\r\nContent-Disposition: form-data;
name=\"MimeMajor\"\r\n\r\napplication\r\n----------------------------363749230271182821931703\r\nContent-Disposition: form-data;
name=\"MimeMinor\"\r\n\r\nx-pem-file\r\n----------------------------363749230271182821931703\r\nContent-Disposition: form-data;
name=\"Extension\"\r\n\r\npub\r\n----------------------------363749230271182821931703\r\nContent-Disposition: form-data;
name=\"FileClass\"\r\n\r\nMFS::File\r\n----------------------------363749230271182821931703--\r\n"\n}'
was thrown
I take it this is because nock expects me to fake out the body of the request to get a correct match? Is there a way of just matching requests made to that address, regardless of the body, so I can do my own tests or whatever?
When the post method of a Nock Scope is passed a second argument, it is used to match against the body of the request.
Docs for specifying the request body
In your test, you're passing a function as the second argument but not returning true, so Nock does not consider it a match.
From the docs:
Function: nock will evaluate the function providing the request body
object as first argument. Return true if it should be considered a
match
Since your goal is to assert on form fields of the request, your best approach would be to leave the function there, do your assertions where the // console.log(body); line is, and add return true; at the end of the function.
You could also return true or false depending on whether your form fields match your assertions, but in my experience that makes the error output from the test convoluted. My preference is to use standard chai expect() calls and let the assertions bubble up errors before Nock continues with request matching.
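Putting that together, the matcher in the test would look something like this (a sketch: expect comes from your chai setup, and the .include checks are only illustrative):
const scope = nock('https://api.example.com')
    .persist()
    .post('/media/files/', (body) => {
        // run assertions on the raw multipart body string here
        expect(body).to.include('name="Name"');
        expect(body).to.include('name="FileClass"');
        return true; // always report a match so Nock replies with 200
    })
    .reply(200);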
I am building a node application and trying to neatly organize my code. I wrote a serial module that imports the serial libs and handles the connection. My intention was to write a basic module and then reuse it over and over again in different projects as needed. The only part that changes per project is how the incoming serial data is handled. For this reason I would like to pull the following handler out and redefine it to suit each project's needs. How can I use module exports to redefine only this section of the file?
I have tried adding myParser to exports, but that gives me null, and I would be out of scope.
Handler to redefine/change/overload for each new project
myParser.on('data', (data) => {
console.log(data)
//DO SOMETHING WITH DATA
});
Example usage: main.js
const serial = require('./serial');
const dataParser = require('./dataParser');
//call connect with CL args
serial.connect(process.argv[2], Number(process.argv[3]))
serial.myParser.on('data',(data) => {
//Do something unique with data
if (dataParser.parse(data) == 0)
serial.send('Error');
});
Full JS module below: serial.js
const SerialPort = require('serialport');
const ReadLine = require('@serialport/parser-readline');
const _d = String.fromCharCode(13); //char EOL
let myPort = null;
let myParser = null;
function connect(port, baud) {
let portName = port || `COM1`;
let baudRate = baud || 115200;
myPort = new SerialPort(portName, {baudRate: baudRate})
myParser = myPort.pipe(new ReadLine({ delimiter: '\n'}))
//Handlers
myPort.on('open', () => {
console.log(`port ${portName} open`)
});
myParser.on('data', (data) => {
console.log(data)
});
myPort.on('close', () => {
console.log(`port ${portName} closed`)
});
myPort.on('error', (err) => {
console.error('port error: ' + err)
});
}
function getPorts() {
let portlist = [];
SerialPort.list((err, ports) => {
ports.forEach(port => {
portlist.push(port.comName)
});
})
return portlist;
}
function send(data) {
myPort.write(JSON.stringify(data) + _d, function (err) {
if (err) {
return console.log('Error on write: ', err.message);
}
console.log(`${data} sent`);
});
}
function close() {
myPort.close();
}
module.exports = {
connect, getPorts, send, close
}
The problem is that a module is used where a class or a factory would be appropriate. myParser cannot exist until connect has been called, so it doesn't make sense to expose it as a module property: it would be unavailable by default, and multiple connect calls would overwrite it.
It can be a factory:
module.exports = function connect(port, baud) {
let portName = port || `COM1`;
let baudRate = baud || 115200;
let myPort = new SerialPort(portName, {baudRate: baudRate})
let myParser = myPort.pipe(new ReadLine({ delimiter: '\n'}))
//Handlers
myPort.on('open', () => {
console.log(`port ${portName} open`)
});
myParser.on('data', (data) => {
console.log(data)
});
myPort.on('close', () => {
console.log(`port ${portName} closed`)
});
myPort.on('error', (err) => {
console.error('port error: ' + err)
});
function getPorts() {
let portlist = [];
SerialPort.list((err, ports) => {
ports.forEach(port => {
portlist.push(port.comName)
});
})
return portlist;
}
function send(data) {
myPort.write(JSON.stringify(data) + _d, function (err) {
if (err) {
return console.log('Error on write: ', err.message);
}
console.log(`${data} sent`);
});
}
function close() {
myPort.close();
}
return {
myParser, getPorts, send, close
};
}
So it could be used like:
const serial = require('./serial');
const connection = serial(...);
connection.myParser.on('data',(data) => {
//Do something unique with data
if (dataParser.parse(data) == 0)
connection.send('Error');
});
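If you prefer the class form mentioned above, the same idea looks roughly like this (a sketch only, reusing the SerialPort/ReadLine imports and the _d constant from serial.js):
class SerialConnection {
    constructor(port, baud) {
        const portName = port || `COM1`;
        const baudRate = baud || 115200;
        this.myPort = new SerialPort(portName, { baudRate: baudRate });
        // expose the parser so each project can attach its own 'data' handler
        this.myParser = this.myPort.pipe(new ReadLine({ delimiter: '\n' }));
    }
    send(data) {
        this.myPort.write(JSON.stringify(data) + _d, (err) => {
            if (err) return console.log('Error on write: ', err.message);
            console.log(`${data} sent`);
        });
    }
    close() {
        this.myPort.close();
    }
}
module.exports = SerialConnection;
Consumers would then create new SerialConnection(process.argv[2], Number(process.argv[3])) and attach their own connection.myParser.on('data', ...) handler, just as in the factory example.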
I have an app.get route with quite a bit of logic inside it. Everything works great aside from some of the logic being called twice for some reason. I noticed, when saving something to my db, that it would save two rows.
So I put a console.log in that area and sure enough it was logging twice.
Any reason why this is happening?
app.get('/shopify/callback', (req, res) => {
const { shop, hmac, code, state } = req.query;
const stateCookie = cookie.parse(req.headers.cookie).state;
if (state !== stateCookie) {
return res.status(403).send('Request origin cannot be verified');
}
if (shop && hmac && code) {
// DONE: Validate request is from Shopify
const map = Object.assign({}, req.query);
delete map['signature'];
delete map['hmac'];
const message = querystring.stringify(map);
const providedHmac = Buffer.from(hmac, 'utf-8');
const generatedHash = Buffer.from(
crypto
.createHmac('sha256', config.oauth.client_secret)
.update(message)
.digest('hex'),
'utf-8'
);
let hashEquals = false;
try {
hashEquals = crypto.timingSafeEqual(generatedHash, providedHmac)
} catch (e) {
hashEquals = false;
};
if (!hashEquals) {
return res.status(400).send('HMAC validation failed');
}
// DONE: Exchange temporary code for a permanent access token
const accessTokenRequestUrl = 'https://' + shop + '/admin/oauth/access_token';
const accessTokenPayload = {
client_id: config.oauth.api_key,
client_secret: config.oauth.client_secret,
code,
};
request.post(accessTokenRequestUrl, { json: accessTokenPayload })
.then((accessTokenResponse) => {
const accessToken = accessTokenResponse.access_token;
// DONE: Use access token to make API call to 'shop' endpoint
const shopRequestUrl = 'https://' + shop + '/admin/shop.json';
const shopRequestHeaders = {
'X-Shopify-Access-Token': accessToken,
}
request.get(shopRequestUrl, { headers: shopRequestHeaders })
.then((shopResponse) => {
const response = JSON.parse(shopResponse);
const shopData = response.shop;
console.log('BEING CALLED TWICE...')
res.render('pages/brand_signup',{
shop: shopData.name
})
})
.catch((error) => {
res.status(error.statusCode).send(error.error.error_description);
});
})
.catch((error) => {
res.status(error.statusCode).send(error.error.error_description);
});
} else {
res.status(400).send('Required parameters missing');
}
});