Node.js script works once, then fails subsequently - javascript

I need a Node.js script that does the following:
1 - Triggers when an image is added to a specified S3 bucket.
2 - Creates a thumbnail of that image (360x203 pixels).
3 - Saves a copy of that thumbnail inside a separate S3 folder.
4 - Uploads the thumbnail to a specified FTP server six (6) times, using a "FILENAME-X" naming convention.
The code works as expected at first: the sample event pulls the image, creates a thumbnail, saves it to the other S3 bucket, and then uploads it to the FTP server.
The problem: it works for the test file HappyFace.jpg once, but every subsequent test fails. I also tried it with a different file, without success.
Also: I'd appreciate some help writing a loop to name the different files that get uploaded. I usually code in PHP, so it would probably take me longer than I'd like to write it myself.
Note: I removed my FTP credentials for privacy.
Problem Code Snippet:
function upload(contentType, data, next) {
    // Upload test file to FTP server
    c.append(data, 'testing.jpg', function(err) {
        console.log("CONNECTION SUCCESS!");
        if (err) throw err;
        c.end();
    });

    // Connect to ftp
    c.connect({
        host: "",
        port: 21,      // defaults to 21
        user: "",      // defaults to "anonymous"
        password: ""   // defaults to "#anonymous"
    });

    // S3 Bucket Upload Function Goes Here
}
Full Code:
// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var util = require('util');
var Client = require('ftp');
var fs = require('fs');
var gm = require('gm').subClass({ imageMagick: true }); // Enable ImageMagick integration.

// get reference to FTP client
var c = new Client();

// get reference to S3 client
var s3 = new AWS.S3();

exports.handler = function(event, context) {
    // Read options from the event.
    console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));

    // Get source bucket
    var srcBucket = event.Records[0].s3.bucket.name;

    // Get source object key
    // Object key may have spaces or unicode non-ASCII characters.
    var srcKey = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
    var url = 'http://' + srcBucket + ".s3.amazonaws.com/" + srcKey;

    // Set destination bucket
    var dstBucket = srcBucket + "-thumbs";

    // Set destination object key
    var dstKey = "resized-" + srcKey;

    // Infer the image type.
    var typeMatch = srcKey.match(/\.([^.]*)$/);
    if (!typeMatch) {
        console.error('unable to infer image type for key ' + srcKey);
        return;
    }
    var imageType = typeMatch[1];
    if (imageType != "jpg" && imageType != "png") {
        console.log('skipping non-image ' + srcKey);
        return;
    }

    // Download the image from S3, transform, and upload to a different S3 bucket.
    async.waterfall([
        function download(next) {
            // Download the image from S3 into a buffer.
            s3.getObject({
                Bucket: srcBucket,
                Key: srcKey
            }, next);
        },
        function transform(response, next) {
            gm(response.Body).size(function(err, size) {
                // Transform the image buffer in memory.
                this.toBuffer(imageType, function(err, buffer) {
                    if (err) {
                        next(err);
                    } else {
                        next(null, response.ContentType, buffer);
                    }
                });
            });
        },
        function upload(contentType, data, next) {
            // Upload test file to FTP server
            c.append(data, 'testing.jpg', function(err) {
                console.log("CONNECTION SUCCESS!");
                if (err) throw err;
                c.end();
            });

            // Connect to ftp
            c.connect({
                host: "",
                port: 21,      // defaults to 21
                user: "",      // defaults to "anonymous"
                password: ""   // defaults to "#anonymous"
            });

            // Stream the thumb image to a different S3 bucket.
            s3.putObject({
                Bucket: dstBucket,
                Key: dstKey,
                Body: data,
                ContentType: contentType
            }, next);
        }
    ], function (err) {
        if (err) {
            console.error(
                'Unable to resize ' + srcBucket + '/' + srcKey +
                ' and upload to ' + dstBucket + '/' + dstKey +
                ' due to an error: ' + err
            );
        } else {
            console.log(
                'Successfully resized ' + srcBucket + '/' + srcKey +
                ' and uploaded to ' + dstBucket + '/' + dstKey
            );
        }
        // context.done();
    });
};
The logs:
START RequestId: edc808c1-712b-11e5-aa8a-ed7c188ee86c Version: $LATEST
2015-10-12T21:55:20.481Z edc808c1-712b-11e5-aa8a-ed7c188ee86c Reading options from event: { Records: [ { eventVersion: '2.0', eventTime: '1970-01-01T00:00:00.000Z', requestParameters: { sourceIPAddress: '127.0.0.1' }, s3: { configurationId: 'testConfigRule', object: { eTag: '0123456789abcdef0123456789abcdef', sequencer: '0A1B2C3D4E5F678901', key: 'HappyFace.jpg', size: 1024 }, bucket: { arn: 'arn:aws:s3:::images', name: 'images', ownerIdentity: { principalId: 'EXAMPLE' } }, s3SchemaVersion: '1.0' }, responseElements: { 'x-amz-id-2': 'EXAMPLE123/5678abcdefghijklambdaisawesome/mnopqrstuvwxyzABCDEFGH', 'x-amz-request-id': 'EXAMPLE123456789' }, awsRegion: 'us-east-1', eventName: 'ObjectCreated:Put', userIdentity: { principalId: 'EXAMPLE' }, eventSource: 'aws:s3' } ] }
2015-10-12T21:55:22.411Z edc808c1-712b-11e5-aa8a-ed7c188ee86c Successfully resized images/HappyFace.jpg and uploaded to images-thumbs/resized-HappyFace.jpg
2015-10-12T21:55:23.432Z edc808c1-712b-11e5-aa8a-ed7c188ee86c CONNECTION SUCCESS!
END RequestId: edc808c1-712b-11e5-aa8a-ed7c188ee86c
REPORT RequestId: edc808c1-712b-11e5-aa8a-ed7c188ee86c Duration: 3003.76 ms Billed Duration: 3000 ms Memory Size: 128 MB Max Memory Used: 43 MB
Task timed out after 3.00 seconds
START RequestId: d347e7e3-712d-11e5-bfdf-05baa36d50fd Version: $LATEST
2015-10-12T22:08:55.910Z d347e7e3-712d-11e5-bfdf-05baa36d50fd Reading options from event: { Records: [ { eventVersion: '2.0', eventTime: '1970-01-01T00:00:00.000Z', requestParameters: { sourceIPAddress: '127.0.0.1' }, s3: { configurationId: 'testConfigRule', object: { eTag: '0123456789abcdef0123456789abcdef', sequencer: '0A1B2C3D4E5F678901', key: 'HappyFace.jpg', size: 1024 }, bucket: { arn: 'arn:aws:s3:::images', name: 'images', ownerIdentity: { principalId: 'EXAMPLE' } }, s3SchemaVersion: '1.0' }, responseElements: { 'x-amz-id-2': 'EXAMPLE123/5678abcdefghijklambdaisawesome/mnopqrstuvwxyzABCDEFGH', 'x-amz-request-id': 'EXAMPLE123456789' }, awsRegion: 'us-east-1', eventName: 'ObjectCreated:Put', userIdentity: { principalId: 'EXAMPLE' }, eventSource: 'aws:s3' } ] }
END RequestId: d347e7e3-712d-11e5-bfdf-05baa36d50fd
REPORT RequestId: d347e7e3-712d-11e5-bfdf-05baa36d50fd Duration: 3003.33 ms Billed Duration: 3000 ms Memory Size: 128 MB Max Memory Used: 17 MB
Task timed out after 3.00 seconds

The line:
var c = new Client();
is only going to get executed once; all calls to your handler() function will use the same instance of your FTP client.
If there could be multiple overlapping calls to handler() (and in an async world that seems very likely), then the calls to the FTP client, including c.connect(…) and c.end(), will be invoked multiple times against the same FTP client, which may already have an upload in progress. That leads to a scenario like this:
Call to handler(). Upload begins.
Call to handler(). Second upload begins.
First upload completes and calls c.end().
Second upload is canceled.
The solution is to create a new FTP client instance for each upload or, if your FTP server has a problem with that (limits the number of client connections), you’ll need to serialize your uploads somehow. One way to do that, since you’re using the async library, would be to use async.queue.
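For illustration, here is a minimal sketch of the new-client-per-upload approach, combined with the "FILENAME-X" loop you asked about. It reuses the ftp and async modules you already require; ftpConfig, uploadOne and uploadSix are names made up for the example, and you would call uploadSix(data, 'FILENAME', next) from inside the waterfall's upload step. Note also that both runs in your logs end with "Task timed out after 3.00 seconds", so the Lambda timeout will likely need to be raised above the default 3 seconds regardless.

// Sketch: one fresh FTP connection per file, so overlapping invocations never
// share (or close) each other's client.
var Client = require('ftp');
var async = require('async');

var ftpConfig = { host: "", port: 21, user: "", password: "" }; // your credentials

function uploadOne(buffer, remoteName, done) {
    var ftp = new Client();          // new client for this upload only
    var finished = false;
    function finish(err) {
        if (finished) return;        // guard against double callbacks
        finished = true;
        done(err);
    }
    ftp.on('ready', function() {
        ftp.put(buffer, remoteName, function(err) {
            ftp.end();               // closes only this connection
            finish(err);
        });
    });
    ftp.on('error', finish);
    ftp.connect(ftpConfig);
}

// Upload the same thumbnail six times as FILENAME-1.jpg ... FILENAME-6.jpg.
function uploadSix(data, baseName, next) {
    var names = [];
    for (var i = 1; i <= 6; i++) {
        names.push(baseName + '-' + i + '.jpg');
    }
    async.eachSeries(names, function(name, cb) {
        uploadOne(data, name, cb);
    }, next);
}

async.eachSeries keeps the six uploads sequential, which also covers the case where the FTP server limits concurrent connections; if it does not, async.each would run them in parallel.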

Related

Linode Storage With NodeJs

I am new to Linode. I see that Linode provides cloud storage just like AWS S3, and I want to use it with my Node.js app. I can't find any SDK for it the way I can for S3.
Can anybody tell me how to upload a file from Node.js to Linode storage in JavaScript?
I'm new to Linode too. I got the free $100 two-month trial and figured I'd try the bucket feature.
I used AWS S3 in the past, and this is pretty much identical as far as the SDK goes. The only hurdle was configuring the endpoint: with AWS S3 you specify the region, with Linode you specify the endpoint instead. The list of endpoints is here:
https://www.linode.com/docs/products/storage/object-storage/guides/urls/#cluster-url-s3-endpoint
Since you didn't mention whether you wanted an example for the server (Node.js) or the browser, I'll go with the one I've got: Node.js (server side).
Steps
I used node stable (currently 18.7). I set up package.json to start the index.js script (e.g. "scripts": {"start": "node index.js"}).
Install aws-sdk
npm i aws-sdk
Code for index.js
const S3 = require('aws-sdk/clients/s3')
const fs = require('fs')

const config = {
    endpoint: 'https://us-southeast-1.linodeobjects.com/',
    accessKeyId: 'BLEEPBLEEPBLEEP',
    secretAccessKey: 'BLOOPBLOOPBLOOP',
}

var s3 = new S3(config)

function listObjects() {
    console.debug("List objects")
    const bucketParams = {
        Bucket: 'vol1'
    }
    s3.listObjects(bucketParams, (err, data) => {
        if (err) {
            console.error("Error ", err)
        } else {
            console.info("Objects vol1 ", data)
        }
    })
}

function uploadFile() {
    const fileStream = fs.createReadStream('./testfile.txt')
    var params = { Bucket: 'vol1', Key: 'testfile', Body: fileStream }
    s3.upload(params, function(err, data) {
        if (err) {
            console.error("Error uploading test file", err)
        } else {
            console.info("Test file uploaded ", data)
            listObjects()
        }
    })
}

// Start
uploadFile()
Run "npm start".
Output I get:
Test file uploaded {
ETag: '"0ea76c859582d95d2c2c0caf28e6d747"',
Location: 'https://vol1.us-southeast-1.linodeobjects.com/testfile',
key: 'testfile',
Key: 'testfile',
Bucket: 'vol1'
}
List objects
Objects vol1 {
IsTruncated: false,
Marker: '',
Contents: [
{
Key: 'Inflation isnt transitory.mp4',
LastModified: 2023-01-10T15:38:42.045Z,
ETag: '"4a77d408defc08c15fe42ad4e63fefbd"',
ChecksumAlgorithm: [],
Size: 58355708,
StorageClass: 'STANDARD',
Owner: [Object]
},
{
Key: 'testfile',
LastModified: 2023-02-13T20:28:01.178Z,
ETag: '"0ea76c859582d95d2c2c0caf28e6d747"',
ChecksumAlgorithm: [],
Size: 18,
StorageClass: 'STANDARD',
Owner: [Object]
}
],
Name: 'vol1',
Prefix: '',
MaxKeys: 1000,
CommonPrefixes: []
}
Adjust the config with your own creds/data center. Hope this helps.
Note: if you want to upload files larger than 1 GB, you'll want to use the multipart upload feature. It's a bit more complex, but this should get you started; any AWS S3 code example should do, as there are plenty out there.
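For what it's worth, a low-effort way to get multipart behaviour with the same aws-sdk v2 client shown above is the managed s3.upload() call, which splits large bodies into parts automatically. This is only a sketch: the file name and the partSize/queueSize values are made-up examples.

function uploadLargeFile() {
    // Managed uploader: switches to multipart automatically for large bodies.
    const bigStream = fs.createReadStream('./bigfile.bin') // example file name
    const params = { Bucket: 'vol1', Key: 'bigfile.bin', Body: bigStream }
    const options = {
        partSize: 64 * 1024 * 1024, // example: 64 MB parts
        queueSize: 4                // example: up to 4 parts in flight at once
    }
    s3.upload(params, options, (err, data) => {
        if (err) {
            console.error("Error uploading large file", err)
        } else {
            console.info("Large file uploaded ", data.Location)
        }
    })
}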

Facebook: Unable to upload local image/ video to Facebook page form server side using graph API

I am trying to upload a video to a Facebook page from local storage using form data and the Graph API. You can have a look at the code I used below.
I have used multer to store the video file locally after it is received from the client side through the API. This is the code:
var multer = require('multer');

var storage = multer.diskStorage({
    destination: function (req, file, callback) {
        callback(null, './utility/fb_files');
    },
    filename: function (req, file, callback) {
        callback(null, file.originalname);
    }
});

var upload_to_fb = multer({ storage: storage });
module.exports = upload_to_fb;
The file gets saved in local storage successfully, but when I try to upload the same file to the Facebook page through the Graph API, I get an error. Here is the API code.
app.post('/api/document/post_to_fb_from_local', FB_upload.array('doc', 1), function (req, res, next) {
    console.log("req.files ", req.files)
    // console.log("req.files ", req)
    let company_id = getCompanyId(req);
    //console.log('company_id ' + company_id);
    var activeuser = retrieveDetails(req);
    if (
        !(
            req.files &&
            req.query.task_id &&
            req.query.action &&
            req.query.fileType &&
            req.query.accessToken &&
            req.query.page_id &&
            req.query.text
        )
    ) {
        return res.send({
            status: false,
            message: "can't upload fields missing"
        });
    }
    let action = req.query.action;
    //console.log("activeuser ", activeuser);
    if (action == "video_image_post") {
        if (req.query.fileType == "video") {
            // var videoFile = new Blob(req.files.doc[0], { type: 'video/mp4' })
            let vid = './utility/fb_files/' + req.files[0].filename;
            let videoData = require('fs').createReadStream(vid);
            console.log('./../../utility/fb_files/' + req.files[0].filename);
            var url = 'https://graph.facebook.com/' + req.query.page_id + '/videos?access_token=' + req.query.accessToken +
                '&source=' + videoData +
                '&title=' + req.query.title +
                '&description=' + req.query.text;
        }
        request({
            url: url,
            headers: {
                'Content-Type': 'application/json',
                'X-Requested-With': 'XMLHttpRequest'
            },
            method: 'POST'
        }, function (error, resp) {
            if (error || resp.statusCode != 200) {
                console.log("Failed to post");
                console.log(error);
                console.log(resp.body);
                body = resp.body;
                if (body.error) {
                    var error = body.error.message;
                    console.log("body ", body)
                    console.error("Error returned from facebook: " + body.error.message);
                    if (body.error.code == 341) {
                        error = "You have reached the post limit for facebook. Please wait for 24 hours before posting again to facebook."
                        console.error(error);
                    }
                    res.send(error);
                    return;
                }
            } else {
                console.log("posted successfully")
                console.log("resp.body.id ", resp.body.id);
                console.log("resp.body ", resp.body);
                res.send({
                    "status": true,
                    "message": "posted successfully",
                    "body": resp.body
                });
            }
        });
    }
});
And the error I get when I try to upload a video to the Facebook page is shown below.
req.files [ { fieldname: 'doc',
2|document.service | originalname: 'VID-20171112-WA0001.mp4',
2|document.service | encoding: '7bit',
2|document.service | mimetype: 'video/mp4',
2|document.service | destination: './utility/fb_files',
2|document.service | filename: 'VID-20171112-WA0001.mp4',
2|document.service | path: 'utility\\fb_files\\VID-20171112-WA0001.mp4',
2|document.service | size: 99540 } ]
2|document.service | ./../../utility/fb_files/VID-20171112-WA0001.mp4
2|document.service | Failed to post
2|document.service | null
2|document.service | {"error":{"message":"There was a problem uploading your video file. Please try again.","type":"OAuthException","code":390,"error_subcode":1363030,"is_transient":true,"error_user_title":"Video Upload Timeout","error_user_msg":"Your video upload timed out before it could be completed. This is probably because of a slow network connection or because the video you're trying to upload is too large. Please try again.","fbtrace_id":"Aia2DbzWdjepS1yMJIzX1CE"}}
Please help me in resolving the issue here.
Thanks in advance
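One thing worth noting: '&source=' + videoData concatenates a readable stream object into the URL string, so the Graph API never receives the actual video bytes, which would be consistent with the upload-timeout error above. Below is a hedged, untested sketch of sending the file as multipart/form-data with the same request module; postVideoToPage is a made-up helper name, the field names follow the /{page-id}/videos endpoint, and graph-video.facebook.com is the host Facebook documents for video uploads (adjust for your API version).

// Sketch (untested): post the video bytes as multipart/form-data instead of
// embedding the stream in the query string.
var fs = require('fs');
var request = require('request');

function postVideoToPage(pageId, accessToken, filePath, title, description, callback) {
    request.post({
        url: 'https://graph-video.facebook.com/' + pageId + '/videos',
        formData: {
            access_token: accessToken,
            title: title,
            description: description,
            source: fs.createReadStream(filePath)   // the actual file contents
        }
    }, function (error, resp, body) {
        callback(error, body);
    });
}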

S3 node module, attempting to upload file gives ENETUNREACH error

I'm trying to upload a file to my Amazon S3 bucket, but I'm getting an ENETUNREACH error. I do have permission to upload/delete files for my buckets, and I also edited the CORS configuration to allow POST/GET requests from all origins. I'm thinking the keys I received from someone might be faulty. What is a good way to test whether the keys I have are valid, if that happens to be the issue?
Code below:
var s3 = require('s3');

/* Create a client for uploading or deleting files */
var client = s3.createClient({
    maxAsyncS3: 20,                     // this is the default
    s3RetryCount: 3,                    // this is the default
    s3RetryDelay: 1000,                 // this is the default
    multipartUploadThreshold: 20971520, // this is the default (20 MB)
    multipartUploadSize: 15728640,      // this is the default (15 MB)
    s3Options: {
        accessKeyId: 'xxxxxxxx',
        secretAccesskey: 'xxxxxxxx',
        region: 'xxxxxxxx'
    },
});

exports.uploadFile = function(fileName, bucket) {
    console.log('Uploading File: ' + fileName + '\nBucket: ' + bucket);
    var params = {
        localFile: fileName,
        s3Params: {
            Bucket: bucket,
            Key: 'testfile',
        },
    };
    var uploader = client.uploadFile(params);
    uploader.on('error', function(err) {
        console.error("unable to upload:", err.stack);
    });
    uploader.on('progress', function() {
        console.log("progress", uploader.progressMd5Amount, uploader.progressAmount, uploader.progressTotal);
    });
    uploader.on('end', function() {
        console.log("done uploading");
    });
};
Console log when trying to upload a small txt file:
Console Log
Disabled IIS services to fix my error.
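As for the "are my keys even valid?" part of the question, a quick sanity check with the official aws-sdk (separate from the s3 wrapper module used above) is to make a cheap authenticated call and see whether it succeeds; sts.getCallerIdentity is handy because it needs no special permissions. This is only a sketch, and the region is a placeholder.

// Sketch: verify an access key / secret key pair with a cheap authenticated call.
var AWS = require('aws-sdk');

var sts = new AWS.STS({
    accessKeyId: 'xxxxxxxx',      // the keys you were given
    secretAccessKey: 'xxxxxxxx',
    region: 'us-east-1'           // placeholder region
});

sts.getCallerIdentity({}, function (err, data) {
    if (err) {
        console.error('Credentials rejected:', err.code); // e.g. InvalidClientTokenId
    } else {
        console.log('Credentials OK for account', data.Account);
    }
});

If this call itself fails with ENETUNREACH rather than an authentication error, the problem is network or proxy related rather than the keys themselves.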

Post Data, received by Express server with multer middleware, is truncated

I'm sending a 1.5 MB string via a POST request to my Express server, but when I look at the data received, it's only 786 KB long.
The string I'm sending is a base64 image string. I've also tried to change the limits of multer to no avail.
CLIENT:
function upload(file) {
    var form = new FormData(),
        xhr = new XMLHttpRequest();
    form.append('filename', "imageName.jpg");
    form.append('imageData', file);
    xhr.open('post', 'http://server/imgdata', true);
    xhr.send(form);
}
SERVER:
app.use(multer({
    dest: './uploads/',
    // WILL RENAME THE RECEIVED FILE
    rename: function(fieldname, filename) {
        return filename + Date.now();
    },
    // UPLOAD HAS STARTED
    onFileUploadStart: function(file) {
        console.log(file.originalname + ' is starting ...')
    },
    // FILE HAS BEEN RECEIVED AND SAVED
    onFileUploadComplete: function(file) {
        console.log(file.fieldname + ' uploaded to ' + file.path)
        done = true;
    },
    onFieldsLimit: function() {
        console.log('Crossed fields limit!')
    }
}));

app.post('/imgdata', function(req, res) {
    // THIS RETURNS ONLY A PART OF THE DATA
    res.json(req.body.image2);
    var data = req.body.image2.replace(/^data:image\/\w+;base64,/, "");
    var buf = new Buffer(data, 'base64');
    // THE IMAGE IS SAVED, BUT ONLY 786 KB OF IT EVERY TIME,
    // REGARDLESS OF THE SIZE OF THE DATA SENT
    fs.writeFile('image.jpg', buf, function(err) {
        if (err) throw err;
        console.log('It\'s saved!');
    });
})
Changing the limits of the multer middleware didn't fix the issue,
limits: {
    fieldNameSize: 100000,
    fieldSize: 5242880
},
but as per #adeneo's suggestion I gave bodyParser another go, and after changing the limit of urlencoded like so
app.use(bodyParser.urlencoded({
    extended: true,
    limit: 100000000
}));
I received all the data on the server and successfully saved the image.

File upload is not working on Sails.js

I'm trying to upload an image to the server using the code below. It works fine for small images up to ~70 KB.
For larger images, it sets the file status to cancelled and saves the file to the server's disk, but with a size of zero KB.
var CustomerController = {
    uploadPhoto: function (request, response) {
        var customerId = request.param('id');
        // check for the existence of the customer
        Customer.findOne({
            id: customerId
        }).exec(function (err, customer) {
            if (err || !customer) {
                return response.notFound('Customer not found');
            }
            // get the file that was uploaded from the client
            // and save it to disk
            request.file('photo').upload(function onUploadComplete(err, files) {
                if (err || !files || files.length < 1)
                    return response.serverError('Error uploading photo: ' + err);
                // do something with files here
            });
        });
    },
    _config: {}
};

module.exports = CustomerController;
I'm using Sails 0.10.0-rc7.
Any ideas?
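One common Skipper (Sails' file-upload handler) gotcha fits these symptoms: the incoming file stream is only buffered briefly, so if an asynchronous database query runs before req.file(...).upload(...) is called, small files that already fit in the buffer still succeed while larger ones get cancelled and written out as 0 KB. Purely as a hedged sketch, assuming that is the cause here, the handler could consume the upload first and validate the customer afterwards:

// Hedged sketch: start consuming the upload immediately so Skipper does not
// cancel it while we wait on the database, then check the customer exists.
var CustomerController = {
    uploadPhoto: function (request, response) {
        var customerId = request.param('id');

        // get the file that was uploaded from the client and save it to disk
        request.file('photo').upload(function onUploadComplete(err, files) {
            if (err || !files || files.length < 1)
                return response.serverError('Error uploading photo: ' + err);

            // only now check for the existence of the customer
            Customer.findOne({ id: customerId }).exec(function (err, customer) {
                if (err || !customer) {
                    return response.notFound('Customer not found');
                }
                // do something with files here, then respond
                return response.json(files);
            });
        });
    },
    _config: {}
};

module.exports = CustomerController;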
