How to copy files to a remote server using scp2 in Node.js?

I want to copy files from the local server to a remote server in Node.js using the scp2 package. The files are first uploaded to the local server using multer; after that, I want to copy or move them to the remote server.
My Code:
exports.newFileUpload = function(req, res, next){
  var storage = multer.diskStorage({   // multer's disk storage settings
    destination: function (req, file, cb) {
      cb(null, 'uploads/');
    },
    filename: function (req, file, cb) {
      var datetimestamp = Date.now();
      cb(null, datetimestamp + '-' + file.originalname);
    }
  });
  var upload = multer({ storage: storage, limits: { fieldSize: 25 * 1024 * 1024 }}).array('file');
  upload(req, res, function(err){
    console.log(req.body);
    console.log(req.files);
    if(err){
      res.json({error_code:1, err_desc:err});
      console.log("Error Occured", err);
      return;
    } else {
      client.scp(req.files[0].path, {
        host: 'www.do********.in',
        username: 'username',
        password: '*********',
        path: '/uploads/'
      }, function(err) {
        console.log(req.files[0].path);
        console.log("files uploaded in remote server");
        res.json({error_code:0, err_desc:null});
      });
    }
  });
}
The file upload to the local server works perfectly; after that, the copy to the remote server throws an error.
Error:
{ date: 'Mon Nov 13 2017 01:00:22 GMT+0530 (India Standard Time)',
process:
{ pid: 5664,
uid: null,
gid: null,
cwd: 'D:\\sample',
execPath: 'C:\\Program Files\\nodejs\\node.exe',
version: 'v8.2.1',
argv: [ 'C:\\Program Files\\nodejs\\node.exe', 'D:\\sample\\app.js' ],
memoryUsage:
{ rss: 69619712,
heapTotal: 45162496,
heapUsed: 39166256,
external: 149849 } },
os: { loadavg: [ 0, 0, 0 ], uptime: 3537.1088452 },
trace:
[ { column: 11,
file: 'util.js',
function: 'Object.exports._errnoException',
line: 1024,
method: '_errnoException',
native: false },
{ column: 20,
file: 'util.js',
function: 'exports._exceptionWithHostPort',
line: 1047,
method: '_exceptionWithHostPort',
native: false },
{ column: 14,
file: 'net.js',
function: 'TCPConnectWrap.afterConnect [as oncomplete]',
line: 1150,
method: 'afterConnect [as oncomplete]',
native: false } ],
stack:[ 'Error: Can\'t set headers after they are sent.',
' at validateHeader (_http_outgoing.js:504:11)',
' at ServerResponse.setHeader (_http_outgoing.js:511:3)',
' at ServerResponse.header (D:\\sample\\node_modules\\express\\lib\\response.js:730:10)',
' at ServerResponse.send (D:\\sample\\node_modules\\express\\lib\\response.js:170:12)',
' at ServerResponse.json (D:\\sample\\node_modules\\express\\lib\\response.js:256:15)',
' at D:\\sample\\routes\\part.js:302:10',
' at Client.closeHandler (D:\\sample\\node_modules\\scp2\\lib\\scp.js:48:13)',
' at emitNone (events.js:105:13)',
' at Client.emit (events.js:207:7)',
' at Client.<anonymous> (D:\\sample\\node_modules\\scp2\\lib\\client.js:88:10)',
' at emitNone (events.js:105:13)',
' at Client.emit (events.js:207:7)',
' at Socket.<anonymous> (D:\\sample\\node_modules\\ssh2\\lib\\client.js:225:10)',
' at emitOne (events.js:115:13)',
' at Socket.emit (events.js:210:7)',
' at Pipe._handle.close [as _onclose] (net.js:549:12)' ] }
I couldn't identify the cause of the error; any suggestions or possible ways to solve the problem would be appreciated.
Thanks in advance!

Uploading files to a remote server using multer alone is not directly possible, but we can combine it with multer-sftp, scp, or ssh techniques.
Your stack trace actually shows two distinct problems: the TCP connection to the remote host failed (the _exceptionWithHostPort / afterConnect entries in the trace, so check the host value and its reachability), and res.json() was called after a response had already been sent, which produces the "Can't set headers after they are sent" error.
When uploading files to a remote server in Node.js, we need to take care of a few things:
The username and password should be correct
The corresponding port should be open on the remote server
The remote directory should have write access
Working code using scp2 to move the files to the remote server (note that the response is now sent exactly once, from inside the scp callback):
var multer = require('multer');
var client = require('scp2');

exports.newFileUpload = function(req, res, next){
  var storage = multer.diskStorage({   // multer's disk storage settings
    destination: function (req, file, cb) {
      cb(null, 'uploads/');
    },
    filename: function (req, file, cb) {
      var datetimestamp = Date.now();
      cb(null, datetimestamp + '-' + file.originalname);
    }
  });
  var upload = multer({ storage: storage, limits: { fieldSize: 25 * 1024 * 1024 }}).array('file');
  upload(req, res, function(err){
    console.log(req.body);
    console.log(req.files);
    if(err){
      console.log("Error occurred", err);
      res.json({error_code:1, err_desc:err});
      return;
    }
    client.scp(req.files[0].path, {
      host: 'host',
      username: 'username',
      password: '*********',
      path: '/uploads/'
    }, function(err) {
      if (err) {   // respond exactly once, from the scp callback
        console.log("Error copying to remote server", err);
        return res.json({error_code:1, err_desc:err});
      }
      console.log("Files uploaded to remote server");
      res.json({error_code:0, err_desc:null});
    });
  });
}
Note: install the required packages and declare the corresponding require statements in your code, as shown at the top of the snippet above.
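As an alternative to scp2, the same copy can be done with node-ssh (see the references below). A minimal sketch, assuming node-ssh's promise-based API and a placeholder host; it would run inside the multer upload callback where req.files is available:
var NodeSSH = require('node-ssh');   // older node-ssh releases export the class directly
var ssh = new NodeSSH();

ssh.connect({
  host: 'www.example.in',            // placeholder host
  username: 'username',
  password: '*********'
}).then(function () {
  // copy the multer-saved file into the remote uploads directory
  return ssh.putFile(req.files[0].path, '/uploads/' + req.files[0].filename);
}).then(function () {
  console.log('File copied to remote server over SFTP');
  ssh.dispose();
});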
References: multer-sftp, scp2, node-ssh

Related

MulterError: Unexpected field (Upload image in formdata)

I am trying to upload an image file but get the same error on the server side no matter what I try. Can someone please tell me what I am missing?
javascript:
const filePicker = document.getElementById('takePhoto');
const myFile = filePicker.files[0];
var formData = new FormData();
formData.append('myFile', myFile);
fetch(appURL + 'onlineHelp/questionImage', {
  method: 'POST',
  body: formData
});
The FormData being posted:
myFile: (binary)
Server side:
var storage = multer.diskStorage({
  destination: function (req, file, cb) {
    cb(null, 'upload/')
  },
  filename: function (req, file, cb) {
    cb(null, file.fieldname + '-' + Date.now())
  }
})
var upload = multer({ storage: storage });
onlineHelp.post('/questionImage', upload.single("myFile"), (req, res, next) => {
  res.send("received")
  next()   // the original was missing the closing parenthesis here
})
Error:
MulterError: Unexpected field
at wrappedFileFilter (C:\Users\annet\Documents\ALS homeworx API\node_modules\multer\index.js:40:19)
at Busboy.<anonymous> (C:\Users\annet\Documents\ALS homeworx API\node_modules\multer\lib\make-middleware.js:114:7)
at Busboy.emit (events.js:198:13)
at Busboy.emit (C:\Users\annet\Documents\ALS homeworx API\node_modules\busboy\lib\main.js:38:33)
at PartStream.<anonymous> (C:\Users\annet\Documents\ALS homeworx API\node_modules\busboy\lib\types\multipart.js:213:13)
at PartStream.emit (events.js:198:13)
at HeaderParser.<anonymous> (C:\Users\annet\Documents\ALS homeworx API\node_modules\dicer\lib\Dicer.js:51:16)
at HeaderParser.emit (events.js:198:13)
at HeaderParser._finish (C:\Users\annet\Documents\ALS homeworx API\node_modules\dicer\lib\HeaderParser.js:68:8)
at SBMH.<anonymous> (C:\Users\annet\Documents\ALS homeworx API\node_modules\dicer\lib\HeaderParser.js:40:12)
I use this code to upload images or files to a Node.js server.
javascript:
const formData = new FormData()
// append the file; assigning formData.photo = file would not add it to the request
formData.append('photo', file)
var res = await fetch('/your api', {
  method: 'PATCH',
  body: formData,
})
Server side:
router.patch('/avatar', auth, async (req, res, next) => {
  if (req.files) {
    let photo = req.files.photo;
    if (photo.size < 3000000) {
      var random = Math.floor(Math.random() * 9999999 + 10);
      var fileName = random + "_avatar." + photo.name.split('.').pop();
      try {
        // mv() returns a promise when no callback is given, so await it
        await photo.mv('./public/uploads/users/avatars/' + fileName);
      } catch (err) {
        return res.status(400).json({ "status": "error", "data": "server can't upload" });
      }
      Users.findByIdAndUpdate(req.userId, { $set: { avatar: fileName, update_in: Date.now() } }, function (err, user) {
        if (err) {
          return res.status(400).json({ "status": "error", "msg": err });
        }
        Users.findById(req.userId).select("-password -role -sms -sms_time -__v").exec(function (err, user) {
          return res.status(200).json({ "status": "success", "data": user }); // user updated
        });
      });
    } else {
      return res.status(400).json({ "status": "error", "msg": "Photo size should be a maximum of 3MB" });
    }
  } else {
    return res.status(400).json({ "status": "error", "msg": "Image not found" });
  }
});
In your server.js app, you should use this code:
const fileUpload = require('express-fileupload');
app.use(fileUpload({
  createParentPath: true
}));
I believe you need to present the directory in an object, like:
cb(null, { fieldName: "temp_upload" });
Also, just something to be aware of: if you plan on this actually being a production app and the file needs to be accessed on the website, you will want to look at using an object store of some sort (S3 or another provider), simply because you more than likely have Node running as a cluster (meaning each instance uses its own thread pool and does not talk to the others). So if a user uploads on instance 1, instance 2 will not know about the file.
Multer has an S3 plugin (multer-s3) that's really easy to use; see the sketch below. But if you're just doing a point-in-time upload and reading the file via the Node stack (uploading, say, an XML file and then reading it in the same process), you should be OK.
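For the object-store route, here is a minimal sketch with multer-s3 (the bucket name is a placeholder; it assumes aws-sdk v2 credentials configured in the environment and an existing Express app):
var aws = require('aws-sdk');
var multer = require('multer');
var multerS3 = require('multer-s3');

var s3 = new aws.S3();

var upload = multer({
  storage: multerS3({
    s3: s3,
    bucket: 'my-upload-bucket',                        // placeholder bucket name
    key: function (req, file, cb) {
      cb(null, Date.now() + '-' + file.originalname);  // object key in the bucket
    }
  })
});

// Every clustered instance writes to the same bucket, so any instance can
// serve the file afterwards; multer-s3 exposes the object URL as req.file.location.
app.post('/questionImage', upload.single('myFile'), function (req, res) {
  res.send('received: ' + req.file.location);
});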

Upload files to remote server using multer-sftp in Express Node.js?

I'm trying to upload files to a remote server using multer-sftp in Node.js, following the official npm multer-sftp docs. Previously I uploaded the files to Amazon S3 instead of a remote server; now I want to upload them to a remote server.
API:
exports.newFileUpload = function(req, res, next){
  var storage = sftpStorage({
    sftp: {
      host: 'http://www.port*****es.in/',
      port: 22,
      username: 'username',
      password: 'password'
    },
    destination: function (req, file, cb) {
      cb(null, 'images/')
    },
    filename: function (req, file, cb) {
      cb(null, file.fieldname + '-' + Date.now())
    }
  })
  var upload = multer({ storage: storage }).array('file');
  upload(req, res, function(err){
    logger.debug(JSON.stringify(req.body));
    logger.debug(JSON.stringify(req.files));
    if(err){
      logger.debug("Error Occured", JSON.stringify(err));
      res.json({error_code:1, err_desc:err});
      return;
    } else {
      res.json({error_code:0, err_desc:null});
    }
  });
}
While uploading the file, the following error is returned:
2017-11-10T02:39:48.297Z - debug: Error Occured {"code":"ENOTFOUND","errno":"ENOTFOUND",
"syscall":"getaddrinfo","hostname":"http://www.port****es.in/","host":"http://www.port****es.in/",
"port":22,"level":"client-socket","storageErrors":[]}
Also, port 22 is open on my domain. Awaiting suggestions,
thanks in advance.
For your error, the key clue is the ENOTFOUND from getaddrinfo: the sftp host option must be a bare hostname, not a URL, so the http:// prefix and trailing slash make DNS resolution fail. Beyond that, there are two more possibilities:
Port 22 is not actually open, or that folder cannot be accessed
Check your folder directory on the domain
Uploading files to a remote server using multer-sftp is an easy and flexible way;
we can also upload files to a remote server with scp or ssh techniques in Node.js.
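To make the host fix concrete (the hostname below is a placeholder):
// Wrong - a URL is not a hostname; getaddrinfo fails with ENOTFOUND:
//   host: 'http://www.example.in/'
// Right - scheme and trailing slash removed:
var sftpOptions = {
  host: 'www.example.in',   // placeholder hostname
  port: 22,
  username: 'username',
  password: 'password'
};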
Working Code:
var multer = require('multer');
var sftpStorage = require('multer-sftp');   // per the multer-sftp README

exports.newFileUpload = function(req, res, next){
  var storage = sftpStorage({
    sftp: {
      host: 'hostname',
      port: 22,
      username: 'username',
      password: 'password'
    },
    destination: function (req, file, cb) {
      cb(null, 'images/')
    },
    filename: function (req, file, cb) {
      cb(null, file.fieldname + '-' + Date.now())
    }
  })
  var upload = multer({ storage: storage }).array('file');
  upload(req, res, function(err){
    logger.debug(JSON.stringify(req.body));
    logger.debug(JSON.stringify(req.files));
    if(err){
      logger.debug("Error occurred", JSON.stringify(err));
      res.json({error_code:1, err_desc:err});
    } else {
      logger.debug("Files uploaded successfully");
      res.json({error_code:0, err_desc:null});
    }
  });
}
Note: when using multer-sftp, port 22 must be open on the remote server.
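To verify reachability before debugging credentials, here is a quick check using Node's built-in net module (the hostname is a placeholder):
var net = require('net');

// Try a plain TCP connection to the SFTP port.
var socket = net.connect({ host: 'www.example.in', port: 22, timeout: 5000 });
socket.on('connect', function () {
  console.log('Port 22 is reachable');
  socket.end();
});
socket.on('timeout', function () {
  console.log('Connection timed out - port 22 may be filtered');
  socket.destroy();
});
socket.on('error', function (err) {
  console.log('Cannot reach port 22:', err.message);
});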
Hope it helps!

ngFileUpload and Multer - saving uploaded file to a folder

I'm (very) new to the Angular/MEAN stack, and I'm trying to upload a file (a PDF, specifically) and save it to the server. I know it's probably a stupid question, but I cannot find any server-side example of how to actually save the uploaded file to the server's storage.
I'm using the ng-file-upload directive from https://github.com/danialfarid/ng-file-upload, Express for the server, and of course AngularJS for the file upload.
POST UPDATED!! See below
More info: I'm using Yeoman's full mean stack generator for this project
UPDATE:
I've tried using multer (https://github.com/expressjs/multer) to save the uploaded file to the server. I get this error when trying to upload the file (it returns a 500 error):
Error: Unexpected field
at makeError ({proj_folder}/node_modules/multer/lib/make-error.js:12:13)
at wrappedFileFilter ({proj_folder}/node_modules/multer/index.js:39:19)
at Busboy.<anonymous> ({proj_folder}/node_modules/multer/lib/make-middleware.js:112:7)
at emitMany (events.js:127:13)
at Busboy.emit (events.js:201:7)
at Busboy.emit ({proj_folder}/node_modules/busboy/lib/main.js:31:35)
at PartStream.<anonymous> ({proj_folder}/node_modules/busboy/lib/types/multipart.js:213:13)
at emitOne (events.js:96:13)
at PartStream.emit (events.js:188:7)
at HeaderParser.<anonymous> ({proj_folder}/node_modules/dicer/lib/Dicer.js:51:16)
at emitOne (events.js:96:13)
at HeaderParser.emit (events.js:188:7)
at HeaderParser._finish ({proj_folder}/node_modules/dicer/lib/HeaderParser.js:68:8)
at SBMH.<anonymous> ({proj_folder}/node_modules/dicer/lib/HeaderParser.js:40:12)
at emitMany (events.js:127:13)
at SBMH.emit (events.js:201:7)
Updated HTML:
<form accept-charset="UTF-8" class="form" name="form" ng-submit="$ctrl.submitForm(form)"
      enctype="multipart/form-data">
  ...
  <input ngf-select ng-model="$ctrl.paperFile" ngf-model-options="{allowInvalid: true}" name="paper" ngf-accept="'application/pdf'" required="" type="file">
  ...
</form>
The submitForm method:
...
this.Upload.upload({
  url: '/paperUpload',
  method: 'POST',
  file: this.paperFile,
  fields: {
    _id: this.user._id
  }
})
.then(function(resp){
  console.log('Success upload');
  console.log(resp.data);
}, function(error){
  console.log('fail upload');
  console.log(error);
}, function(evt){
  console.log('upload on progress');
  console.log(evt);
});
Server route:
var express = require('express');
var multer = require('multer');
var router = express.Router();
var upload = multer({
  dest: 'uploads/',
  rename: function(fieldname, filename){
    return filename + "_" + Date.now();
  }
});
router.post('/paperUpload', upload.single('paper'), uploadPaper);
...
// method to upload
export function uploadPaper(req, res){
  res.status(204).end();
}
The 'uploads' folder is created, but the file is not uploaded and the request always fails.
Any help is appreciated.
Thank you
Do these steps:
npm install ng-file-upload
Include ng-file-upload.min.js in your Angular index.html.
Use this example to copy the form into the Angular page from which you want to upload files: http://jsfiddle.net/0m29o9k7/
Copy this code outside of any form which is already there.
Change the URL in the example code to where you want to upload files: url: 'https://angular-file-upload-cors-srv.appspot.com/upload'
In your server.js or app.js, whichever you use to start the app (node server.js), add these lines:
var crypto = require('crypto');
var mime = require('mime');
var multer = require('multer');
var storage = multer.diskStorage({
  destination: function (req, file, cb) {
    cb(null, 'app/app-content/images/')
  },
  filename: function (req, file, cb) {
    crypto.pseudoRandomBytes(16, function (err, raw) {
      cb(null, raw.toString('hex') + Date.now() + '.' + mime.extension(file.mimetype));
    });
  }
});
var upload = multer({ storage: storage });
// make '/app' default route
app.post('/', upload.any(), function (req, res) {
  res.send(req.files);
});
Change 'app/app-content/images/' to wherever you want your uploaded files to go.
This code points your file upload URL to the index route of your Node server.
Then you will be able to see the uploaded file.
Try this. I have never seen the 'file' or 'fields' options of Upload.upload; do you know that those work? I just overcame a similar error by adding arrayKey to my POST call.
If you get the same error, try removing 'fields' and adding a new key to the data object with the _id you wish to pass along.
this.Upload.upload({
  url: '/paperUpload',
  method: 'POST',
  headers: {
    'Content-Type': 'multipart/form-data ; boundary = ----WebKitFormBoundaryvlb7BC9EAvfLB2q5'
  },
  arrayKey: '',
  data: {
    paper: this.paperFile,
  },
  fields: {
    _id: this.user._id
  }
})
.then(function(resp){
  console.log('Success upload');
  console.log(resp.data);
}, function(error){
  console.log('fail upload');
  console.log(error);
}, function(evt){
  console.log('upload on progress');
  console.log(evt);
});

NodeJS uploading images with Multer, getting "Error: unexpected field"

I'm getting "Error: Unexpected field" when trying to process and store some uploaded images via NgFileUpload. Here's my Angular code:
HTML:
<div ngf-drop="upload($files)" ngf-select="upload($files)" ng-model="files"
     ngf-drag-over-class="'dragover'" ngf-multiple="true" ngf-pattern="'image/*'"
     ngf-accept="'image/*'" ngf-max-size="2MB" ngf-min-height="100"
     ngf-resize="{width: 100, height: 100}" ngf-keep="'distinct'"
     name="artistUpload" class="drop-box">
  Drop images here or click to upload
</div>
AngularJS:
$scope.$watch('files', function () {
  $scope.upload($scope.files);
});
$scope.upload = function (files) {
  if (files && files.length) {
    console.log(files);
    Upload.upload({
      url: '/api/photos/upload/',
      arrayKey: '',
      data: {
        files: files
      }
    }).then(function (response) {
      $timeout(function () {
        $scope.result = response.data;
      });
    }, function (response) {
      if (response.status > 0) {
        $scope.errorMsg = response.status + ': ' + response.data;
      }
    }, function (evt) {
      $scope.progress =
        Math.min(100, parseInt(100.0 * evt.loaded / evt.total));
    });
  }
};
Express:
var multer = require('multer')
var storage = multer.diskStorage({
  destination: function (req, file, cb) {
    console.log(req);
    cb(null, '/private/img/')
  },
  filename: function (req, file, cb) {
    cb(null, file.fieldname)
  }
})
var upload = multer({ storage: storage })
app.post('/api/photos/upload/', upload.array('photos', 6), function (req, res, next) {
  console.log("Files saved");
  console.log(req.files);
  next();
})
This question suggests that arrayKey: '' can solve the issue, but this hasn't worked for me at all. It's a pretty useless error from Multer! Any idea what I'm doing wrong here?
EDIT: Here's the error coming out of Node:
Error: Unexpected field
at makeError (root/node_modules/multer/lib/make-error.js:12:13)
at wrappedFileFilter (root/node_modules/multer/index.js:39:19)
at Busboy.<anonymous> (root/node_modules/multer/lib/make-middleware.js:112:7)
at emitMany (events.js:108:13)
at Busboy.emit (events.js:182:7)
at Busboy.emit (root/node_modules/busboy/lib/main.js:31:35)
at PartStream.<anonymous> (root/node_modules/busboy/lib/types/multipart.js:213:13)
at emitOne (events.js:77:13)
at PartStream.emit (events.js:169:7)
at HeaderParser.<anonymous> (root/node_modules/dicer/lib/Dicer.js:51:16)
at emitOne (events.js:77:13)
at HeaderParser.emit (events.js:169:7)
at HeaderParser._finish (root/node_modules/dicer/lib/HeaderParser.js:68:8)
at SBMH.<anonymous> (root/node_modules/dicer/lib/HeaderParser.js:40:12)
at emitMany (events.js:108:13)
at SBMH.emit (events.js:182:7)
at SBMH._sbmh_feed (root/node_modules/streamsearch/lib/sbmh.js:159:14)
at SBMH.push (root/node_modules/streamsearch/lib/sbmh.js:56:14)
at HeaderParser.push (root/node_modules/dicer/lib/HeaderParser.js:46:19)
at Dicer._oninfo (root/node_modules/dicer/lib/Dicer.js:197:25)
at SBMH.<anonymous> (root/node_modules/dicer/lib/Dicer.js:127:10)
at emitMany (events.js:108:13)
The upload field name and the name in your markup are different; you need to make them match.
On the server side, keep both values the same: upload.array('artistUpload'). A minimal sketch of the matched pair follows.
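Here is a hedged sketch of that fix, using one field name on both sides (the name follows the question's markup; URL and file count are from the question):
// Client (ng-file-upload): the data key becomes the multipart field name.
Upload.upload({
  url: '/api/photos/upload/',
  arrayKey: '',                    // send repeated 'artistUpload' fields, not artistUpload[0], [1]...
  data: { artistUpload: files }
});

// Server (Express + multer): expect the exact same field name.
app.post('/api/photos/upload/', upload.array('artistUpload', 6), function (req, res, next) {
  res.json(req.files);
});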

Node.js script works once, then fails subsequently

I need a Node.js script that does the following:
1 - Triggers when an image is added to a specified S3 bucket.
2 - Creates a thumbnail of that image (360x203 pixels).
3 - Saves a copy of that thumbnail inside of a separate S3 folder.
4 - Uploads the thumbnail to a specified FTP server, SIX (6) times, using a "FILENAME-X" naming convention.
The code works just as expected at first: the sample event pulls the image, creates a thumbnail, saves it to the other S3 bucket, then uploads it to the FTP server.
The problem: it works for the test file HappyFace.jpg once, but each subsequent test fails. I also tried it with a different file, but was unsuccessful.
Also: if I could get some help writing a loop to name the different files that get uploaded, it would be very much appreciated. I usually code in PHP, so it would probably take me longer than I would hope to write.
Note: I removed my FTP credentials for privacy.
Problem Code Snippet:
function upload(contentType, data, next) {
  // Upload test file to FTP server
  c.append(data, 'testing.jpg', function(err) {
    console.log("CONNECTION SUCCESS!");
    if (err) throw err;
    c.end();
  });
  // Connect to ftp
  c.connect({
    host: "",
    port: 21, // defaults to 21
    user: "", // defaults to "anonymous"
    password: "" // defaults to "#anonymous"
  });
  // S3 Bucket Upload Function Goes Here
}
Full Code:
// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var util = require('util');
var Client = require('ftp');
var fs = require('fs');
var gm = require('gm')
  .subClass({ imageMagick: true }); // Enable ImageMagick integration.

// get reference to FTP client
var c = new Client();
// get reference to S3 client
var s3 = new AWS.S3();

exports.handler = function(event, context) {
  // Read options from the event.
  console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
  // Get source bucket
  var srcBucket = event.Records[0].s3.bucket.name;
  // Get source object key
  // Object key may have spaces or unicode non-ASCII characters.
  var srcKey =
    decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
  var url = 'http://' + srcBucket + ".s3.amazonaws.com/" + srcKey;
  // Set destination bucket
  var dstBucket = srcBucket + "-thumbs";
  // Set destination object key
  var dstKey = "resized-" + srcKey;
  // Infer the image type.
  var typeMatch = srcKey.match(/\.([^.]*)$/);
  if (!typeMatch) {
    console.error('unable to infer image type for key ' + srcKey);
    return;
  }
  var imageType = typeMatch[1];
  if (imageType != "jpg" && imageType != "png") {
    console.log('skipping non-image ' + srcKey);
    return;
  }
  // Download the image from S3, transform, and upload to a different S3 bucket.
  async.waterfall([
    function download(next) {
      // Download the image from S3 into a buffer.
      s3.getObject({
        Bucket: srcBucket,
        Key: srcKey
      },
      next);
    },
    function transform(response, next) {
      gm(response.Body).size(function(err, size) {
        // Transform the image buffer in memory.
        this.toBuffer(imageType, function(err, buffer) {
          if (err) {
            next(err);
          } else {
            next(null, response.ContentType, buffer);
          }
        });
      });
    },
    function upload(contentType, data, next) {
      // Upload test file to FTP server
      c.append(data, 'testing.jpg', function(err) {
        console.log("CONNECTION SUCCESS!");
        if (err) throw err;
        c.end();
      });
      // Connect to ftp
      c.connect({
        host: "",
        port: 21, // defaults to 21
        user: "", // defaults to "anonymous"
        password: "" // defaults to "#anonymous"
      });
      // Stream the thumb image to a different S3 bucket.
      s3.putObject({
        Bucket: dstBucket,
        Key: dstKey,
        Body: data,
        ContentType: contentType
      },
      next);
    }
  ], function (err) {
    if (err) {
      console.error(
        'Unable to resize ' + srcBucket + '/' + srcKey +
        ' and upload to ' + dstBucket + '/' + dstKey +
        ' due to an error: ' + err
      );
    } else {
      console.log(
        'Successfully resized ' + srcBucket + '/' + srcKey +
        ' and uploaded to ' + dstBucket + '/' + dstKey
      );
    }
    // context.done();
  });
};
The logs:
START RequestId: edc808c1-712b-11e5-aa8a-ed7c188ee86c Version: $LATEST
2015-10-12T21:55:20.481Z edc808c1-712b-11e5-aa8a-ed7c188ee86c Reading options from event: { Records: [ { eventVersion: '2.0', eventTime: '1970-01-01T00:00:00.000Z', requestParameters: { sourceIPAddress: '127.0.0.1' }, s3: { configurationId: 'testConfigRule', object: { eTag: '0123456789abcdef0123456789abcdef', sequencer: '0A1B2C3D4E5F678901', key: 'HappyFace.jpg', size: 1024 }, bucket: { arn: 'arn:aws:s3:::images', name: 'images', ownerIdentity: { principalId: 'EXAMPLE' } }, s3SchemaVersion: '1.0' }, responseElements: { 'x-amz-id-2': 'EXAMPLE123/5678abcdefghijklambdaisawesome/mnopqrstuvwxyzABCDEFGH', 'x-amz-request-id': 'EXAMPLE123456789' }, awsRegion: 'us-east-1', eventName: 'ObjectCreated:Put', userIdentity: { principalId: 'EXAMPLE' }, eventSource: 'aws:s3' } ] }
2015-10-12T21:55:22.411Z edc808c1-712b-11e5-aa8a-ed7c188ee86c Successfully resized images/HappyFace.jpg and uploaded to images-thumbs/resized-HappyFace.jpg
2015-10-12T21:55:23.432Z edc808c1-712b-11e5-aa8a-ed7c188ee86c CONNECTION SUCCESS!
END RequestId: edc808c1-712b-11e5-aa8a-ed7c188ee86c
REPORT RequestId: edc808c1-712b-11e5-aa8a-ed7c188ee86c Duration: 3003.76 ms Billed Duration: 3000 ms Memory Size: 128 MB Max Memory Used: 43 MB
Task timed out after 3.00 seconds
START RequestId: d347e7e3-712d-11e5-bfdf-05baa36d50fd Version: $LATEST
2015-10-12T22:08:55.910Z d347e7e3-712d-11e5-bfdf-05baa36d50fd Reading options from event: { Records: [ { eventVersion: '2.0', eventTime: '1970-01-01T00:00:00.000Z', requestParameters: { sourceIPAddress: '127.0.0.1' }, s3: { configurationId: 'testConfigRule', object: { eTag: '0123456789abcdef0123456789abcdef', sequencer: '0A1B2C3D4E5F678901', key: 'HappyFace.jpg', size: 1024 }, bucket: { arn: 'arn:aws:s3:::images', name: 'images', ownerIdentity: { principalId: 'EXAMPLE' } }, s3SchemaVersion: '1.0' }, responseElements: { 'x-amz-id-2': 'EXAMPLE123/5678abcdefghijklambdaisawesome/mnopqrstuvwxyzABCDEFGH', 'x-amz-request-id': 'EXAMPLE123456789' }, awsRegion: 'us-east-1', eventName: 'ObjectCreated:Put', userIdentity: { principalId: 'EXAMPLE' }, eventSource: 'aws:s3' } ] }
END RequestId: d347e7e3-712d-11e5-bfdf-05baa36d50fd
REPORT RequestId: d347e7e3-712d-11e5-bfdf-05baa36d50fd Duration: 3003.33 ms Billed Duration: 3000 ms Memory Size: 128 MB Max Memory Used: 17 MB
Task timed out after 3.00 seconds
The line:
var c = new Client();
is only going to get executed once; all calls to your handler() function will use the same instance of your FTP client.
If there could be multiple overlapping calls to handler()—and in an async world it sure seems likely—then the calls to the FTP client, including c.connect(…) and c.end() will be invoked multiple times against the same FTP client, which may already have an upload in progress, leading to a scenario like this:
Call to handler(). Upload begins.
Call to handler(). Second upload begins.
First upload completes and calls c.end().
Second upload is canceled.
The solution is to create a new FTP client instance for each upload or, if your FTP server has a problem with that (limits the number of client connections), you’ll need to serialize your uploads somehow. One way to do that, since you’re using the async library, would be to use async.queue.
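Here is a rough sketch of both fixes, assuming the same ftp package as the question (credentials left blank, as in the original): a fresh client per transfer, with the six "FILENAME-X" uploads the question asks about serialized through async.queue:
var async = require('async');
var Client = require('ftp');

// One fresh client per transfer, so overlapping handler() calls can never
// share or close each other's connection.
function ftpUpload(data, remoteName, done) {
  var c = new Client();
  c.on('ready', function () {
    c.put(data, remoteName, function (err) {
      c.end();
      done(err);
    });
  });
  c.on('error', done);
  c.connect({ host: "", port: 21, user: "", password: "" });
}

// Concurrency 1 serializes transfers: one upload finishes before the next starts.
var uploadQueue = async.queue(function (task, done) {
  ftpUpload(task.data, task.remoteName, done);
}, 1);

// The naming loop the question asks about: FILENAME-1 ... FILENAME-6.
function queueSixCopies(data, baseName, onAllDone) {
  for (var i = 1; i <= 6; i++) {
    uploadQueue.push({ data: data, remoteName: baseName + '-' + i + '.jpg' });
  }
  uploadQueue.drain = onAllDone;   // async 1.x/2.x style: drain fires when the queue empties
}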
