I want to make an API that takes a file or folder path from the user and uploads it to AWS S3. I've made some progress, but when the user submits a file path, the server looks for that path on the server itself, not on the user's PC.
I know I've made a mistake, but I don't know how to connect the API to the user's machine and get access to its file system.
Here is the code for the POST route:
router.post("/create/:id", auth, async (req, res) => {
try {
let form = new multiparty.Form();
form.parse(req, async (err, fields, files) => {
console.log(fields);
console.log(files);
//check if user has access to project
const user_id = req.userId;
const project_id = req.params.id;
const user_access = await check_user_access_project(user_id, project_id);
const user = await User.findById(user_id);
const project = await Project.findById(project_id);
if (user_access === 1) {
//create version
const version = new Version({
project_id: project_id,
user_id: user_id,
versionName: fields.versionName[0],
version_description: fields.versionDescription[0],
version_file: [],
});
const version_data = await version.save();
console.log(version_data);
let version_id = version_data._id;
//sync folders to s3
const version_folder_path = fields.files_path[0];
let key = `${user.firstName}_${user_id}/${project.projectName}/${fields.versionName[0]}`;
const version_folder_list = await sync_folders(
version_folder_path,
key
);
console.log("version folder list", version_folder_list);
//update version with version folders
await Version.findByIdAndUpdate(
version_id,
{
$set: {
version_file: version_folder_list,
},
},
{ new: true }
);
//wait for version update
await version.save();
//send response
res.json({
success: true,
version: version_data,
});
} else {
res.status(401).json({
success: false,
message: "User does not have access to project",
});
}
});
} catch (error) {
res.status(400).json({ message: error.message });
}
});
Here is the folder sync code:
const sync_folders = async (folder_path, key) => {
  function getFiles(dir, files_) {
    files_ = files_ || [];
    var files = fs.readdirSync(dir);
    for (var i in files) {
      var name = dir + "/" + files[i];
      if (fs.statSync(name).isDirectory()) {
        getFiles(name, files_);
      } else {
        files_.push(name);
      }
    }
    return files_;
  }
  const files = getFiles(folder_path);
  console.log(files);
  const fileData = [];
  for (let i = 0; i < files.length; i++) {
    const file = files[i];
    console.log(file);
    const fileName = file.split("/").pop();
    const fileType = file.split(".").pop();
    const fileSize = fs.statSync(file).size;
    const filePath = file;
    const fileBuffer = fs.readFileSync(filePath);
    //folder is last part of folder path (e.g. /folder1/folder2/folder3)
    const folder = folder_path.split("/").pop();
    console.log("folder: " + folder);
    //split filepath
    const filePath_ = filePath.split(folder).pop();
    let filekey = key + "/" + folder + filePath_;
    console.log("filekey: " + filekey);
    const params = {
      Bucket: bucket,
      Key: filekey,
      Body: fileBuffer,
      ContentType: fileType,
      ContentLength: fileSize,
    };
    const data = await s3.upload(params).promise();
    console.log(data);
    fileData.push(data);
  }
  console.log("file data", fileData);
  console.log("files uploaded");
  return fileData;
};
If somebody could help me with this, I would really appreciate it.
You need to post the file itself in a form (so its contents are sent to your server) rather than sending the user's directory path, and then upload the received file to your S3 bucket.
This might be a good start if you're new to it:
https://www.w3schools.com/nodejs/nodejs_uploadfiles.asp
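As a rough sketch of the server side (reusing the multiparty setup from your route; router, auth, s3 and bucket are assumed to be the same objects you already have, and the form field name "file" is my own choice), the uploaded file lands in a temporary path on your server and can be streamed to S3 from there:
const multiparty = require("multiparty");
const fs = require("fs");

router.post("/upload", auth, (req, res) => {
  const form = new multiparty.Form();
  form.parse(req, async (err, fields, files) => {
    if (err) return res.status(400).json({ message: err.message });
    try {
      // multiparty writes the uploaded file to a temp path on the server
      const uploaded = files.file[0];
      const params = {
        Bucket: bucket,
        Key: `uploads/${uploaded.originalFilename}`,
        Body: fs.createReadStream(uploaded.path),
        ContentType: uploaded.headers["content-type"],
      };
      const data = await s3.upload(params).promise();
      res.json({ success: true, location: data.Location });
    } catch (error) {
      res.status(400).json({ message: error.message });
    }
  });
});
For whole folders, the browser has to send every file itself (for example via an <input type="file" webkitdirectory multiple> element); the server never gets direct access to the user's filesystem, which is why your current code ends up resolving the path on the server.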
Related
I am trying to download all files located within a specific folder on an FTP site. The folder is called "IN" (as shown in the example code) and contains a number of .csv files.
The requirements are:
1. download all the CSV files present in the IN folder to a local folder
2. move them from the IN folder on the FTP to the ARCHIVE folder on the FTP
3. merge all the CSV files into a new CSV containing the content of all the imported files
I'm stuck on the second point. I use promise-ftp for this process, and here is my code:
const fs = require("fs");
const Client = require("promise-ftp");
const sftp = new Client();

sftp
  .connect({
    host: "xxxxx",
    port: 21,
    user: "xxxx",
    password: "xxxxx",
  })
  .then(() => {
    console.log("connected");
    // will return an array of objects with information about all files in the remote folder
    return sftp.list("/htdocs/IN");
  })
  .then(async (data) => {
    // data is the array of objects
    len = data.length;
    // x is one element of the array
    await data.forEach((x) => {
      let remoteFilePath = "/htdocs/IN/" + x.name;
      if (x.type === "-") {
        console.log(x.name);
        sftp
          .get(remoteFilePath)
          .then((stream) => {
            console.log("---------");
            console.log(x.name);
            return new Promise(function (resolve, reject) {
              stream.once("close", resolve);
              stream.once("error", reject);
              stream.pipe(fs.createWriteStream("IN/" + x.name));
            });
          })
          .then(() => {
            sftp
              .rename("/htdocs/IN/" + x.name, "/htdocs/OUT/" + x.name)
              .then(function () {
                console.log("rinominato");
              });
            console.log("qui cancello: " + x.name);
          });
      }
    });
  })
  .then(() => {
    sftp.end();
  })
  .catch((err) => {
    console.log(err, "catch error");
  });
Now this script downloads all the files to my local storage, but when I try to rename them into another folder I receive this error:
Unhandled rejection FtpConnectionError: can't perform 'rename' command when connection status is: disconnecting
I think I'm making some mistake about where to put the sftp.end() call, but I'm new to Node.js.
Could anyone help me understand where I'm going wrong?
I added a new version of the code with more async/await, but it's still not working:
const fs = require("fs");
const Client = require("promise-ftp");
const ftp = new Client();

const downloadAndStoreFile = async () => {
  await ftp.connect({
    host: "xxxx",
    port: 21,
    user: "xxx",
    password: "xxx",
  });
  console.log('connected');
  let data = await ftp.list('/htdocs/IN');
  data = data.filter(function (file) {
    return file.type === "-" ? file : null;
  });
  console.log(data);
  data.forEach((file) => {
    if (file.type === '-') {
      //console.log(file.name);
      const operation = async () => {
        console.log(file.name);
        const stream = await ftp.get("/htdocs/IN/" + file.name);
        const streamResult = await stream.pipe(fs.createWriteStream("IN/" + file.name));
        const renameResult = await ftp.rename("/htdocs/IN/" + file.name, "/htdocs/OUT/" + file.name);
      };
      operation();
    }
  });
  await ftp.end();
};

downloadAndStoreFile();
Thank you.
Please try doing this the async/await way:
const fs = require("fs");
const Client = require("promise-ftp");
const sftp = new Client();

sftp
  .connect({
    host: "XXXX",
    port: 21,
    user: "XXX",
    password: "XXX",
  })
  .then(() => {
    console.log("connected");
    // will return an array of objects with information about all files in the remote folder
    return sftp.list("/htdocs/IN");
  })
  .then(async (data) => {
    // data is the array of objects
    const len = data.length;
    for (let i = 0; i < len; i++) {
      let x = data[i]; // x is one element of the array
      let remoteFilePath = "/htdocs/IN/" + x.name;
      if (x.type === "-") {
        console.log(x.name);
        let stream = await sftp.get(remoteFilePath); // get() returns a promise, so await it
        console.log("---------");
        console.log(x.name);
        // wait until the local write has finished before renaming
        await new Promise((resolve, reject) => {
          stream.once("close", resolve);
          stream.once("error", reject);
          stream.pipe(fs.createWriteStream("IN/" + x.name));
        });
        await sftp.rename("/htdocs/IN/" + x.name, "/htdocs/OUT/" + x.name);
        console.log("rinominato");
        console.log("qui cancello: " + x.name);
      }
    }
  })
  .then(() => {
    sftp.end();
  })
  .catch((err) => {
    console.log(err, "catch error");
  });
The error you are getting is quite descriptive:
Unhandled rejection FtpConnectionError: can't perform 'rename' command when connection status is: disconnecting
What we can conclude from it is that rename is being called while client tries to disconnect. The problem is with async calls being made in the forEach loop, since you are not waiting for them to settle before ending the connection.
One possible solution would be mapping operations to promise array and calling Promise.all.
Promise.all(
  data
    .filter((file) => file.type === "-")
    .map((file) => {
      const operation = async () => {
        const stream = await ftp.get("/htdocs/IN/" + file.name);
        const streamResult = await stream.pipe(
          fs.createWriteStream("IN/" + file.name)
        );
        const renameResult = await ftp.rename(
          "/htdocs/IN/" + file.name,
          "/htdocs/OUT/" + file.name
        );
      };
      return operation();
    })
);
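Note that the combined promise still has to be awaited before the connection is closed, otherwise ftp.end() will keep racing the transfers. A rough sketch of how this could sit inside downloadAndStoreFile (the promise wrapper around the stream is only there because stream.pipe() does not return a promise):
// inside downloadAndStoreFile(), replacing the forEach loop:
await Promise.all(
  data
    .filter((file) => file.type === "-")
    .map(async (file) => {
      const stream = await ftp.get("/htdocs/IN/" + file.name);
      // wait until the local copy has finished writing
      await new Promise((resolve, reject) => {
        stream.once("close", resolve);
        stream.once("error", reject);
        stream.pipe(fs.createWriteStream("IN/" + file.name));
      });
      await ftp.rename("/htdocs/IN/" + file.name, "/htdocs/OUT/" + file.name);
    })
);

// only disconnect once every download + rename has settled
await ftp.end();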
I am trying to change an Excel file locally and then upload it to S3.
var s3 = new aws.S3({ accessKeyId: config._ACCESS_KEY_ID, secretAccessKey: config._SECRET_ACCESS_KEY });

var dataFile = await wb.xlsx.readFile(file).then(function () {
  var sh = wb.getWorksheet(config._sheetname);
  for (let i = 2; i <= sh.rowCount; i++) {
    let currRow = sh.getRow(i);
    if (currRow.getCell(1).text == product_code) {
      currRow.getCell(2).value = product_name
      currRow.commit();
    }
  }
  let updatedfile = wb.xlsx.writeFile(file).then(rs => { console.log('edit filed successfully ') });
  return updatedfile
})

const params = {
  Bucket: config._BUCKET_NAME,
  Key: config._filename
};

s3.putObject(params, function (err, data) {
  if (err) { console.log(err) }
  let newversionId = data.VersionId!
  console.log("Successfully uploaded data ", newversionId);
})
But the uploaded file is empty... I should add a Body to the params, but I don't know what its value should be since it's an Excel file. Any ideas?
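For example, I am guessing something like this, re-reading the workbook that was just written to disk and passing its contents as the Body (untested):
const fs = require("fs");

// guess: read back the file that wb.xlsx.writeFile(file) just saved
const fileContent = fs.readFileSync(file);

const params = {
  Bucket: config._BUCKET_NAME,
  Key: config._filename,
  Body: fileContent,
  ContentType: "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
};

s3.putObject(params, function (err, data) {
  if (err) { console.log(err) }
  else { console.log("Successfully uploaded data ", data.VersionId); }
});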
I'm using Node 12.x to write my Lambda function. Here is the parsing error I am getting. What could be the reason?
Update
const im = require("imagemagick");
const fs = require("fs");
const os = require("os");
const uuidv4 = require("uuid/v4");
const { promisify } = require("util");
const AWS = require('aws-sdk');

const resizeAsync = promisify(im.resize)
const readFileAsync = promisify(fs.readFile)
const unlinkAsync = promisify(fs.unlink)

AWS.config.update({ region: 'ap-south-1' })
const s3 = new AWS.S3();

exports.handler = async (event) => {
  let filesProcessed = event.Records.map((record) => {
    let bucket = record.s3.bucket.name;
    let filename = record.s3.object.key;

    //Fetch filename from S3
    var params = {
      Bucket: bucket,
      Key: filename
    };
    //let inputData = await s3.getObject(params).promise()
    let inputData = await s3.getObject(params).promise();

    //Resize the file
    let tempFile = os.tmpdir() + '/' + uuidv4() + '.jpg';
    let resizeArgs = {
      srcData: inputData.Body,
      dstPath: tempFile,
      width: 150
    };
    await resizeAsync(resizeArgs)

    //Read the resized File
    let resizedData = await readFileAsync(tempFile)

    //Upload the resized file to S3
    let targetFilename = filename.substring(0, filename.lastIndexOf('.') + '-small.jpg')
    var params = {
      Bucket: bucket + '-dest',
      Key: targetFilename,
      Body: new Buffer(resizedData),
      ContentType: 'image/jpeg'
    }
    await s3.putObject(params).promise();
    return await unlinkAsync(tempFile)
  })
  await Promise.all(filesProcessed)
  return "done"
}
Here is the same code. I am getting an "Unexpected token" error at the s3 call when I hover over the red mark (shown in the image).
What you can do is declare inputData as shown below and initialize it with the response from getObject:
let inputData;
var params = {
  Bucket: "examplebucket",
  Key: "HappyFace.jpg"
};
s3.getObject(params, function (err, data) {
  if (err) console.log(err, err.stack); // an error occurred
  else inputData = data; // successful response
});
For more, you can refer here
I am trying to upload an image to Firebase and then produce 2 thumbnails. I am able to do this with no problems. My current roadblock is that when I write the URLs to the Realtime Database, I always get the same URL as the initial upload.
For example:
1st upload: I get my uploaded image with the two proper thumbnails for that image
2nd upload: I get my uploaded image with the two previous thumbnails (from the first image)
3rd upload: I get my uploaded image with the first image's thumbnails...
...this continues to reproduce the URLs of the first upload
In my Storage the correct thumbnails are being generated, but the URLs are always for the first upload.
I don't know if this is a problem with getSignedUrl() or not; I'm really not sure what's going on here.
Here is my cloud function:
export const generateThumbs = functions.storage
  .object()
  .onFinalize(async object => {
    const bucket = gcs.bucket(object.bucket); // The Storage object.
    // console.log(object);
    console.log(object.name);
    const filePath = object.name; // File path in the bucket.
    const fileName = filePath.split('/').pop();
    const bucketDir = dirname(filePath);

    const workingDir = join(tmpdir(), 'thumbs');
    const tmpFilePath = join(workingDir, 'source.png');

    if (fileName.includes('thumb#') || !object.contentType.includes('image')) {
      console.log('exiting function');
      return false;
    }

    // 1. ensure thumbnail dir exists
    await fs.ensureDir(workingDir);

    // 2. Download source fileName
    await bucket.file(filePath).download({
      destination: tmpFilePath
    });

    // 3. resize the images and define an array of upload promises
    const sizes = [64, 256];

    const uploadPromises = sizes.map(async size => {
      const thumbName = `thumb#${size}_${fileName}`;
      const thumbPath = join(workingDir, thumbName);

      // Resize source image
      await sharp(tmpFilePath)
        .resize(size, size)
        .toFile(thumbPath);

      // upload to gcs
      return bucket.upload(thumbPath, {
        destination: join(bucketDir, thumbName),
        metadata: {
          contentType: 'image/jpeg'
        }
      }).then((data) => {
        const file = data[0]
        // console.log(data)
        file.getSignedUrl({
          action: 'read',
          expires: '03-17-2100'
        }).then((response) => {
          const url = response[0];
          if (size === 64) {
            // console.log('generated 64');
            return admin.database().ref('profileThumbs').child(fileName).set({ thumb: url });
          } else {
            // console.log('generated 128');
            return admin.database().ref('categories').child(fileName).child('thumb').set(url);
          }
        }).catch(function (error) {
          console.error(error);
          return;
        });
      })
    });

    // 4. Run the upload operations
    await Promise.all(uploadPromises);

    // 5. Cleanup remove the tmp/thumbs from the filesystem
    return fs.remove(workingDir);
  })
I cleaned up my code and solved my problem. Here is how I generated the URLs and wrote them to the proper database paths by accessing the user's UID and postId in the file path:
export const generateThumbs = functions.storage
  .object()
  .onFinalize(async object => {
    const fileBucket = object.bucket; // The Storage bucket that contains the file.
    const filePath = object.name; // File path in the bucket.
    const fileName = filePath.split('/').pop();
    const userUid = filePath.split('/')[2];
    const sizes = [64, 256];
    const bucketDir = dirname(filePath);

    console.log(userUid);

    if (fileName.includes('thumb#') || !object.contentType.includes('image')) {
      console.log('exiting function');
      return false;
    }

    const bucket = gcs.bucket(fileBucket);
    const tempFilePath = path.join(tmpdir(), fileName);

    return bucket.file(filePath).download({
      destination: tempFilePath
    }).then(() => {
      sizes.map(size => {
        const newFileName = `thumb#${size}_${fileName}.png`
        const newFileTemp = path.join(tmpdir(), newFileName);
        const newFilePath = `thumbs/${newFileName}`

        return sharp(tempFilePath)
          .resize(size, size)
          .toFile(newFileTemp, () => {
            return bucket.upload(newFileTemp, {
              destination: join(bucketDir, newFilePath),
              metadata: {
                contentType: 'image/jpeg'
              }
            }).then((data) => {
              const file = data[0]
              console.log(data)
              file.getSignedUrl({
                action: 'read',
                expires: '03-17-2100'
              }, function (err, url) {
                console.log(url);
                if (err) {
                  console.error(err);
                  return;
                }
                if (size === 64) {
                  return admin.database().ref('profileThumbs').child(userUid).child(fileName).set({ thumb: url });
                } else {
                  return admin.database().ref('categories').child(fileName).child('thumb').set(url);
                }
              })
            })
          })
      })
    }).catch(error => {
      console.log(error);
    });
  })
I am trying to download a zip file in my MERN application. I am getting the file in the response; however, the client does not download the actual file. I am using archiver to zip the files and then return them in a fetch call.
Archive Service:
const archiver = require('archiver')
const zip = archiver('zip')
const path = require('path')
const fs = require('fs')
const appDir = path.dirname(require.main.filename)

exports.FileArchiver = function (feed, res) {
  // const app = this.app;
  const uploadsDir = path.join(appDir, '/uploads/');
  const templatesDir = path.join(appDir, '/templates/');
  const feedArray = feed.feed.data;
  const extensions = [".jpg", ".png", ".svg"];
  const feedArrayString = JSON.stringify(feedArray);
  const feedArrayObject = JSON.parse(feedArrayString);
  let imageArray = [];
  let templateType = 'b'; //test

  // grab image names from object
  feedArrayObject.forEach(function (x) { iterate(x) });

  // remove duplicates
  imageArray = uniq_fast(imageArray);

  // zip images
  for (let i = 0; i < imageArray.length; i++) {
    console.log(imageArray[i])
    const filePath = path.join(uploadsDir, imageArray[i]);
    zip.append(fs.createReadStream(filePath), { name: 'images/' + imageArray[i] });
  }

  res.attachment(feed.name + '.zip');
  zip.pipe(res);
  zip.append(feedArrayString, { name: 'feed.json' })
  zip.directory(templatesDir + '/' + templateType, false);
  zip.on('error', (err) => { throw err; });
  zip.on('warning', (err) => {
    if (err.code === 'ENOENT') {
      console.log('ENOENT for archive')
    } else {
      throw err;
    }
  });
  zip.finalize();

  return this;
}
Client side fetch:
export const downloadData = (url, _id, name, type) => {
  return fetch(url, { method: 'GET' })
    .then((res) => {
      console.log(res);
      return res;
    })
}
Client side Headers attached:
content-disposition: attachment; filename="a_gpo.zip"
content-type: application/zip
The network request returns a 200 status, and I can see that the response in the client contains the zip file's binary content. However, the browser never triggers the actual file download.
Try redirecting the browser location to the URL:
export const downloadData = (url) => {
  window.location = url;
}
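If you need to keep the fetch call instead, one possible sketch (untested, with the unused arguments of the original helper dropped) is to read the response as a blob and trigger the download from a temporary object URL:
export const downloadData = (url, name) => {
  return fetch(url, { method: 'GET' })
    .then((res) => res.blob())
    .then((blob) => {
      // create a temporary link pointing at the blob and click it
      const objectUrl = window.URL.createObjectURL(blob);
      const link = document.createElement('a');
      link.href = objectUrl;
      link.download = name + '.zip'; // e.g. the feed name used on the server
      document.body.appendChild(link);
      link.click();
      link.remove();
      window.URL.revokeObjectURL(objectUrl);
    });
}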