I am trying to modify an Excel file locally and then upload it to S3.
var s3 = new aws.S3({ accessKeyId: config._ACCESS_KEY_ID, secretAccessKey: config._SECRET_ACCESS_KEY });

var dataFile = await wb.xlsx.readFile(file).then(function () {
  var sh = wb.getWorksheet(config._sheetname);
  for (let i = 2; i <= sh.rowCount; i++) {
    let currRow = sh.getRow(i);
    if (currRow.getCell(1).text == product_code) {
      currRow.getCell(2).value = product_name
      currRow.commit();
    }
  }
  let updatedfile = wb.xlsx.writeFile(file).then(rs => { console.log('edit filed successfully ') });
  return updatedfile
})

const params = {
  Bucket: config._BUCKET_NAME,
  Key: config._filename
};

s3.putObject(params, function (err, data) {
  if (err) { console.log(err) }
  let newversionId = data.VersionId!
  console.log("Successfully uploaded data ", newversionId);
})
But the uploaded file is empty... I think I need to add a Body to the params, but I don't know what the value should be since it's an Excel file. Any ideas?
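One possible way to fill in the Body: since the workbook has already been written back to `file` (the writeFile promise resolves before the upload runs), the file can be read into a Buffer and passed as Body. This is only a sketch; the ContentType value is the usual MIME type for .xlsx files and is an assumption here:

// Sketch only: read the just-written workbook back from disk and upload its raw bytes.
const fs = require('fs');

const fileBuffer = fs.readFileSync(file); // Buffer with the updated .xlsx contents

const params = {
  Bucket: config._BUCKET_NAME,
  Key: config._filename,
  Body: fileBuffer, // the missing piece: the file's bytes
  // Assumed MIME type for .xlsx workbooks
  ContentType: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
};

s3.putObject(params, function (err, data) {
  if (err) { console.log(err); return; }
  console.log('Successfully uploaded data', data.VersionId);
});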
I am trying to download a file from Amazon S3, but the call is giving me a network error. I have set up my Amazon config file according to my needs, and I am using the S3.getObject() method to download the file. Unfortunately it fails. Here is my code:
export const AWSDownload = (link = '') => {
  const s3 = new AWS.S3({
    endpoint: 's3Url',
    region: 'us-east-1',
    accessKeyId: AWS_Config.AWS_ACCESS_KEY_ID,
    secretAccessKey: AWS_Config.AWS_SECRET_ACCESS_KEY
  })
  const params = {
    Bucket: AWS_Config.AWS_BUCKET,
    Key: `${link}`
  }
  const handleDownload = () => {
    return new Promise<string>((resolve, reject) => {
      console.log('Handling download called')
      s3.getObject(params, (err, data: any) => {
        console.log(data, 'response')
        if (err) {
          console.log(err, err.stack)
          reject(err)
        } else {
          if (data.Body) {
            let binary = ''
            const bytes: any = new Uint8Array(data.Body)
            const len: any = bytes.byteLength
            for (let i = 0; i < len; i++) {
              binary += String.fromCharCode(bytes[i])
            }
            const b64encoded = `data:${data.ContentType};base64,${btoa(binary)}`
            resolve(b64encoded)
          } else {
            reject('Empty body')
          }
        }
      })
    })
  }
  return {
    downloadImage: handleDownload
  }
}
export const AWS_Config = {
  FILESYSTEM_DRIVER: 's3',
  AWS_ACCESS_KEY_ID: 'KeyID',
  AWS_SECRET_ACCESS_KEY: 'Secret Access Key',
  AWS_DEFAULT_REGION: 'us-east-1',
  AWS_BUCKET: 'Bucket'
}
The result is mentioned below:
Try using the latest S3 client via:
import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3'

const s3Client = new S3Client({ region: process.env.region })

try {
  const data = await s3Client.send(new PutObjectCommand(uploadParams))
  console.log('Success', data)
} catch (err) {
  console.log('Error', err)
}
or check your network connection 😉
Edit: Similarly, look for GetObjectCommand in the latest documentation and implement it.
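For the download side, a minimal sketch with the v3 client might look like the following (the bucket and key are placeholders, and collecting the stream into a Buffer is just one way to consume the v3 Body):

import { S3Client, GetObjectCommand } from '@aws-sdk/client-s3'

const s3Client = new S3Client({ region: process.env.region })

try {
  // Placeholder bucket/key values; replace with your own.
  const data = await s3Client.send(new GetObjectCommand({ Bucket: 'my-bucket', Key: 'my-key' }))

  // In SDK v3 the Body is a readable stream, so collect it into a Buffer before use.
  const chunks = []
  for await (const chunk of data.Body) {
    chunks.push(chunk)
  }
  const fileBuffer = Buffer.concat(chunks)
  console.log('Success, downloaded', fileBuffer.length, 'bytes')
} catch (err) {
  console.log('Error', err)
}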
I want to make an API that will take a file or folder path from the user and upload it to AWS S3. I made progress, but when the user gives a file path, the API searches for that path on the server, not on the user's PC. I know I made a mistake, but I don't know how to connect the API to the user's PC and get access to their system files.

Here is the code for the POST route:
router.post("/create/:id", auth, async (req, res) => {
  try {
    let form = new multiparty.Form();
    form.parse(req, async (err, fields, files) => {
      console.log(fields);
      console.log(files);
      // check if user has access to project
      const user_id = req.userId;
      const project_id = req.params.id;
      const user_access = await check_user_access_project(user_id, project_id);
      const user = await User.findById(user_id);
      const project = await Project.findById(project_id);
      if (user_access === 1) {
        // create version
        const version = new Version({
          project_id: project_id,
          user_id: user_id,
          versionName: fields.versionName[0],
          version_description: fields.versionDescription[0],
          version_file: [],
        });
        const version_data = await version.save();
        console.log(version_data);
        let version_id = version_data._id;
        // sync folders to s3
        const version_folder_path = fields.files_path[0];
        let key = `${user.firstName}_${user_id}/${project.projectName}/${fields.versionName[0]}`;
        const version_folder_list = await sync_folders(
          version_folder_path,
          key
        );
        console.log("version folder list", version_folder_list);
        // update version with version folders
        await Version.findByIdAndUpdate(
          version_id,
          {
            $set: {
              version_file: version_folder_list,
            },
          },
          { new: true }
        );
        // wait for version update
        await version.save();
        // send response
        res.json({
          success: true,
          version: version_data,
        });
      } else {
        res.status(401).json({
          success: false,
          message: "User does not have access to project",
        });
      }
    });
  } catch (error) {
    res.status(400).json({ message: error.message });
  }
});
Here is the folder sync code:
const sync_folders = async (folder_path, key) => {
  function getFiles(dir, files_) {
    files_ = files_ || [];
    var files = fs.readdirSync(dir);
    for (var i in files) {
      var name = dir + "/" + files[i];
      if (fs.statSync(name).isDirectory()) {
        getFiles(name, files_);
      } else {
        files_.push(name);
      }
    }
    return files_;
  }

  const files = getFiles(folder_path);
  console.log(files);
  const fileData = [];
  for (let i = 0; i < files.length; i++) {
    const file = files[i];
    console.log(file);
    const fileName = file.split("/").pop();
    const fileType = file.split(".").pop();
    const fileSize = fs.statSync(file).size;
    const filePath = file;
    const fileBuffer = fs.readFileSync(filePath);
    // folder is last part of folder path (e.g. /folder1/folder2/folder3)
    const folder = folder_path.split("/").pop();
    console.log("folder: " + folder);
    // split filepath
    const filePath_ = filePath.split(folder).pop();
    let filekey = key + "/" + folder + filePath_;
    console.log("filekey: " + filekey);
    const params = {
      Bucket: bucket,
      Key: filekey,
      Body: fileBuffer,
      ContentType: fileType,
      ContentLength: fileSize,
    };
    const data = await s3.upload(params).promise();
    console.log(data);
    fileData.push(data);
  }
  console.log("file data", fileData);
  console.log("files uploaded");
  return fileData;
};
If somebody can help me, please do; I really need the help.
You need to post the file itself in a form, rather than just sending the user's directory path, and then upload what you receive to your S3 bucket.
This might be a good start if you're new to it:
https://www.w3schools.com/nodejs/nodejs_uploadfiles.asp
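As a rough sketch of what the server side could look like once the browser posts the actual file in a multipart form (the field name "file" is an assumption, and bucket, key, s3 and fs are the same names already used in the question's code), the handler reads multiparty's temporary file instead of a path typed by the user:

// Sketch only: the client submits <input type="file" name="file"> in a multipart form,
// so the bytes arrive in `files`, not as a path string inside `fields`.
form.parse(req, async (err, fields, files) => {
  if (err) return res.status(400).json({ message: err.message });

  const uploaded = files.file[0];                    // assumed form field name "file"
  const fileBuffer = fs.readFileSync(uploaded.path); // multiparty's temp file on the server

  const params = {
    Bucket: bucket,
    Key: `${key}/${uploaded.originalFilename}`,
    Body: fileBuffer,
  };

  const data = await s3.upload(params).promise();
  res.json({ success: true, location: data.Location });
});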
export const liverecorddownload = async (req, res) => {
  try {
    const BUCKET = "***";
    const PREFIX = '***';
    var params = {
      Bucket: BUCKET,
      Prefix: PREFIX,
    };
    s3.listObjectsV2(params, async (err, data) => {
      if (err) console.log("bucket list object c", err)
      else {
        if (data && data.Contents && data.Contents.length > 0) {
          var FILES = data && data.Contents
          console.log('Get:', FILES);
          const promises = [];
          try {
            for (let ii = 0; ii < FILES.length; ii++) {
              const params = {
                Bucket: BUCKET,
                Key: `${FILES[ii].Key}`,
              };
              promises.push(s3.getObject(params).promise());
            }
            const results = await Promise.all(promises);
            const buffers = results.map(result => result.Body);
            const content = buffers.map(buffer => JSON.parse(zlib.unzipSync(buffer).toString()));
            console.log('Read OK', JSON.stringify(content));
            const merged = Object.assign({}, ...content);
            console.log('Merged content', JSON.stringify(merged));
          } catch (err) {
            console.log(err, err.stack);
            throw err;
          }
        }
      }
    })
  } catch (e) {
    console.log("appconfig", e)
    return false
  }
}
The code above is mine, and the objects are .m3u8 files. When I run it, I get the error below:

Error: incorrect header check
    at Zlib.zlibOnError [as onerror] (zlib.js:187:17)

Let me know what I am doing wrong. Thanks in advance.
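For what it's worth, "incorrect header check" from zlib usually means the downloaded bytes are not actually zlib/gzip-compressed, and .m3u8 playlists are plain text rather than gzipped JSON. A minimal sketch of guarding the unzip step under that assumption:

// Sketch: only unzip when the buffer starts with the gzip magic bytes (0x1f 0x8b);
// otherwise treat the object (e.g. a plain-text .m3u8 playlist) as-is.
const toText = (buffer) => {
  const isGzip = buffer[0] === 0x1f && buffer[1] === 0x8b;
  return isGzip ? zlib.unzipSync(buffer).toString() : buffer.toString();
};

const results = await Promise.all(promises);
// Note: .m3u8 content is not JSON, so there is no JSON.parse here.
const content = results.map(result => toText(result.Body));
console.log('Read OK', content);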
I am trying to fetch multiple files from an AWS S3 bucket and want to merge the responses from all the files afterwards.
E.g. I have the following files:
my-bucket/mainfile1.json.gz
my-bucket/mainfile2.json.gz
my-bucket/mainfile3.json.gz
Currently I am accessing a single file like this:
const unzipFromS3 = (key, bucket) => {
  return new Promise(async (resolve, reject) => {
    AWS.config.loadFromPath(process.env["PWD"] + '/private/awss3/s3_config.json');
    var s3 = new AWS.S3();
    let options = {
      'Bucket': "my-bucket",
      'Key': "mainfile1.json.gz",
    };
    s3.getObject(options, function (err, res) {
      if (err) return reject(err);
      resolve(zlib.unzipSync(res.Body).toString());
    });
  });
};

unzipFromS3().then(function (result) {
  console.dir(result);
});
Now this works perfectly for a single file, but how can I achieve this with multiple files when I want to merge the data from 3 separate files?
Here's an initial idea of how to read the gzipped JSON files from S3, unzip them, then merge the resulting JavaScript objects, and finally gzip and write the merged results back to S3.
const aws = require('aws-sdk');
const zlib = require('zlib');

const s3 = new aws.S3();

const BUCKET = 'mybucket';
const PREFIX = '';
const FILES = ['test1.json.gz', 'test2.json.gz', 'test3.json.gz'];

(async () => {
  const promises = [];
  try {
    for (let ii = 0; ii < FILES.length; ii++) {
      const params = {
        Bucket: BUCKET,
        Key: `${PREFIX}${FILES[ii]}`,
      };
      console.log('Get:', params.Key, 'from:', params.Bucket);
      promises.push(s3.getObject(params).promise());
    }
    const results = await Promise.all(promises);
    const buffers = results.map(result => result.Body);
    const content = buffers.map(buffer => JSON.parse(zlib.unzipSync(buffer).toString()));
    console.log('Read OK', JSON.stringify(content));
    const merged = Object.assign({}, ...content);
    console.log('Merged content', JSON.stringify(merged));
    const params = {
      Bucket: BUCKET,
      Key: `${PREFIX}result/test.json.gz`,
      Body: zlib.gzipSync(JSON.stringify(merged), 'utf8'),
    };
    console.log('Put:', params.Key, 'to:', params.Bucket);
    const rc = await s3.putObject(params).promise()
  } catch (err) {
    console.log(err, err.stack);
    throw err;
  }
})();
I am trying to download a zip file in my MERN application. I am getting the file in the response; however, the client does not download the actual file. I am using archiver to zip the files and then return them in a fetch call.
Archive Service:
const archiver = require('archiver')
const zip = archiver('zip')
const path = require('path')
const fs = require('fs')

const appDir = path.dirname(require.main.filename)

exports.FileArchiver = function (feed, res) {
  // const app = this.app;
  const uploadsDir = path.join(appDir, '/uploads/');
  const templatesDir = path.join(appDir, '/templates/');
  const feedArray = feed.feed.data;
  const extensions = [".jpg", ".png", ".svg"];
  const feedArrayString = JSON.stringify(feedArray);
  const feedArrayObject = JSON.parse(feedArrayString);
  let imageArray = [];
  let templateType = 'b'; // test

  // grab image names from object
  feedArrayObject.forEach(function (x) { iterate(x) });
  // remove duplicates
  imageArray = uniq_fast(imageArray);

  // zip images
  for (let i = 0; i < imageArray.length; i++) {
    console.log(imageArray[i])
    const filePath = path.join(uploadsDir, imageArray[i]);
    zip.append(fs.createReadStream(filePath), { name: 'images/' + imageArray[i] });
  }

  res.attachment(feed.name + '.zip');
  zip.pipe(res);
  zip.append(feedArrayString, { name: 'feed.json' })
  zip.directory(templatesDir + '/' + templateType, false);
  zip.on('error', (err) => { throw err; });
  zip.on('warning', (err) => {
    if (err.code === 'ENOENT') {
      console.log('ENOENT for archive')
    } else {
      throw err;
    }
  });
  zip.finalize();
  return this;
}
Client side fetch:
export const downloadData = (url, _id, name, type) => {
  return fetch(url, { method: 'GET' })
    .then((res) => {
      console.log(res);
      return res;
    })
}
Client side Headers attached:
content-disposition: attachment; filename="a_gpo.zip"
content-type: application/zip
The network request returns a 200 status, and I can also see that the attachment response on the client contains the zip file's raw binary content. However, the browser never actually downloads the file.
Try redirecting the browser location to the URL:

export const downloadData = (url) => {
  window.location = url;
}
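If redirecting isn't an option (for example, the request needs custom headers), another possible approach is to turn the fetch response into a Blob and trigger the download from a temporary anchor element. This is only a sketch; the fileName parameter is illustrative:

// Sketch: save the response body as a Blob via a temporary <a> element.
export const downloadData = (url, fileName = 'download.zip') => {
  return fetch(url, { method: 'GET' })
    .then((res) => res.blob())
    .then((blob) => {
      const objectUrl = window.URL.createObjectURL(blob);
      const link = document.createElement('a');
      link.href = objectUrl;
      link.download = fileName; // suggested name for the saved zip
      document.body.appendChild(link);
      link.click();
      link.remove();
      window.URL.revokeObjectURL(objectUrl);
    });
}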