I want to get file information from all the directories. Once I have that, I have further code to implement, but I am stuck here and am getting the error pasted below. Any idea what is wrong in the code below?
cron.js
var fs = require('fs');
var path = require('path');
var async = require('async');

var directories = ['./logs/dit', './logs/st', './logs/uat'];

function cronJob() {
  directories.forEach(function (dir) {
    var files = fs.readdir(dir);
    async.eachSeries(files, function (file, callback) {
      var filePath = path.join(dirPath, file);
      var fileInfo = {};
      fs.stat(filePath, function (err, stats) {
        if (err) {
          console.info("File doesn't");
        } else {
          fileInfo.fileDate = stats.birthtime;
          fileInfo.filename = file;
          console.log(fileInfo);
          // compareDates(fileInfo, filePath);
          // callback();
        }
      });
    });
  });
}
cronJob();
Error
s\Ulog-0\ulog\app\serverfiles\logs\dit'
at Error (native)
at Object.fs.readdirSync (fs.js:808:18)
at C:\Users\WebstormProjects\Ulog-0\ulog\app\serverfiles\cronJob
20
at Array.forEach (native)
at cronJob (C:\Users\WebstormProjects\Ulog-0\ulog\app\serverfile
obs.js:7:13)
at Object.<anonymous> (C:\Users\\WebstormProjects\Ulog-0\ulog\app
readdir is asynchronous, so you need to wait for its callback before executing further operations.
It's hard to know what exactly the problem is, so I have included log statements.
Edit
var fs = require('fs');
var path = require('path');
var async = require('async');

var directories = ['/../../logs/dit', '/../../logs/st', '/../../logs/uat'];

// loop through each directory
async.eachSeries(directories, function (dir, cb1) {
  dir = __dirname + dir;
  console.log('reading', dir);
  // get files for the directory
  fs.readdir(dir, function (err, files) {
    if (err) return cb1(err);
    // loop through each file
    async.eachSeries(files, function (file, cb2) {
      var filePath = path.resolve(dir + '/' + file);
      // get info for the file
      fs.stat(filePath, function (err, stats) {
        if (err) return cb2(err);
        var fileInfo = { fileDate: stats.birthtime, filename: file };
        console.log('fileInfo', fileInfo);
        compareDates(fileInfo, filePath);
        cb2();
      });
    }, cb1);
  });
}, function (err) {
  if (err) {
    console.info('error', err);
    return;
  }
  // when you're done reading all the files, do something...
});
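If you are on a recent Node version you can drop the async library entirely and do the same traversal with fs.promises and async/await. A minimal sketch, assuming the same directory layout and the compareDates helper from the question:

const fs = require('fs').promises;
const path = require('path');

const directories = ['../../logs/dit', '../../logs/st', '../../logs/uat'];

async function cronJob() {
  for (const dir of directories) {
    const dirPath = path.join(__dirname, dir);
    const files = await fs.readdir(dirPath);
    for (const file of files) {
      const filePath = path.join(dirPath, file);
      const stats = await fs.stat(filePath);
      const fileInfo = { fileDate: stats.birthtime, filename: file };
      console.log(fileInfo);
      // compareDates(fileInfo, filePath); // helper from the question
    }
  }
}

cronJob().catch(console.error);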
Related
My current code is something like this.
var fs = require('fs')

fs.appendFile('log.txt', 'new data', function (err) {
  if (err) {
    // append failed
  } else {
    // done
  }
})
But it creates a new file outside the folder. How do I make it edit the log.txt file inside my folder?
You can use __dirname:
var fs = require('fs')

fs.appendFile(__dirname + '/log.txt', 'new data', function (err) {
  if (err) {
    // append failed
  } else {
    // done
  }
})
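If you would rather not concatenate the separator by hand, path.join builds the same absolute path; a small sketch of the equivalent call:

var fs = require('fs')
var path = require('path')

// path.join(__dirname, 'log.txt') always points at the folder this script lives in
fs.appendFile(path.join(__dirname, 'log.txt'), 'new data', function (err) {
  if (err) {
    // append failed
  } else {
    // done
  }
})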
What you could do is use path.resolve() to build the absolute path. Note that path.resolve('log.txt') on its own resolves against the current working directory, so pass __dirname explicitly:
var fs = require('fs');
const path = require('path');

fs.appendFile(path.resolve(__dirname, 'log.txt'), 'new data', function (err) {
  if (err) {
    // append failed
  } else {
    // done
  }
})
I'm new to node.js and I'm trying to get files from FTP with the ftp module and then show each file's directory path in the console. When I run the server with npm start, I get all the files (that's nice), but the console returns an empty array. If I run the server again with npm start, the console shows every path (awesome), but not for new files. I have tried to solve my problem with callbacks and setTimeout with no result :C
var Client = require('ftp');
var c = new Client();
var path = require('path');
var fs = require('fs');

var dir = path.join(__dirname, '../excelTime/');
var files = fs.readdirSync(dir);

const connectionProperties = {
  host: 'localhost',
  user: 'user_geek',
  port: '21'
}
Get files from FTP
const getFilesFromFTP = () => {
  c.on('ready', function() {
    c.list('./', function(err, list) {
      if (err) throw err;
      for (var i = 0; i < list.length; i++) {
        (function(i) {
          const name = list[i].name;
          c.get('/' + name, function(err, stream) {
            if (err) console.dir(err);
            if (!err) {
              stream.once('close', function() { c.end(); });
              stream.pipe(fs.createWriteStream('./excelTime/' + name, {flags: 'w'}));
            }
          });
        }).call(this, i);
      }
      c.end();
    });
  });
  c.on('error', function(err) {
    console.log(err)
  });
  c.connect(connectionProperties);
}
Get file path
const checkFolder = () => {
  let promises = files.map(file => path.join(dir, file));
  Promise.all(promises).then(console.log);
}
Call
setTimeout(function() {
  checkFolder();
}, 3000);

getFilesFromFTP();
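For context, the empty array comes from fs.readdirSync(dir) running once at startup, before any FTP download has finished. One rough sketch of a fix, under the assumption that the same ftp client, dir, and connectionProperties are in scope: only read the folder once every download stream has closed.

const getFilesFromFTP = (done) => {
  c.on('ready', function () {
    c.list('./', function (err, list) {
      if (err) return done(err);
      let pending = list.length;
      if (pending === 0) { c.end(); return done(null); }
      list.forEach(function (item) {
        c.get('/' + item.name, function (err, stream) {
          if (err) return done(err);
          stream.once('close', function () {
            // close the connection and report back once the last file is written
            if (--pending === 0) { c.end(); done(null); }
          });
          stream.pipe(fs.createWriteStream('./excelTime/' + item.name, { flags: 'w' }));
        });
      });
    });
  });
  c.on('error', done);
  c.connect(connectionProperties);
};

getFilesFromFTP(function (err) {
  if (err) return console.error(err);
  // the downloads are finished, so the folder can be listed now
  const files = fs.readdirSync(dir);
  console.log(files.map(file => path.join(dir, file)));
});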
I'm new to node.js & express.js, so... I want to upload multiple files, and later work with them.
But I need to send a response (OK or error status) after all my files have been saved to disk, or, if one fails, send an error response.
Now I have this code:
var express = require('express');
var router = express.Router();
var multipart = require('connect-multiparty');
var multipartMiddleware = multipart();
var fs = require('fs');

router.post('/upload', multipartMiddleware, function(req, res) {
  var reqBody = req.body;
  var reqFiles = req.files;
  saveFile(reqFiles, 'main.xlsx', function(err) {
    if (err) {
      res.status(404).send('');
      return;
    }
    res.send('Multi-File File uploaded');
  });
});

function saveFile(file, name, callback) {
  fs.writeFile('./uploads/' + name, file, callback);
}
But how can I change my code so it can handle something like this:
router.post('/upload', multipartMiddleware, function(req, res) {
  var reqBody = req.body;
  var reqFiles = req.files;
  saveFile(reqFiles['files'][0], 'main.xlsx', function(err) {
    if (err) {
      res.status(404).send('');
      return;
    }
  });
  saveFile(reqFiles['files'][1], 'second.xlsx', function(err) {
    if (err) {
      res.status(404).send('');
      return;
    }
  });
  res.send(''); // only after the first two file saves have finished
});

function saveFile(file, name, callback) {
  fs.writeFile('./uploads/' + name, file, callback);
}
You need to iterate through req.files. You can use the async library.
For example:
async.each(req.files, function(file, callback) {
  saveFile(file, file.name, callback)
}, function(err) {
  res.send('')
})
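Wired into the route from the question (assuming the files arrive under the files field, as in the question's req.files['files']), it could look roughly like this, with error handling added so a failed save returns an error status:

router.post('/upload', multipartMiddleware, function (req, res) {
  async.each(req.files['files'], function (file, callback) {
    saveFile(file, file.name, callback); // saveFile is the helper from the question
  }, function (err) {
    if (err) return res.status(404).send(''); // at least one save failed
    res.send('');                             // every file has been written
  });
});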
The connect-multiparty module itself recommends not using it; use the multiparty module directly.
When you have the list of files, they will also have file names. You can loop through those and save each file asynchronously, then respond.
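A rough sketch of that idea with plain Promises instead of the async library, reusing the saveFile helper and the reqFiles['files'] shape from the question (the target file names here are purely illustrative):

router.post('/upload', multipartMiddleware, function (req, res) {
  var files = req.files['files'] || [];
  var names = ['main.xlsx', 'second.xlsx']; // illustrative target names

  Promise.all(files.map(function (file, i) {
    return new Promise(function (resolve, reject) {
      saveFile(file, names[i], function (err) {
        if (err) return reject(err);
        resolve();
      });
    });
  }))
  .then(function () { res.send(''); })               // all files saved
  .catch(function () { res.status(404).send(''); }); // a save failed
});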
I have this express route with multer file-upload. When the upload is complete, I would like to encode the image to base64 and send with response.
However, when I do it like this, the code tries to execute the base64 encoding before the file has been written to the folder.
Edit: Added storage & upload functions
const storage = multer.diskStorage({
  destination: (req, file, callback) => {
    if (!fs.existsSync('./uploads')) {
      fs.mkdirSync('./uploads');
    }
    let path = './uploads';
    callback(null, path);
  },
  filename(req, file, cb) {
    let fileExt = file.originalname.substring(file.originalname.lastIndexOf('.')).toLowerCase();
    if (!imageFilter(fileExt)) {
      return false;
    } else {
      cb(null, file.originalname);
    }
  },
  onError: function (err, next) {
    console.log('error', err);
    next(err);
  },
});

const upload = multer({
  storage,
  limits: {
    fileSize: 1000 * 1000 * 2 // 2 MB
  }
}).single('file');
router.post('/upload', function (req, res) {
  var directory = 'uploads';
  fs.readdir(directory, (err, files) => {
    if (err) throw err;
    for (var file of files) {
      fs.unlink(path.join(directory, file), err => {
        if (err) throw err;
      });
    }
  });
  upload(req, res, function (err) {
    if (err) {
      return res.status(404).json({
        success: false,
        message: 'File is too large. (Max 2MB)'
      });
    }
    var file = req.file;
    var base64str = base64_encode('./uploads/' + file.originalname);
    return res.status(200).json({
      success: true,
      url: 'http://' + ip.address() + ':' + constants.PORT + '/api/uploads/' + file.originalname,
      image: 'data:image/png;base64,' + base64str
    });
  });
});
What would be the smartest way to achieve the right order of operations? Possibly promises or async/await?
This solution worked for me:
Node v8.4.0 is required for this
//app.js
const fs = require('fs');
const express = require('express');
const cors = require('cors');
const bodyParser = require('body-parser');
const app = express();
app.use(cors({credentials: true, origin: 'http://localhost:4200'}));
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: true }));
const Uploader = require('./Uploader.js');
const uploader = new Uploader();
app.post('/upload', uploader.startUpload.bind(uploader)); // bind so "this" inside startUpload refers to the Uploader instance
//Uploader.js
const util = require("util");
const crypto = require("crypto");
const multer = require('multer');

class Uploader {
  constructor() {
    const storageOptions = multer.diskStorage({
      destination: function(req, file, cb) {
        cb(null, __dirname + '/uploads/')
      },
      filename: function(req, file, cb) {
        crypto.pseudoRandomBytes(16, function(err, raw) {
          cb(null, raw.toString('hex') + Date.now() + '.' + file.originalname);
        });
      }
    });
    this.upload = multer({ storage: storageOptions });
  }

  async startUpload(req, res) {
    let filename;
    try {
      const upload = util.promisify(this.upload.any());
      await upload(req, res);
      filename = req.files[0].filename;
    } catch (e) {
      // Handle your exception here
    }
    return res.json({fileUploaded: filename});
  }
}

module.exports = Uploader;
Edit:
The "util" library provides a promisify method which lets you avoid what is called "callback hell". It converts a callback-based function into a Promise-based one.
Here is a small example to help understand the code above:
const util = require('util');

function wait(seconds, callback) {
  setTimeout(() => {
    callback();
  }, seconds);
}

function doSomething(callType) {
  console.log('I have done something with ' + callType + ' !');
}

// Default use case
wait(2 * 1000, () => {
  doSomething('callback');
});

const waitPromisified = util.promisify(wait);

// same with promises
waitPromisified(2000).then((response) => {
  doSomething('promise');
}).catch((error) => {
  console.log(error);
});

// same with async/await
(async () => {
  await waitPromisified(2 * 1000);
  doSomething('async/await');
})();
This will print:
I have done something with callback !
I have done something with promise !
I have done something with async/await !
I am having some trouble using multer and promises (bluebird).
I am trying to upload a PDF file into a folder and then extract the text inside this PDF with the textract plugin.
Both of the functions I created work, but I have some trouble with the execution and the promisification of the upload function. Here's my code:
pdf-rest.js:
var bodyParser = require('body-parser');
var request = require('request');
var jsonParser = bodyParser.json();
var fs = require('fs');
var Promise = require('bluebird');
var multer = require('multer');
var textract = require('textract');

module.exports = function(app) {
  var destination = "uploads";
  var filename = "" + Date.now() + ".pdf";
  var filePath = "C:\\wamp64\\www\\ebook-stage\\backend\\rest\\uploads\\" + filename;

  var storage = multer.diskStorage({
    destination: function (req, file, cb) {
      cb(null, destination);
    },
    filename: function (req, file, cb) {
      cb(null, filename);
    }
  });

  app.post('/rest/create_pdf', function (req, res) {
    var upload = multer({storage: storage}).single('file');

    function uploadFile() {
      var uploadFilePromise = new Promise(function (resolve, reject) {
        upload(req, res, function (err) {
          if (err) {
            reject(err);
            res.end('error uploading file')
          } else {
            res.end('file uploaded');
            console.log('fileupload')
          }
        });
        resolve(upload);
      });
      console.log(uploadFilePromise);
      return uploadFilePromise;
    }

    function textractPdf(path) {
      textract.fromFileWithPath(path, function (error, text) {
        console.log('textract');
        if (text) {
          console.log(text);
          return text;
        } else {
          console.log(error);
          return error;
        }
      });
    }

    uploadFile().then(textractPdf(filePath));
  });
};
server.js:
var express = require('express');
var app = express();
require('./pdf-rest.js')(app);
app.listen(8080);
I use a button on an HTML page to execute this script. The upload of the PDF into the folder works the first time I click the button, but the textract part doesn't.
I know I'm doing something wrong: the execution of the function textractPdf(path) happens first. I think I didn't promisify my upload function correctly.
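For what it's worth, a minimal sketch of how the promisification could be restructured, assuming the same upload, textract, res, and filePath as in the question: resolve only inside the upload callback, and pass a function (not the result of a call) to .then.

function uploadFile() {
  return new Promise(function (resolve, reject) {
    upload(req, res, function (err) {
      if (err) return reject(err); // upload failed
      resolve();                   // resolve only once multer has finished writing the file
    });
  });
}

function textractPdf(path) {
  return new Promise(function (resolve, reject) {
    textract.fromFileWithPath(path, function (error, text) {
      if (error) return reject(error);
      resolve(text);
    });
  });
}

uploadFile()
  .then(function () { return textractPdf(filePath); }) // runs only after the upload has resolved
  .then(function (text) { res.end(text); })
  .catch(function (err) { res.status(500).end(String(err)); });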