I am trying to save the audio file sent from the client and forward it to Dialogflow for intent detection, but I get no response and no error message. sessionClient.detectIntent() works fine if I move it outside of req.pipe(). The request body is a WAV file.
router.post('/upload', function (req, res, next) {
  var timeCode = new Date();
  const filename = 'public/voiceFiles/' + timeCode.getTime() + '.wav';
  req.pipe(fs.createWriteStream(filename))
    .on('error', (e) => res.status(500).end(e.message))
    .on('close', () => {
      const readFile = util.promisify(fs.readFile);
      readFile(filename)
        .then(inputAudio => {
          // The audio query request
          const request = {
            session: sessionPath,
            queryInput: {
              audioConfig: {
                audioEncoding: encoding,
                sampleRateHertz: sampleRateHertz,
                languageCode: languageCode,
              },
            },
            inputAudio: inputAudio,
          };
          // Recognizes the speech in the audio and detects its intent.
          return sessionClient.detectIntent(request);
        })
        .then(responses => {
          console.log('Detected intent:');
          res.end(JSON.stringify(responses));
        })
        .catch(err => {
          console.error('ERROR:', err);
          res.end(err.message); // res.end() expects a string or Buffer, not an Error object
        });
      // [END dialogflow_detect_intent_audio]
    });
});
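For reference, here is a minimal sketch (not part of the original code) that buffers the request body in memory and passes it to detectIntent directly, skipping the write-to-disk round trip. It assumes sessionClient, sessionPath, encoding, sampleRateHertz, and languageCode are defined as in the snippet above, that the raw WAV bytes arrive as the request body, and the route name is illustrative.
// Sketch: collect the request body into a Buffer and send it straight to Dialogflow.
router.post('/upload-buffered', function (req, res) {
  const chunks = [];
  req.on('data', (chunk) => chunks.push(chunk));
  req.on('error', (e) => res.status(500).end(e.message));
  req.on('end', () => {
    const request = {
      session: sessionPath,
      queryInput: {
        audioConfig: { audioEncoding: encoding, sampleRateHertz, languageCode },
      },
      inputAudio: Buffer.concat(chunks), // raw WAV bytes from the request body
    };
    sessionClient
      .detectIntent(request)
      .then((responses) => res.json(responses))
      .catch((err) => res.status(500).json({ error: err.message }));
  });
});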
I am uploading a large video file to the server and then to an S3 bucket, but I am running into out-of-memory issues because of the Buffer.from() calls. How can I avoid this memory leak? The memory is not released after the stream finishes.
Note:
I want to keep the stream pipeline so that I can modify the stream before uploading it to S3.
const passToS3 = new PassThrough();
const s3 = new AWS.S3({
  accessKeyId: config.access_key,
  secretAccessKey: config.secret_key,
});

const fileReadStream = new Readable({
  read(size) {
    if (!size) this.push(null);
    else this.push();
  },
});

const fileUploadPromise = [];

const busboyUpload = (req) =>
  new Promise((resolve, reject) => {
    const busboy = Busboy({ headers: req.headers });
    busboy.on("file", async (fieldname, file, filename, encoding, mimeType) => {
      console.log(filename);
      fileUploadPromise.push(
        new Promise((res, rej) => {
          s3.upload(
            {
              Bucket: // bucket-name,
              Key: //filename,
              Body: passToS3,
              contentDisposition: "attachment",
            },
            (err, data) => {
              if (err) {
                rej();
              } else {
                console.log(data, "===>");
                res({ ...data, originalname: filename, mimeType });
              }
            }
          );
        })
      );
      fileReadStream.pipe(passToS3);
      file.on("data", async (nextChunk) => {
        fileReadStream.push(Buffer.from(nextChunk)); // memory--leak xxxxxxxxxxxx
      });
      file.on("end", () => {
        fileReadStream.push(null);
      });
    });
    busboy.on("finish", () => {
      Promise.all(fileUploadPromise)
        .then((data) => {
          console.log(data[0]);
          resolve(data[0]);
        })
        .catch((err) => {
          reject(err);
        });
    });
    req.pipe(busboy);
  });

app.post("/file-upload", async (req, res) => {
  try {
    console.log("dfjalkdf");
    const uploadedFileData = await busboyUpload(req);
    req.file = uploadedFileData;
    res.sendStatus(200);
  } catch (err) {
    console.log("Error==>", err);
    res.sendStatus(500);
  }
});

app.listen(5000, () => {
  console.log("server is running");
});
fileReadStream.push(Buffer.from(nextChunk));
This line keeps allocating memory, and even after the stream finishes, the memory is not released.
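For comparison, here is a minimal sketch (not the original code) of the same upload without the manual Buffer.from() pushes: the busboy file stream is piped through a PassThrough directly into s3.upload, so no chunks are copied into a second Readable and backpressure is handled by pipe(). It assumes busboy 1.x and the same s3 client as above; the bucket name is a placeholder.
const { PassThrough } = require("stream");

const busboyUploadStreaming = (req) =>
  new Promise((resolve, reject) => {
    const busboy = Busboy({ headers: req.headers });
    // busboy 1.x passes (name, stream, info); older versions pass filename/encoding/mimeType separately.
    busboy.on("file", (fieldname, file, info) => {
      const passToS3 = new PassThrough(); // one PassThrough per file
      s3.upload(
        { Bucket: "BUCKET_NAME_PLACEHOLDER", Key: info.filename, Body: passToS3 },
        (err, data) =>
          err ? reject(err) : resolve({ ...data, originalname: info.filename, mimeType: info.mimeType })
      );
      // Insert any transform streams here before the data reaches S3.
      file.pipe(passToS3);
    });
    busboy.on("error", reject);
    req.pipe(busboy);
  });
This sketch resolves after the first file finishes uploading; for multiple files per request, collect one promise per file as the original code does.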
Whenever streamedItem.eventName === "INSERT" for my table, I would like to POST streamedItem.dynamodb.NewImage (I think) to Zapier, so I can automate social media posting.
Below is my current file; in CloudWatch I am getting the error ERROR connect ECONNREFUSED 127.0.0.1:3000.
/* Amplify Params - DO NOT EDIT
  ENV
  REGION
  ZAPIER_URL
Amplify Params - DO NOT EDIT */

/**
 * @type {import('@types/aws-lambda').APIGatewayProxyHandler}
 */
const http = require("http"); // or https

const defaultOptions = {
  host: "localhost",
  port: 3000, // or 443 for https
  headers: {
    "Content-Type": "application/json",
  },
};

const post = (path, payload) =>
  new Promise((resolve, reject) => {
    const options = { ...defaultOptions, path, method: "POST" };
    console.log("OPTIONS", options);
    const req = http.request(options, (res) => {
      let buffer = "";
      res.on("data", (chunk) => (buffer += chunk));
      console.log("buffer", buffer);
      res.on("end", () => resolve(JSON.parse(buffer)));
    });
    req.on("error", (e) => reject(e.message));
    req.write(JSON.stringify(payload));
    req.end();
  });

const zapierURL = "https://hooks.zapier.com/hooks/catch/xxxxxx/bljks0k/";

exports.handler = async (event, context) => {
  for (const streamedItem of event.Records) {
    if (streamedItem.eventName === "INSERT") {
      console.log(
        "streamedItem.dynamodb.NewImage",
        streamedItem.dynamodb.NewImage
      );
      try {
        await post(zapierURL, streamedItem.dynamodb.NewImage);
      } catch (err) {
        console.log("ERROR", err);
      }
    }
  }
};
I would like the environment variables set up so that I can target localhost, the development server, or the production website.
I am using AWS Amplify, Next.js, and JavaScript.
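For reference, a minimal sketch (an assumption on my part, not code from this post) of a post helper that derives hostname and path from the full webhook URL using Node's built-in https module and URL class, instead of the hard-coded localhost defaults; ZAPIER_URL is the Amplify parameter listed in the header comment.
const https = require("https");

// Sketch: POST JSON to the full webhook URL, deriving hostname/path from it.
const postToUrl = (urlString, payload) =>
  new Promise((resolve, reject) => {
    const { hostname, pathname, search } = new URL(urlString);
    const body = JSON.stringify(payload);
    const req = https.request(
      {
        hostname,
        path: pathname + search,
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          "Content-Length": Buffer.byteLength(body),
        },
      },
      (res) => {
        let buffer = "";
        res.on("data", (chunk) => (buffer += chunk));
        res.on("end", () => resolve(buffer));
      }
    );
    req.on("error", reject);
    req.write(body);
    req.end();
  });

// e.g. inside the handler: await postToUrl(process.env.ZAPIER_URL || zapierURL, streamedItem.dynamodb.NewImage);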
I am using the Node.js Google Drive client to download certain files from a Google Drive. When using the example provided in their GitHub repo, I get an Uncaught (in promise) TypeError: res.data.on is not a function error. The file is still created locally, but it is just the empty file from createWriteStream().
When I log the res variable I get: ReadableStream {locked: false}.
I'm pretty new to streams so this is quite a bit over my head.
Here is my code. You'll notice it's almost exactly what the example they provide looks like.
syncFileFromDrive(fileId, filePath) {
  filePath.replace(userDataPath, '');
  filePath = `${userDataPath}/${filePath}`;
  filePath.replaceAll('//', '/');

  logger.info(`Sync file from drive: Syncing file to path: ${filePath}`);
  logger.info(`Sync file from drive: File id: ${fileId}`);

  const dest = fs.createWriteStream(filePath);
  let progress = 0;

  this.drive.files.get({ fileId, alt: 'media' }, { responseType: 'stream' }).then(res => {
    console.log(res);
    console.log(res.data);
    res.data
      .on('end', () => {
        console.log('Done downloading file.');
        folderStructure.buildFileMenu();
        resolve(dest);
      })
      .on('error', err => {
        console.error('Error downloading file.');
        reject(err);
      })
      .on('data', d => {
        progress += d.length;
        if (process.stdout.isTTY) {
          process.stdout.clearLine();
          process.stdout.cursorTo(0);
          process.stdout.write(`Downloaded ${progress} bytes`);
        }
      })
      .pipe(dest);
  });
}
Edit: I should add that this is for an Electron application, so while Node is supported, I'm not sure whether that affects the way I can use streams.
This feels like a bit of a workaround, and I am open to any suggestions, but it solved the issue I was having.
syncFileFromDrive(fileId, filePath) {
  filePath.replace(userDataPath, '');
  filePath = `${userDataPath}/${filePath}`;
  filePath.replaceAll('//', '/');

  logger.info(`Sync file from drive: Syncing file to path: ${filePath}`);
  logger.info(`Sync file from drive: File id: ${fileId}`);

  this.drive.files
    .get({ fileId, alt: "media" }, { responseType: 'stream' })
    .then((res) => {
      const dest = fs.createWriteStream(filePath);
      const decoder = new TextDecoder("utf-8");
      const reader = res.data.getReader();
      reader.read().then(function processText({ done, value }) {
        if (done) {
          console.log("Stream complete");
          return;
        }
        dest.write(decoder.decode(value));
        // Read some more, and call this function again
        return reader.read().then(processText);
      });
    });
}
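One caveat with this workaround (my observation, not part of the original answer): TextDecoder re-encodes every chunk as UTF-8 text, which can corrupt binary files. A small sketch of the same reader loop writing the raw bytes instead, with webStream and dest standing in for res.data and the write stream:
const pumpToFile = (webStream, dest) => {
  const reader = webStream.getReader();
  return reader.read().then(function process({ done, value }) {
    if (done) {
      dest.end(); // close the file once the web stream reports completion
      return;
    }
    dest.write(Buffer.from(value)); // value is a Uint8Array; write raw bytes, no UTF-8 decoding
    return reader.read().then(process);
  });
};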
Please take a look at my implementation, which I used to download the file.
import fs from 'fs';
import { google } from 'googleapis';

const getOauth2Client = () => new google.auth.OAuth2(
  process.env.GOOGLE_DRIVE_CLIENT_ID,
  process.env.GOOGLE_DRIVE_CLIENT_SECRET,
  process.env.GOOGLE_DRIVE_REDIRECT_URL
);

const downloadFile = ({ id, access_token, path }) => {
  return new Promise((resolve, reject) => {
    const dest = fs.createWriteStream(path);
    const oauth2Client = getOauth2Client();
    oauth2Client.setCredentials({ access_token });

    const drive = google.drive({
      version: 'v3',
      auth: oauth2Client
    });

    drive.files.get(
      { fileId: id, alt: 'media' }, { responseType: 'stream' },
      (err, res) => {
        if (err) return reject(err);
        res.data
          .on('end', () => {
            console.log('Done');
          })
          .on('error', _e => {
            console.log('Error', _e);
            if (_e) reject(_e);
          })
          .pipe(dest);
        dest.on('finish', () => {
          console.log('Download finished');
          resolve(true);
        });
      }
    );
  });
};
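A possible way to call this helper (the argument values are placeholders, not from the original answer):
// Resolves with true once the file has been written to disk.
downloadFile({
  id: 'FILE_ID',
  access_token: 'ACCESS_TOKEN',
  path: './downloaded-file.bin',
})
  .then(() => console.log('saved'))
  .catch((err) => console.error('download failed', err));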
This happens because in the renderer process, Google's gaxios module uses the Fetch API instead of Node's http. Fetch returns a web ReadableStream, unlike http, which returns a Node.js Readable. Currently there is no way to change the default adapter, but you can use this quick workaround to convert it.
// Transforms a web ReadableStream into a Node.js Readable
function toNodeReadable(webStream) {
  const reader = webStream.getReader();
  const rs = new Readable();
  rs._read = async () => {
    const result = await reader.read();
    if (!result.done) {
      rs.push(Buffer.from(result.value));
    } else {
      rs.push(null);
    }
  };
  return rs;
}
Usage with your code:
syncFileFromDrive(fileId, filePath) {
  filePath.replace(userDataPath, '');
  filePath = `${userDataPath}/${filePath}`;
  filePath.replaceAll('//', '/');

  logger.info(`Sync file from drive: Syncing file to path: ${filePath}`);
  logger.info(`Sync file from drive: File id: ${fileId}`);

  const dest = fs.createWriteStream(filePath);
  let progress = 0;

  this.drive.files.get({ fileId, alt: 'media' }, { responseType: 'stream' }).then(res => {
    console.log(res);
    console.log(res.data);
    toNodeReadable(res.data)
      .on('end', () => {
        console.log('Done downloading file.');
        folderStructure.buildFileMenu();
        resolve(dest);
      })
      .on('error', err => {
        console.error('Error downloading file.');
        reject(err);
      })
      .on('data', d => {
        progress += d.length;
        if (process.stdout.isTTY) {
          process.stdout.clearLine();
          process.stdout.cursorTo(0);
          process.stdout.write(`Downloaded ${progress} bytes`);
        }
      })
      .pipe(dest);
  });
}
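As an aside (not part of the original answer): Node.js 17+ ships stream.Readable.fromWeb, which can replace the hand-rolled wrapper, assuming the Electron build bundles a recent enough Node and accepts the renderer's ReadableStream.
const { Readable } = require('stream');

// Sketch: built-in conversion from a web ReadableStream to a Node.js Readable (Node >= 17).
const nodeStream = Readable.fromWeb(res.data);
nodeStream.pipe(dest);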
I have encountered a problem while following a Maximilian Schwarzmüller course, which has otherwise been great: https://www.youtube.com/watch?v=qZ1EFnFOGvE
The image is logged in the Firebase console as uploaded, and the console recognises the file type and size, but it loads continually and never displays the image. I use a POST request in Postman to upload the image.
When I upload manually through the Firebase UI, everything works fine.
My code:
const functions = require("firebase-functions");
const os = require("os");
const path = require("path");
const spawn = require("child-process-promise").spawn;
const cors = require("cors")({ origin: true });
const Busboy = require("busboy");
const fs = require("fs");

const gcconfig = {
  projectId: "REDACTED",
  keyFilename: "REDACTED"
};
const gcs = require("@google-cloud/storage")(gcconfig);

exports.onFileChange = functions.storage.object().onFinalize(event => {
  const object = event.data;
  const bucket = object.bucket;
  const contentType = object.contentType;
  const filePath = object.name;
  console.log("File change detected, function execution started");

  if (object.resourceState === "not_exists") {
    console.log("We deleted a file, exit...");
    return;
  }
  if (path.basename(filePath).startsWith("resized-")) {
    console.log("We already renamed that file!");
    return;
  }

  const destBucket = gcs.bucket(bucket);
  const tmpFilePath = path.join(os.tmpdir(), path.basename(filePath));
  const metadata = { contentType: contentType };

  return destBucket
    .file(filePath)
    .download({
      destination: tmpFilePath
    })
    .then(() => {
      return spawn("convert", [tmpFilePath, "-resize", "500x500", tmpFilePath]);
    })
    .then(() => {
      return destBucket.upload(tmpFilePath, {
        destination: "resized-" + path.basename(filePath),
        metadata: metadata
      });
    });
});

exports.uploadFile = functions.https.onRequest((req, res) => {
  cors(req, res, () => {
    if (req.method !== "POST") {
      return res.status(500).json({
        message: "Not allowed"
      });
    }

    const busboy = new Busboy({ headers: req.headers });
    let uploadData = null;

    busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
      const filepath = path.join(os.tmpdir(), filename);
      uploadData = { file: filepath, type: mimetype };
      file.pipe(fs.createWriteStream(filepath));
    });

    busboy.on("finish", () => {
      const bucket = gcs.bucket("REDACTED");
      bucket
        .upload(uploadData.file, {
          uploadType: "media",
          metadata: {
            metadata: {
              contentType: uploadData.type
            }
          }
        })
        .then(() => {
          res.status(200).json({
            message: "It worked!"
          });
        })
        .catch(err => {
          res.status(500).json({
            error: err
          });
        });
    });

    busboy.end(req.rawBody);
  });
});
My security rules:
rules_version = '2';
service firebase.storage {
  match /b/{bucket}/o {
    match /{allPaths=**} {
      allow read, write: if true;
    }
  }
}
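For context, one common cause of this symptom (an assumption on my part, not something confirmed in this post) is that files uploaded through the Cloud Storage SDK lack the firebaseStorageDownloadTokens metadata that uploads through the Firebase UI receive automatically, so the console cannot render a preview. A sketch of attaching a token during the upload, using the uuid package:
const { v4: uuidv4 } = require("uuid");

// Sketch only: attach a download token so the Firebase console / client SDKs can render the file.
bucket.upload(uploadData.file, {
  uploadType: "media",
  metadata: {
    contentType: uploadData.type,
    metadata: {
      firebaseStorageDownloadTokens: uuidv4()
    }
  }
});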
I previously had a single file upload set up and working properly. Now I need to make it handle multiple files.
Here is my code right now:
const multer = require('multer')
const { Storage } = require('@google-cloud/storage')

const storage = new Storage()
const m = multer({ storage: multer.memoryStorage() })

module.exports = app => {
  app.use('/', router)
  router.post(
    '/reader-:shortId/file-upload',
    passport.authenticate('jwt', { session: false }),
    m.array('files'),
    async function (req, res) {
      const bucketName = req.params.shortId.toLowerCase()
      await storage.createBucket(bucketName)
      bucket = storage.bucket(bucketName)

      let promises = []
      req.files.forEach((file) => {
        const blob = bucket.file(file.originalname)
        const newPromise = new Promise((resolve, reject) => {
          blob.createWriteStream({
            metadata: { contentType: file.mimetype }
          }).on('finish', async response => {
            await blob.makePublic()
            resolve(response)
          }).on('error', err => {
            reject('upload error: ', err)
          }).end()
        })
        promises.push(newPromise)
      })

      Promise.all(promises).then((response) => {
        // the response I get here is [undefined, undefined]
        res.status(200).send(response)
      }).catch((err) => {
        res.status(400).send(err.message)
      });
    }
  )
}
req.files does give me an array of files, with a buffer and a size that makes sense.
The promises all resolve.
But when I check the files in the Google bucket, they have the right names but no content (and a size of 0).
As I said before, it was working when I did it with one file (using m.single('file')).
I don't want to use the bucket as the destination in the multer setup because I also have to change the file name before uploading to the Google bucket.
Edit: this is the code example given by the Google Cloud documentation for single file uploads (https://cloud.google.com/nodejs/getting-started/using-cloud-storage):
function sendUploadToGCS (req, res, next) {
  if (!req.file) {
    return next();
  }

  const gcsname = Date.now() + req.file.originalname;
  const file = bucket.file(gcsname);

  const stream = file.createWriteStream({
    metadata: {
      contentType: req.file.mimetype
    },
    resumable: false
  });

  stream.on('error', (err) => {
    req.file.cloudStorageError = err;
    next(err);
  });

  stream.on('finish', () => {
    req.file.cloudStorageObject = gcsname;
    file.makePublic().then(() => {
      req.file.cloudStoragePublicUrl = getPublicUrl(gcsname);
      next();
    });
  });

  stream.end(req.file.buffer);
}
I originally had something like that working, but I just don't understand where it is getting the file buffer data from. That is probably where things are different with multiple files.
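For what it's worth, with multer.memoryStorage() the parsed file contents live on the request object: req.file.buffer for m.single('file') and req.files[i].buffer for m.array('files'); that buffer is what stream.end(req.file.buffer) writes in the example above. A tiny sketch, assuming the same router and m as in the question; the path is illustrative.
router.post('/single-upload-demo', m.single('file'), (req, res) => {
  // req.file.buffer holds the uploaded bytes kept in memory by multer.memoryStorage().
  res.send({ name: req.file.originalname, size: req.file.buffer.length });
});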
I know it's too late, but someone might be looking for an answer for uploading multiple files to Google Cloud Storage.
Dependencies:
Express
Google Cloud Library
Multer
Body Parser
This is the controller code.
exports.post_image_upload = async (req, res) => {
  /** Check if files exist */
  if (!req.files) {
    res.status(400).send('No file uploaded.');
    return;
  }

  let PublicUrls = []

  req.files.forEach((file) => {
    const blob = bucket.file(file.fieldname + '-' + Date.now() + path.extname(file.originalname))
    const blobStream = blob.createWriteStream({
      metadata: { contentType: file.mimetype }
    })

    blobStream.on('finish', () => {
      blob.makePublic()
    })

    blobStream.on('error', err => {
      // Put your error message here
    })

    blobStream.end(file.buffer)

    const Url = `https://storage.googleapis.com/${bucket.name}/${blob.name}`
    PublicUrls.push(Url)
  })

  res.send(PublicUrls)
}
Good Luck
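For completeness, a minimal sketch (paths, field names, and routes are illustrative, not from the original answer) of wiring this controller into an Express app with multer memory storage, so req.files carries the buffers the controller expects:
const express = require('express');
const multer = require('multer');
const imageController = require('./controllers/image'); // hypothetical path to the controller above

const app = express();
const upload = multer({ storage: multer.memoryStorage() }); // keeps each file's contents in file.buffer

// 'images' is an illustrative field name; it must match the client's form field.
app.post('/images', upload.array('images'), imageController.post_image_upload);

app.listen(3000, () => console.log('listening on 3000'));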
Ok, turns out I had to change
.end()
to
.end(file.buffer)
Marie Pelletier, I think your approach is 100% right. I modified your code a little, trying to avoid the async response:
let promises = []
req.files.forEach((file) => {
  const blob = bucket.file(file.originalname)
  const newPromise = new Promise((resolve, reject) => {
    blob.createWriteStream({
      metadata: { contentType: file.mimetype },
      resumable: false // Good for small files
    }).on('finish', () => {
      const Url = `https://storage.googleapis.com/${bucket.name}/${blob.name}`;
      resolve({ name: file.originalname, url: Url });
    }).on('error', err => {
      reject('upload error: ', err);
    }).end(file.buffer);
  })
  promises.push(newPromise);
})

Promise.all(promises).then((response) => {
  res.status(200).send(response)
}).catch((err) => {
  res.status(400).send(err.message)
});
This way, I didn't get 'undefined' anymore.