I want to call the AWS Textract service from JavaScript to identify the numbers in a local photo (without using S3), and I get an error:
TypeError: Cannot read property 'byteLength' of undefined, thrown from client.send(command).
I tried to find a matching example in the official AWS SDK for JavaScript v3 documentation but couldn't find one.
I want to know how to modify the code to call this service.
This is my code:
const {
  TextractClient,
  AnalyzeDocumentCommand
} = require("@aws-sdk/client-textract");

// Set the AWS region
const REGION = "us-east-2"; // The AWS Region. For example, "us-east-1".

var fs = require("fs");
var res;
var imagedata = fs.readFileSync('./1.png');
res = imagedata.toString('base64');
console.log("res2");
console.log(typeof(res));
// console.log(res)

const client = new TextractClient({ region: REGION });

const params = {
  Document: {
    Bytes: res
  }
};
console.log("params");
console.log(typeof(params));
// console.log(params)

const command = new AnalyzeDocumentCommand(params);
console.log("command");
console.log(typeof(command));

const run = async () => {
  // async/await.
  try {
    const data = await client.send(command);
    console.log(data);
    // process data.
  } catch (error) {
    console.log("Error");
    console.log(error);
    // error handling.
  } finally {
    // finally.
  }
};
run();
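For reference, two things are worth checking here, both offered as assumptions since only the snippet above is shown. First, in the v3 SDK Document.Bytes is binary data, so passing the raw Buffer from fs.readFileSync (rather than a base64 string) matches the expected shape. Second, the "Cannot read property 'byteLength' of undefined" error from client.send is commonly reported when the client cannot find credentials, so supplying them explicitly (or via the environment) is worth verifying. A minimal sketch along those lines; the credential values are placeholders:

const { TextractClient, AnalyzeDocumentCommand } = require("@aws-sdk/client-textract");
const fs = require("fs");

const client = new TextractClient({
  region: "us-east-2",
  // Placeholder credentials; the client can also pick these up from the
  // environment or the shared credentials file if they are configured there.
  credentials: {
    accessKeyId: "YOUR_ACCESS_KEY_ID",
    secretAccessKey: "YOUR_SECRET_ACCESS_KEY"
  }
});

const run = async () => {
  // Pass the raw Buffer (a Uint8Array) instead of a base64 string.
  const imageBytes = fs.readFileSync("./1.png");
  const command = new AnalyzeDocumentCommand({
    Document: { Bytes: imageBytes },
    FeatureTypes: ["FORMS"] // AnalyzeDocument also requires at least one feature type.
  });
  try {
    const data = await client.send(command);
    console.log(data.Blocks);
  } catch (error) {
    console.error(error);
  }
};

run();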
I'm trying to use the Microsoft Graph API to access OneDrive folders and files in Node.js. The authentication goes fine, and it returns the calendar as well, but I have used this:
var graph = require('@microsoft/microsoft-graph-client');
require('isomorphic-fetch');

module.exports = {
  getUserDetails: async function(accessToken) {
    const client = getAuthenticatedClient(accessToken);
    const user = await client.api('/me').get();
    return user;
  },

  // GetEventsSnippet
  getEvents: async function(accessToken) {
    const client = getAuthenticatedClient(accessToken);
    const events = await client
      .api('/me/events')
      .select('subject,organizer,start,end')
      .orderby('createdDateTime DESC')
      .get();
    return events;
  },

  getDrives: async function(accessToken) {
    const client = getAuthenticatedClient(accessToken);
    try {
      const drive = await client
        .api('/me/drive/root/children')
        .get();
      console.log(drive);
      return drive;
    } catch (e) {
      console.log(e);
    }
  }
};

function getAuthenticatedClient(accessToken) {
  // Initialize Graph client
  const client = graph.Client.init({
    // Use the provided access token to authenticate requests
    authProvider: (done) => {
      done(null, accessToken);
    }
  });
  console.log(client);
  return client;
}
And it doesn't return anything, it just says that I'm unauthenticated.
I am following this documentation: https://learn.microsoft.com/en-us/onedrive/developer/rest-api/api/drive_list?view=odsp-graph-online
How can I make this work?
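One likely cause, offered as an assumption since the token-acquisition code isn't shown: Microsoft Graph returns 401 Unauthorized for the /me/drive endpoints if the access token was requested only with user and calendar scopes, which is what the tutorial this snippet resembles asks for. Adding a Files permission such as Files.Read when the token is requested should let the drive call through. A hedged sketch of what the scope list might look like; the variable name is illustrative:

// Wherever the OAuth scopes are configured for the app (e.g. in the auth setup
// that produces accessToken), the drive endpoints need a Files permission in
// addition to the scopes already being requested.
const OAUTH_SCOPES = [
  'user.read',
  'calendars.read',
  'files.read' // required for /me/drive and /me/drive/root/children
];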
I have been following the AWS-Kendra react-search app example you can find here:
https://docs.aws.amazon.com/kendra/latest/dg/deploying.html
After importing the Kendra client with:
const kendra = require('aws-sdk/clients/kendra');
const kendraClient = new kendra({apiVersion: '2019-02-03', region: 'us-east-1'});
Any call on kendraClient to any of the Kendra operations returns null. I have been executing queries with:
const results = kendraClient.query({ IndexId: INDEX_ID, QueryText: queryText});
Which returns a request object with null data and error fields.
I have calls to S3 that execute correctly in the same file, so I do not believe it to be an authentication problem. If I had to guess, it's some issue with how I created the Kendra object and client; the usual
kendra = new AWS.Kendra();
doesn't work because Kendra is not part of the browser version of the SDK.
Are you trying to run the JS directly from the browser? Here is a sample Node.js code:
var kendra = require("aws-sdk/clients/kendra");
var kendraClient = new kendra({ apiVersion: "2019-02-03", region: "us-west-2" });

exports.handler = function (event) {
  try {
    console.log("Starting....");
    var params = {
      IndexId: "<<Enter your indexId here>>",
      QueryText: "<<Enter your queryText here>>",
      QueryResultTypeFilter: "DOCUMENT",
      PageNumber: 1
    };
    var kendraResponse = kendraClient.query(params, function (err, data) {
      if (err) console.log(err, err.stack); // an error occurred
      else console.log("Kendra result is", data); // successful response
    });
    const response = {
      "dialogAction": {
        "fulfillmentState": "Fulfilled",
        "type": "Close",
        "message": {
          "contentType": "PlainText"
        }
      }
    };
    return response;
  } catch (error) {
    console.log(error);
  }
};
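Note that in the snippet above the query result is only available inside the callback; the value returned by kendraClient.query is the request object, not the data. If you would rather use async/await, the v2 SDK also lets you call .promise() on that request. A minimal sketch, with the index ID and query text left as placeholders:

const kendra = require("aws-sdk/clients/kendra");
const kendraClient = new kendra({ apiVersion: "2019-02-03", region: "us-west-2" });

async function runQuery(queryText) {
  const params = {
    IndexId: "<<Enter your indexId here>>", // placeholder
    QueryText: queryText
  };
  // .promise() turns the AWS.Request into a Promise, so the response
  // can be awaited instead of being read inside a callback.
  const data = await kendraClient.query(params).promise();
  console.log("Kendra result is", data);
  return data;
}

runQuery("<<Enter your queryText here>>").catch(console.error);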
I have an Azure Function that sends a file to an SFTP server. It may be called multiple times, so to save the time spent connecting to the SFTP server I would like to reuse the SFTP connection.
const Client = require("ssh2-sftp-client");
const sftp = new Client();
let sftpConnected = false;

const sendToSFTP = async (data, location, context) => {
  await setConn(context);
  try {
    await sftp.put(data, location);
  } catch (err) {
    context.log.error('sftp put error: ' + err);
  }
};

const setConn = async (context) => {
  if (sftpConnected) return;
  try {
    await sftp.connect({
      host: 'myserver',
      username: 'user',
      passphrase: 'pwd',
    });
    sftpConnected = true;
  } catch (err) {
    context.log.error('sftp connect error: ' + err);
  }
};

sftp.on('close', () => {
  sftpConnected = false;
  sftp.end();
});

sftp.on('end', () => {
  sftpConnected = false;
  sftp.end();
});

exports.sendToSFTP = sendToSFTP;
This works when testing locally, but when deployed to Azure the close and end listeners do not seem to be called when the Azure Function goes idle. When I try to use the SFTP connection after a period of time, there is no connection there and the call to the SFTP server times out.
Has anyone solved this issue?
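One workaround, offered as a suggestion rather than a confirmed fix: instead of relying on the close and end events (which may never fire when the function host freezes the idle process), verify the cached connection with a cheap call before each put and reconnect when that check fails. A rough sketch using ssh2-sftp-client's cwd() as a liveness probe; the connection options are the placeholders from the question:

const Client = require("ssh2-sftp-client");
const sftp = new Client();

const ensureConn = async (context) => {
  try {
    // cwd() is a lightweight round trip; it throws if the cached
    // connection silently died while the function app was idle.
    await sftp.cwd();
  } catch (err) {
    context.log('sftp connection not usable, reconnecting: ' + err);
    try { await sftp.end(); } catch (ignored) { /* best-effort cleanup */ }
    await sftp.connect({
      host: 'myserver',
      username: 'user',
      passphrase: 'pwd',
    });
  }
};

const sendToSFTP = async (data, location, context) => {
  await ensureConn(context);
  await sftp.put(data, location);
};

exports.sendToSFTP = sendToSFTP;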
I use the Google Drive API to upload multiple files.
I faced a problem of running out of RAM while uploading multiple files. I tried to use forEach (a for loop) in my code to avoid uploading multiple files at the same time, but it doesn't work the way I expected: it always loops through the entire list of files and uploads them all at the same time.
I tried to use async/await syntax to block the loop, but it didn't work the way I expected either.
Here is my code:
const fs = require("fs");
const readline = require("readline");
const { google } = require("googleapis");

let files = ["file1.mp4", "file2.mp4"];

const SCOPES = ["https://www.googleapis.com/auth/drive.metadata.readonly"];
const TOKEN_PATH = "token.json";

fs.readFile("credentials.json", (err, content) => {
  if (err) return console.log("Error loading client secret file:", err);
  // Authorize a client with credentials, then call the Google Drive API.
  authorize(JSON.parse(content), uploadFiles);
});

function authorize(credentials, callback) {
  const { client_secret, client_id, redirect_uris } = credentials.installed;
  const oAuth2Client = new google.auth.OAuth2(
    client_id,
    client_secret,
    redirect_uris[0]
  );
  // Check if we have previously stored a token.
  fs.readFile(TOKEN_PATH, (err, token) => {
    if (err) return getAccessToken(oAuth2Client, callback);
    oAuth2Client.setCredentials(JSON.parse(token));
    callback(oAuth2Client);
  });
}

function getAccessToken(oAuth2Client, callback) {
  const authUrl = oAuth2Client.generateAuthUrl({
    access_type: "offline",
    scope: SCOPES
  });
  console.log("Authorize this app by visiting this url:", authUrl);
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout
  });
  rl.question("Enter the code from that page here: ", code => {
    rl.close();
    oAuth2Client.getToken(code, (err, token) => {
      if (err) return console.error("Error retrieving access token", err);
      oAuth2Client.setCredentials(token);
      // Store the token to disk for later program executions
      fs.writeFile(TOKEN_PATH, JSON.stringify(token), err => {
        if (err) console.error(err);
        console.log("Token stored to", TOKEN_PATH);
      });
      callback(oAuth2Client);
    });
  });
}

async function uploadFiles(auth) {
  for (file of files) {
    var fileMetadata = {
      name: file
    };
    var media = {
      body: fs.createReadStream("test/" + file)
    };
    google.drive({ version: "v3", auth });
    const result = await drive.files.create(
      {
        resource: fileMetadata,
        media: media,
        fields: "id"
      },
      function (err, fileid) {
        if (err) {
          // Handle error
          console.error(err);
        } else {
          console.log("File Id: ", fileid.data.id);
          console.log("Uploaded..:" + file);
        }
      }
    );
    console.log("Uploading file..:" + file);
  }
}
I just want to ask: why does the loop not upload the files one at a time?
"I try to use forEach (for loop) for my code to avoid uploading multiple files at the same time"
You can't that way: the process is entirely asynchronous, because you passed a callback as an argument to the function drive.files.create, so the loop keeps going while the uploads run.
By the way, if you want to use async/await, you should wrap the call in a promisified function.
function myCreateFunc(fileInfos) {
  return new Promise((resolve, reject) => {
    // drive is the client created with google.drive({ version: "v3", auth })
    drive.files.create(fileInfos, function callback(err, fileId) {
      if (err) {
        reject(err);
      } else {
        resolve(fileId);
      }
    });
  });
}
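For completeness, a hedged sketch of how that helper could then be awaited inside the loop so only one upload is in flight at a time; oAuth2Client here stands for the authorized client that the question's authorize() passes to its callback:

const drive = google.drive({ version: "v3", auth: oAuth2Client });

async function uploadFiles() {
  for (const file of files) {
    // Awaiting the promisified call blocks the loop until this upload
    // finishes, so files go up one at a time instead of all at once.
    const result = await myCreateFunc({
      resource: { name: file },
      media: { body: fs.createReadStream("test/" + file) },
      fields: "id"
    });
    console.log("File Id:", result.data.id);
  }
}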
Even after a long time, I will post my answer because I had the same problem.
Requirements:
Enable APIs and Services for your project.
Create a service account and download the key.
If you need to upload your files into a folder, you need the folder id.
Install googleapis using npm install googleapis.
Follow these steps to get your folder id.
To find the folder id, you need to give the service account user permission to the folder.
Share it with edit access and get the link to the folder.
You will see something like this: https://drive.google.com/drive/u/1/folders/xxxxxXXXXXXxxxxxxxXXXXXXX
In this case, the folder id is xxxxxXXXXXXxxxxxxxXXXXXXX.
const { google } = require("googleapis");
var fs = require("fs");

const KEYFILEPATH = "path to your keyfile.json";
const SCOPES = ["https://www.googleapis.com/auth/drive"];

const auth = new google.auth.GoogleAuth({
  keyFile: KEYFILEPATH,
  scopes: SCOPES,
});

const uploadFiles = async (auth) => {
  const driveService = google.drive({ version: "v3", auth });
  let count = 0;
  for (const file of fs.readdirSync(
    "full file path to your images/files folder"
  )) {
    // Log the file name.
    console.log(JSON.stringify(file));
    let fileMetadata = {
      name: file,
      parents: ["folder id"], // Optional
    };
    let media = {
      mimeType: "image/jpeg",
      body: fs.createReadStream(
        `full file path to your images/files folder/${file}`
      ),
    };
    const task = driveService.files.create({
      resource: fileMetadata,
      media: media,
      fields: "id",
    });
    try {
      await task;
      count = count + 1;
    } catch (e) {
      console.log(e);
      return null;
    }
  }
  // Log the total count of uploaded files.
  console.log("Count :", count);
  return;
};

uploadFiles(auth).catch(console.error);