Google translate key missing on node js ajax - javascript

I have a simple Node JS script which works fine when run locally in the terminal:
exports.google_translate = function (translate_text, res) {
    var Translate = require('@google-cloud/translate');
    var translate = new Translate.Translate({projectId: 'my project'});

    translate.translate(translate_text, 'fr').then(results => {
        var translation = results[0];
        res.send(translation);
    }).catch(err => {
        res.send('ERROR: ' + err);
    });
}
However, whenever I call this via Ajax, I get the following error:
Error: The request is missing a valid API key.
I already added this as a permanent environment variable using:
export GOOGLE_APPLICATION_CREDENTIALS="[PATH to key downloaded]"
But still each time I call this script via Ajax, I get the same error. So my question is, how can I get the Node JS script to save the API key so that it works when called via Ajax?
Thanks

It seems that for whatever reason the application cannot read the environment variable correctly. Since Node.js stores all environment variables in process.env, you could make sure it is set by writing it explicitly:
function google_translate(translate_text) {
    process.env.GOOGLE_APPLICATION_CREDENTIALS = "[PATH to key downloaded]";
    return translate.translate(translate_text, 'fr')
        .then(console.log)
        .catch(console.error);
}
or pass the key directly to the constructor with
const translate = new Translate.Translate({
    projectId: 'my-project',
    keyFilename: "[PATH to key downloaded]"
});
You can also read the key file yourself and just pass the parsed credentials to the translate constructor (this needs the fs module):
const fs = require('fs');

const translate = new Translate.Translate({
    credentials: JSON.parse(fs.readFileSync("[PATH to key downloaded]", "utf8"))
});
If it still does not help, maybe it is an issue with the key itself, and you could try generating a new one here: https://console.cloud.google.com/apis/credentials

const {Translate} = require('@google-cloud/translate').v2;

const translate = new Translate({
    credentials: {
        "type": "service_account",
        "project_id": "your_project",
        "private_key_id": "your_data",
        "private_key": "your_data",
        "client_email": "your_data",
        "client_id": "your_data",
        "auth_uri": "your_data",
        "token_uri": "your_data",
        "auth_provider_x509_cert_url": "your_data",
        "client_x509_cert_url": "your_data"
    }
});
const text = 'This is testing!';
const target = 'de';
async function translateText() {
    // Translates the text into the target language. "text" can be a string for
    // translating a single piece of text, or an array of strings for translating
    // multiple texts.
    let [translations] = await translate.translate(text, target);
    translations = Array.isArray(translations) ? translations : [translations];
    console.log('Translations:' + translations);
}

translateText();
You get these credentials from your project on Google Cloud: when you create a service account key, they provide you with a .json file, and its fields go into the credentials object above.


Dynamic filename in Winston dailyrotate for Promtail/Loki/Grafana

My NodeJS application writes logs with Winston. These logs will then be picked up by Promtail, to be saved to S3 by Loki and then processed in a dashboard in Grafana.
I want to create logs in Winston with DailyRotateFile rotation every 30 minutes. I want the logs to first be stored in my folder "/home/gad-web/gad-logs" while they are still being appended to. When they are rotated, I want to move them to "/home/gad-web/gad-logs-rotated". Promtail will be watching this specific folder.
I want to use dynamic filenames for the different logs being written out, so that I can easily assign static labels to each file separately using Promtail, rather than having to process each log line and assign a dynamic label to each line of log in one large file.
My file logger.mjs looks like this (formats, levels and other irrelevant data are left out):
const logDir = '/home/gad-web/gad-logs'
const logDirRotated = '/home/gad-web/gad-logs-rotated'

let winstonGdprProofFormat = winston.format.combine(...)

let winstonDailyRotateFileTransport = new winston.transports.DailyRotateFile({
    frequency: '30m',
    format: winstonGdprProofFormat,
    filename: `${logDir}/all-gdpr-proof-%DATE%.log`,
    datePattern: 'YYYY-MM-DD HH-mm',
})

// Move the file to another location after it is rotated, so it can be picked up by Promtail
winstonDailyRotateFileTransport.on('rotate', function (oldFilenamePath, newFilenamePath) {
    let pathToMoveTo = `${logDirRotated}/${path.basename(oldFilenamePath)}`
    fs.rename(oldFilenamePath, pathToMoveTo, function (err) {
        if (err) throw err
    })
})

let winstonTransports = []
if (process.env.environment !== 'local') {
    winstonTransports.push(winstonConsoleTransport)
    winstonTransports.push(winstonDailyRotateFileTransport)
} else {
    winstonTransports.push(winstonConsoleWithColorsTransport)
}

const logger = winston.createLogger({
    level: process.env.environment !== 'local' ? 'info' : 'debug',
    levels: winstonLevels,
    transports: winstonTransports,
})

export function log (obj) {
    let { level, requestId, method, uri, msg, time, data } = obj
    if (!level) {
        level = 'info'
    }
    logger.log({
        level: level,
        requestId: requestId,
        method: method,
        uri: uri,
        msg: msg,
        time: time,
        data: data,
    })
}
It is being called in files that write logs like this:
import { log } from '../config/logger.mjs'
...
function writeRequestLog (start, request, requestId) {
let end = new Date().getTime()
let diff = end - start
log({ level: 'info', requestId: requestId, method: request.method, uri: request.path, msg: null, time: `${diff}ms`, data: JSON.stringify(request.query) })
}
Since the file is imported directly, it is immediately executed, and the winstonDailyRotateFileTransport is created using `${logDir}/all-gdpr-proof-%DATE%.log` as the filename. How do I go about instantiating this with a dynamic filename, so that I get 30-minute rotated log files for a number of dynamically created files?
I tried creating a class in JS, but I quickly ran into trouble because of the .on('rotate', ...) handler defined on the winstonDailyRotateFileTransport, and I'm also not sure what other implications creating a class for this might have (since this logger is used in a lot of places in my code).
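For reference, one possible direction (not from the original post) is a factory function rather than a class: each dynamic name gets its own transport, its own rotate handler, and its own logger. A minimal sketch, assuming the same folders and transport options as above; createRotatingLogger and its name parameter are hypothetical names:
import winston from 'winston'
import 'winston-daily-rotate-file' // registers winston.transports.DailyRotateFile
import fs from 'fs'
import path from 'path'

const logDir = '/home/gad-web/gad-logs'
const logDirRotated = '/home/gad-web/gad-logs-rotated'

// Hypothetical factory: one logger per dynamic file name.
export function createRotatingLogger (name, format) {
    const transport = new winston.transports.DailyRotateFile({
        frequency: '30m',
        format: format,
        filename: `${logDir}/${name}-%DATE%.log`,
        datePattern: 'YYYY-MM-DD HH-mm',
    })

    // Same "move the file after rotation" behaviour as in the question, bound to this transport.
    transport.on('rotate', (oldFilenamePath) => {
        const pathToMoveTo = `${logDirRotated}/${path.basename(oldFilenamePath)}`
        fs.rename(oldFilenamePath, pathToMoveTo, (err) => {
            if (err) throw err
        })
    })

    return winston.createLogger({
        level: process.env.environment !== 'local' ? 'info' : 'debug',
        transports: [transport],
    })
}

// Usage (hypothetical): const requestLogger = createRotatingLogger('requests', winstonGdprProofFormat)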

Interpolate env vars client side in gatsby react app

I am using Gatsby as a Static Site Generator and using Netlify to deploy.
Netlify lets you set Environment Variables in its UI backend.
I've set a few env vars in the Netlify backend to be able to post subscribers to a mailing list.
DATA_NO = 'XXXX'
LIST_ID = '123456'
API_KEY = 'XXXXXXXXXXXXXXXXXXXXXXXXXX'
In my src files, I've got a component that responds to an onSubmit event and constructs a URL to post a new subscriber.
(axios is used as a package for sending HTTP requests, etc)
import React, { useState } from "react"
import axios from 'axios'

const Form = () => {
    const [userEmail, setState] = useState({'email_address': ''})
    const creds = 'anystring:'+ process.env.API_KEY
    let URL = 'https://'+ process.env.DATA_NO +'.api.example.com/3.0'
    URL += '/lists/'+ process.env.LIST_ID +'/members'

    const submitSubscribe = async e => {
        e.preventDefault()
        const payload = {
            'email_address': userEmail.email_address,
            'status': 'subscribed'
        }
        try {
            const response = await axios.post(URL, payload, {
                headers: {
                    'Authorization': 'Basic ' + Buffer.from(creds).toString('base64')
                }
            })
            console.log('r', response)
            console.log('r data', response.data)
        } catch(err) {
            console.log(err);
        }
    }

    return (
        <form name="newsletter-signup" method="post" onSubmit={submitSubscribe}>
            {/*<input type="hidden" name="form-name" value="newsletter-signup" />*/}
            <input type="email" placeholder="Email required" onChange={handleChange} value={userEmail.email_address} required />
            <button type="submit" className="button primary-button-inverted">Send</button>
        </form>
    )
}
So, what's happening is that at run time my env vars are coming out as undefined.
I've been on the Netlify docs and they keep saying you need to interpolate the values for the client to be able to use them. I understand the logic here: these env vars need to be printed and bundled during build time, not looked up at run time.
The question I'm struggling with is HOW do I do this?
I have set up a .env.development file in the root of my project. I have tried prefixing my env vars with GATSBY_ but I still have the same trouble.
I tried using require('dotenv').config() but I'm not sure exactly where to put it (in my gatsby-node.js, gatsby-config.js), or whether I need to include it on the page with the component that uses these env vars.
I'd like to be able to set these vars up in one place (maybe two if testing in development) and I don't want too much tweaking involved to be able to use them in both development and production builds.
I also understand that Netlify or Gatsby can process these vars into a functions/ folder in my source code that I can somehow make use of, but that seems like more than I need to just post a simple form.
Please help!
Update
Current code:
In my project root, I created two .env files, one for development and one for production. They each share the following format (remember, I am developing in GatsbyJS):
GATSBY_MC_API_KEY="xxxxxxxxxxxxxxxxxxxxxxxxx-xxxx"
GATSBY_MC_DATA_NO="xxxx"
GATSBY_MC_AUDIENCE_ID="xxxxxxxxxxx"
I've set up a separate config.js file in src/config/config.js to organize and validate my env vars (thanks @Baboo_). It looks like:
export const MC_API_KEY = process.env.GATSBY_MC_API_KEY;
export const MC_DATA_NO = process.env.GATSBY_MC_DATA_NO;
export const MC_AUDIENCE_ID = process.env.GATSBY_MC_AUDIENCE_ID;

const envVars = [
    {name: "MC_API_KEY", value: MC_API_KEY},
    {name: "MC_DATA_NO", value: MC_DATA_NO},
    {name: "MC_AUDIENCE_ID", value: MC_AUDIENCE_ID}
]

export const checkEnvVars = () => {
    const envVarsNotLoaded = envVars.filter((envVar) => envVar.value !== undefined);
    if (envVarsNotLoaded.length > 0) {
        throw new Error(`Could not load env vars ${envVarsNotLoaded.join(",")}`);
    }
}
checkEnvVars()
However, when I run gatsby develop, the "Could not load env vars" error gets thrown.
You are doing it the right way.
What you have to do is indeed prefix your environment variables with GATSBY_, Gatsby will automatically load them. Then call them in your code:
const creds = 'anystring:'+ process.env.GATSBY_API_KEY
let URL = 'https://'+ process.env.GATSBY_DATA_NO +'.api.example.com/3.0'
URL += '/lists/'+ process.env.GATSBY_LIST_ID +'/members'
Make sure to use the whole string process.env.GATSBY_LIST_ID instead of process.env[GATSBY_LIST_ID], because process.env is not available as a real object in the client bundle; the full string is replaced with its value at build time.
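For instance (a small illustration of that point, not part of the original answer):
// Gets replaced with the literal value at build time, so it works in the browser:
const listId = process.env.GATSBY_LIST_ID

// Dynamic access is NOT replaced at build time, so it comes back undefined in the browser:
const key = 'GATSBY_LIST_ID'
const listIdBroken = process.env[key]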
Locally
Make sure to create two .env files, .env.development and .env.production. The former is used when you run gatsby develop and the latter when you run gatsby build.
You may already know that you shouldn't commit these files.
Netlify
Add the same environment variables in your deployment pipeline on Netlify. Here is the related doc. This way Netlify can build your website when it is deployed.
Improvements
Instead of referring to environment variables directly, create a file where they are loaded and, if one of them cannot be retrieved, throw an error. This way you will be notified when the loading fails, which saves debugging time.
Example:
// config.js
export const API_KEY = process.env.GATSBY_API_KEY;
export const DATA_NO = process.env.GATSBY_DATA_NO;

const envVars = [
    {name: "API_KEY", value: API_KEY},
    {name: "DATA_NO", value: DATA_NO},
]

const isUndefined = (envVar) => typeof envVar.value === "undefined";

const checkEnvVars = () => {
    const envVarsNotLoaded = envVars.filter(isUndefined);
    if (envVarsNotLoaded.length > 0) {
        throw new Error(`Could not load env vars ${envVarsNotLoaded.map((envVar) => envVar.name).join(",")}`);
    }
}

checkEnvVars();
// component.js
import React, { useState } from "react"
import axios from 'axios'

// Import environment variables
import { API_KEY, DATA_NO } from "./config"

const Form = () => {
    // ...
    const [userEmail, setState] = useState({'email_address': ''})
    const creds = 'anystring:'+ API_KEY
    let URL = 'https://'+ DATA_NO +'.api.example.com/3.0'
You need to add a different env file for the two environments to make this work.
Meaning .env.development and .env.production.
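As for where the question's require('dotenv').config() call belongs: the pattern the Gatsby docs describe is to put it at the very top of gatsby-config.js, keyed on NODE_ENV. A sketch (adjust the path if your .env files live elsewhere):
// gatsby-config.js -- load .env.development or .env.production before the config is built
require("dotenv").config({
    path: `.env.${process.env.NODE_ENV}`,
})

module.exports = {
    // ...your existing siteMetadata, plugins, etc.
}
With that in place, the GATSBY_-prefixed variables from those files are available on process.env at build time, both locally and on Netlify.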

Read settings contained in a json file in javascript

I'm trying to read a simple setting from a JSON file. The JSON is this:
{
"Label": "some string here"
}
From my JavaScript I do:
import settings from '../settings.json';
then:
var settings = () => {
    const headers = new Headers();
    const requestOptions = {
        method: 'GET',
        headers: { ...headers.authentication, ...headers.culture, 'ContentType': 'application/json' },
    };
    return fetch(`${settings.Label}`, requestOptions).then((response) => {
        return response.text().then(text => {
            const data = text ? JSON.parse(text) : {};
            let token = response.headers.get('X-Token');
            if (token) {
                data.token = token;
            }
            if (!response.ok) {
                // manage error here
                const error = (data && data.message) || response.statusText;
                return Promise.reject(error);
            }
            return data;
        });
    });
};
// use settings here
// use settings here
Despite my many searches and attempts (I'm not very expert in JavaScript), I have tried this many ways, but my variable 'settings' doesn't contain anything.
I believe you need to turn your JSON file into a JS module with an export:
export const settings = {
"label": "some string here"
}
Not much information is given here, but this probably has to do with how your JavaScript is transpiled. You can use:
const settings = require('../settings.json')
instead.
try this answer https://stackoverflow.com/a/59844868/7701381
Also, change the name of either the imported JSON settings or the var settings, because having both share a name can cause unexpected behavior.
I had the approach completely wrong: the file is already available and I don't have to request it from the server. I just have to return the string, without using fetch or anything else:
return `${settings.Label}`;
Sorry, and thanks a lot for the support.
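In other words, once the JSON is imported it is already a plain object, so a minimal sketch of the working approach (assuming a bundler that supports JSON imports, as the original import statement implies) is simply:
import settings from '../settings.json';

// No fetch needed: the bundler inlines the JSON at build time.
const label = settings.Label; // "some string here"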

Not able to authenticate google cloud vision api. How to authenticate it to use it further

My code looks like this:
var vision = require('@google-cloud/vision');

handleSubmit = () => {
    console.log("encoded string submitted=", this.state.files);
    this.useVisionCloud();
}

useVisionCloud = () => {
    const client = new vision.ImageAnnotatorClient();
    const request_body = {
        "requests": [
            {
                "image": {
                    "content": this.state.files
                },
                "features": [
                    {
                        "type": "TEXT_DETECTION"
                    }
                ]
            }
        ]
    };
    client.textDetection(request_body).then(response => {
        console.log("text got=", response);
        // doThingsWith(response);
    })
    .catch(err => {
        console.log("error got=", err);
    });
}
I already tried setting the environment variable but it didn't work.
I have also created a service account and downloaded the .json key file, but I don't know how to use it for authentication.
I'm getting the following error:
Uncaught Error: {"clientConfig":{},"port":443,"servicePath":"vision.googleapis.com","scopes":["https://www.googleapis.com/auth/cloud-platform","https://www.googleapis.com/auth/cloud-vision"]}You need to pass auth instance to use gRPC-fallback client in browser. Use OAuth2Client from google-auth-library.
on triggering a post request to the Google API.
Final query: I am not able to understand how to authenticate the API so that I can use it further to detect text in my images.
Set the env variable eg:
export GOOGLE_APPLICATION_CREDENTIALS="[PATH]"
I would recommend having a separate backend that calls the Vision API, then have the JavaScript code in your browser call your backend.
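A minimal sketch of that split, assuming an Express backend; the /annotate route, the port, and the key file path are placeholders, not part of the original answer:
// server.js -- the Node backend owns the credentials and talks to Vision
const express = require('express');
const vision = require('@google-cloud/vision');

const app = express();
app.use(express.json({ limit: '10mb' }));

// Credentials stay server-side: GOOGLE_APPLICATION_CREDENTIALS or an explicit keyFilename.
const client = new vision.ImageAnnotatorClient({ keyFilename: '/path/to/service-account.json' });

app.post('/annotate', async (req, res) => {
    try {
        // req.body.image is the base64-encoded image sent from the browser
        const [result] = await client.textDetection({ image: { content: req.body.image } });
        res.json(result.textAnnotations);
    } catch (err) {
        res.status(500).json({ error: err.message });
    }
});

app.listen(3000);
The browser code then only needs a plain fetch or axios POST to /annotate with this.state.files in the body, and no Google client library or key ends up in the client bundle.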

Error serving HTML files from an Azure function

I am trying to open, read and return an HTML file using Azure Functions. I am developing locally and the logs say that the function executed successfully; however, in the browser I am getting a 500 Internal Server Error. Am I doing something wrong here?
const fs = require('fs');
const path = require('path');
const mime = require('mime-types');

module.exports = function (context, req) {
    const staticFilesFolder = 'www/build/';
    const defaultPage = 'index.html';
    getFile(context, req.query.file);

    function getFile(context, file) {
        const homeLocation = process.env["HOME"];
        if (!file || file == null || file === undefined) {
            context.done(null, {
                status: 200,
                body: "<h1>Define a file</h1>",
                headers: {
                    "Content-Type": "text/html; charset=utf-8"
                }
            });
            return; // without this, fs.readFile would still run with an undefined file
        }
        fs.readFile(path.resolve(path.join(homeLocation, staticFilesFolder, file)),
            (err, htmlContent) => {
                if (err) {
                    getFile(context, "404.html");
                }
                else {
                    const res = {
                        status: 200,
                        body: htmlContent,
                        headers: {
                            "Content-Type": mime.lookup(path.join(homeLocation, staticFilesFolder, file))
                        }
                    };
                    context.done(null, res);
                }
            });
    }
};
Note: I am sure that 404.html exists and index.html exists. When I log the contents of htmlContent, it gives the correct output.
functions.json
{
    "disabled": false,
    "bindings": [
        {
            "authLevel": "anonymous",
            "type": "httpTrigger",
            "direction": "in",
            "methods": ["get"],
            "route": "home",
            "name": "req"
        },
        {
            "type": "http",
            "direction": "out",
            "name": "res"
        }
    ]
}
Response in Chrome
If I remove the "Content-Length" header, the status code changes to 406.
Update 1: The code seems to run normally on the Azure Portal, but it does not work when running it locally.
It looks like you are combining two methods of returning data from an HTTP-triggered function (context.res and context.done()): https://learn.microsoft.com/en-us/azure/azure-functions/functions-reference-node#accessing-the-request-and-response
Since you are using context.res, try removing context.done();
You are making incorrect use of context.res: you shouldn't be overwriting it, but instead leveraging the methods provided by the Response class in the Azure NodeJS worker. If you are using VSCode you'll get IntelliSense for these methods. Otherwise see: https://github.com/Azure/azure-functions-nodejs-worker/blob/dev/src/http/Response.ts
Your code should look something like this instead.
context.res.setHeader('content-type', 'text/html; charset=utf-8')
context.res.raw(htmlContent)
Using context.res.raw or context.res.send will already perform the context.done call for you.
Make sure you use content-type=text/html; charset=utf-8 instead of just content-type=text/html, or you'll trigger an issue with the returned content type: instead of content-type=text/html you end up getting content-type=text/plain, which will fail to render your HTML.
Addressed on: https://github.com/Azure/azure-webjobs-sdk-script/issues/2053
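Put together, a sketch of the file-serving branch using that style (reusing the homeLocation, staticFilesFolder, file and getFile names from the question; an illustration of this answer's suggestion, not tested code):
fs.readFile(path.join(homeLocation, staticFilesFolder, file), (err, htmlContent) => {
    if (err) {
        // fall back to the 404 page, as in the original code
        getFile(context, "404.html");
        return;
    }
    // the charset matters here: plain text/html comes back as text/plain (see above)
    context.res.setHeader('content-type', 'text/html; charset=utf-8');
    context.res.raw(htmlContent); // raw()/send() also perform the context.done call
});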
