Node.js Express: outsource logic from controller to service

I have a function that gets data from an API. I now want to move the program logic into a service to keep the controller cleaner.
I get the data with async/await; unfortunately, I don't know how to move that logic into a service.
Does anyone have an idea?
Here is my homeController.js:
const ispwrapper = require('../lib/ispwrapper');
require('dotenv').config();
const BASE_URL = process.env.API_BASE_URL;
const OPTIONS = {
username: process.env.API_USERNAME,
password: process.env.API_PASSWORD
};
const renderHome = async (req, res) => {
let domain = [],
message = '';
try {
let a = new ispwrapper.ISPConfig(BASE_URL, OPTIONS);
const response = await a.getDataByPrimaryId('sites_web_domain_get', { active: 'y' });
for (let i = 0; i < response.length; i++){
domain.push(response[i].domain);
}
} catch(err) {
message = 'Error when retrieving domains from API';
} finally {
res.render('home', {
title: 'ISPConfig',
heading: 'Welcome to my ISPConfig Dashboard',
homeActive: true,
domain,
message
});
}
};
module.exports = {
renderHome
};
My homeService.js:
const ispwrapper = require('../lib/ispwrapper');
require('dotenv').config();
const BASE_URL = process.env.API_BASE_URL;
const OPTIONS = {
username: process.env.API_USERNAME,
password: process.env.API_PASSWORD
};
const getDomains = async () => {
// I have no idea how to use my renderHome() logic here
};
module.exports = {
getDomains
};

Here is my solution.
domainService.js:
const ispwrapper = require('../lib/ispwrapper');
require('dotenv').config();
const BASE_URL = process.env.API_BASE_URL;
const OPTIONS = {
username: process.env.API_USERNAME,
password: process.env.API_PASSWORD
};
const getDomains = async () => {
let data = [];
try {
let a = new ispwrapper.ISPConfig(BASE_URL, OPTIONS);
let response = await a.getDataByPrimaryId('sites_web_domain_get', { active: 'y' });
for (let i = 0; i < response.length; i++){
data.push(response[i].domain);
}
return data;
} catch(err) {
console.log(err);
}
};
module.exports = {
getDomains
};
homeController:
const homeService = require('../services/domainService');
const renderHome = async (req, res) => {
let message = '',
domain = [];
try {
let response = await homeService.getDomains();
for (let i = 0; i < response.length; i++){
domain.push(response[i]);
}
} catch(err) {
message = 'Error when retrieving domains from API';
} finally {
res.render('home', {
title: 'ISPConfig',
heading: 'Welcome to my ISPConfig Dashboard',
homeActive: true,
domain,
message
});
}
};
module.exports = {
renderHome
};
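A small follow-up on this split (my own note, not part of the original solution): getDomains() logs and swallows errors, returning undefined, so it is actually the controller's loop over response.length that throws and sets the error message. A sketch of an alternative where the service lets the error propagate and the controller's catch handles it directly, assuming the same ispwrapper, BASE_URL and OPTIONS setup as in domainService.js above:
// domainService.js - alternative sketch: let errors propagate to the controller
const getDomains = async () => {
  const api = new ispwrapper.ISPConfig(BASE_URL, OPTIONS);
  const response = await api.getDataByPrimaryId('sites_web_domain_get', { active: 'y' });
  // assumes each record has a `domain` field, as in the loop above
  return response.map(record => record.domain);
};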

Related

It doesn't show me the data in the terminal when I send the POST request twice

I am trying to create a middleware that receives form-data and returns the fieldname, contentType and value. When I send the first POST, the data shows in the terminal, but if I send the same request again, it doesn't show me the data in the terminal.
And if I toggle the image, the data shows in the terminal again.
This is my code:
server:
const express = require("express");
const Upes = require("../upes");
const app = express();
const start = new Upes();
app.post("/", start.setup.bind(start), (req, res) => {
res.send("all right");
});
app.listen(3000, () => {
console.log("The server is active");
});
the index of my middleware:
const getData = require("./utils/getData");
const parseContentType = require("./utils/parseContentType");
class Upes {
setup(req, res, next) {
const contentType = parseContentType(req.headers["content-type"]);
if (!contentType) {
throw new Error("Malformed content-type");
}
const SUBTYPES = ["form-data", "x-www-form-urlencoded"];
if (!SUBTYPES.includes(contentType.subtype)) {
throw new Error(
"The subtypes does not match the following subtypes: " + SUBTYPES
);
}
getData(req, contentType.params.boundary, (data) => {
console.log(data);
});
next();
}
}
module.exports = Upes;
The function that receives the data and processes it:
function getData(req, boundary, callback) {
let chunk = "";
let data = [];
req.on("data", (buffer) => {
chunk += buffer.toString();
});
req.on("end", () => {
// Split the chunk in blocks
const blocks = getBlock(chunk, boundary);
blocks.forEach((block) => {
let [params, value] = block.split("\r\n\r\n");
params = params.split(";");
let fieldname = params[1].split("=")[1].replaceAll('"', "");
let contentType = () => {
const condition = params.length === 3;
if (condition) {
let type = params[2].split(":")[1].replace(" ", "");
return type;
}
return "text-plain";
};
const payload = {
fieldname: fieldname,
contentType: contentType(),
value: "", // value.replace("\r\n", "")
};
data.push(payload);
});
callback(data);
});
}
function getBlock(body, boundary) {
boundary = boundary.replaceAll("-", "");
return body.replaceAll("-", "").split(`${boundary}`).slice(1, -1);
}
module.exports = getData;
I sent the same request 20 times. I don't know what happened, please can someone help me?
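Not an answer from the thread, but one detail in the code above worth pointing out: setup() calls next() synchronously, so the route handler responds before req has emitted 'end' and the body has been parsed. A condensed sketch, reusing the same getData and parseContentType helpers, that defers next() until parsing is done (this is an assumption about the cause, not a confirmed fix):
class Upes {
  setup(req, res, next) {
    const contentType = parseContentType(req.headers["content-type"]);
    if (!contentType) {
      return next(new Error("Malformed content-type")); // pass errors to Express instead of throwing
    }
    getData(req, contentType.params.boundary, (data) => {
      console.log(data);
      req.parsedData = data; // make the parsed fields available to later handlers
      next();                // continue only after the whole body has been read
    });
  }
}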

Nodejs TypeError: Cannot read properties of undefined (reading 'refresh')

I need your help: I am trying to use the Hubstaff API. I am working in Node.js to make the connection; I followed the documentation (official Hubstaff API documentation) and used the methods they give as implementation examples (example Node.js implementation).
But I get the following error: TypeError: Cannot read properties of undefined (reading 'refresh').
I don't know why this happens, and I can't find more examples of how to implement this API. The openid-client lib is used to make the connection through the token, and state is kept to refresh the token.
To be honest, I'm not understanding how to implement it. Can someone who has already used this API give me a little explanation? I attach the code.
hubstaffConnect.util
const {
Issuer,
TokenSet
} = require('openid-client');
const fs = require('fs');
const jose = require('jose');
// constants
const ISSUER_EXPIRE_DURATION = 7 * 24 * 60 * 60; // 1 week
const ACCESS_TOKEN_EXPIRATION_FUZZ = 30; // 30 seconds
const ISSUER_DISCOVERY_URL = 'https://account.hubstaff.com';
// API URL with trailing slash
const API_BASE_URL = 'https://api.hubstaff.com/';
let state = {
api_base_url: API_BASE_URL,
issuer_url: ISSUER_DISCOVERY_URL,
issuer: {}, // The issuer discovered configuration
issuer_expires_at: 0,
token: {},
};
let client;
function loadState() {
return fs.readFileSync('./configState.json', 'utf8');
}
function saveState() {
fs.writeFileSync('./configState.json', JSON.stringify(state, null, 2), 'utf8');
console.log('State saved');
}
function unixTimeNow() {
return Date.now() / 1000;
}
async function checkToken() {
if (!state.token.access_token || state.token.expires_at < (unixTimeNow() + ACCESS_TOKEN_EXPIRATION_FUZZ)) {
console.log('Refresh token');
state.token = await client.refresh(state.token);
console.log('Token refreshed');
saveState();
}
}
async function initialize() {
console.log('API Hubstaff API');
let data = loadState();
data = JSON.parse(data);
if (data.issuer) {
state.issuer = new Issuer(data.issuer);
state.issuer_expires_at = data.issuer_expires_at;
}
if (data.token) {
state.token = new TokenSet(data.token);
}
if (data.issuer_url) {
state.issuer_url = data.issuer_url;
}
if (data.api_base_url) {
state.api_base_url = data.api_base_url;
}
if (!state.issuer_expires_at || state.issuer_expires_at < unixTimeNow()) {
console.log('Discovering');
state.issuer = await Issuer.discover(state.issuer_url);
state.issuer_expires_at = unixTimeNow() + ISSUER_EXPIRE_DURATION;
console.log(state.issuer);
}
client = new state.issuer.Client({
// For personal access token we can use PAT/PAT.
// This is only needed because the library requires a client_id, whereas the API endpoint does not require it
client_id: 'PAT',
client_secret: 'PAT',
});
saveState();
console.log('API Hubstaff initialized');
}
async function request(url, options) {
await checkToken();
let fullUrl = state.api_base_url + url;
return client.requestResource(fullUrl, state.token, options);
}
function tokenDetails() {
let ret = {};
if (state.token.access_token) {
ret.access_token = jose.JWT.decode(state.token.access_token);
}
if (state.token.refresh_token) {
ret.refresh_token = jose.JWT.decode(state.token.refresh_token);
}
return ret;
}
module.exports = {
initialize,
checkToken,
request,
tokenDetails
};
controller
const usersGet = async(req, res = response) => {
const response = await api.request('v2/organizations', {
method: 'GET',
json: true,
});
const body = JSON.parse(response.body);
res.render('organizations', {
title: 'Organization list',
organizations: body.organizations || []
});
};
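A note on the error itself (my reading of the code, not from the original post): client is only assigned inside initialize(), and checkToken() calls client.refresh(...). If request() runs before initialize() has completed, client is still undefined, which produces exactly "Cannot read properties of undefined (reading 'refresh')". A minimal sketch of guaranteeing the order, where the require path is hypothetical:
// Sketch (assumption): make sure initialize() completes before any request()
const api = require('../utils/hubstaffConnect.util'); // hypothetical path

let ready;
const ensureInitialized = () => {
  if (!ready) ready = api.initialize(); // memoize so discovery only runs once
  return ready;
};

const usersGet = async (req, res) => {
  await ensureInitialized();
  const response = await api.request('v2/organizations', { method: 'GET', json: true });
  const body = JSON.parse(response.body);
  res.render('organizations', {
    title: 'Organization list',
    organizations: body.organizations || []
  });
};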

How to test a Firebase callable function with Firestore

I have a firebase callable function like
exports.getUserInfo = functions.https.onCall(async (data, context) => {
const userIdRef = store.collection('UserID').doc('Document');
let res = null;
try {
res = await store.runTransaction(async t => {
const doc = await t.get(userIdRef);
const currentUserId = doc.data().ID;
const newUserIdNum = currentUserId + 1;
await t.update(userIdRef, {ID: newUserIdNum});
let initUserData = {
'userID': newUserIdNum,
}
return initUserData ;
});
console.log('Transaction success');
} catch (e) {
console.error('Transaction failure:', e);
}
return res;
})
but I am not sure how to create a unit test for this. I want to mock UserID/Document before I call
test.wrap(getUserInfo), like
const myFunctions = require('../index.js')
const getUserInfoWrapped = test.wrap(myFunctions.getUserInfo);
var assert = require('assert');
describe('User', function() {
describe('getUserInfo()', function() {
it('should create user', async function() {
// sample code, this won't work
const snapshot = test.firestore.makeDocumentSnapshot({ID: 1001}, 'UserID/Document');
const data = {}
const result = await getUserInfoWrapped (data, {});
assert.equal(result.userID, "1002");
});
});
});
It seems this case is not covered by the Firebase documentation.
Reference:
https://firebase.google.com/docs/functions/unit-testing
https://github.com/firebase/firebase-functions-test/blob/c77aa92d345b8e4fb5ad98534989eb8dcf7d9bc4/spec/providers/https.spec.ts
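One way to approach this (a sketch under assumptions, not an official recipe): run the test against the Firestore emulator in online mode, seed UserID/Document with the admin SDK before calling the wrapped function, and assert on the returned ID. This assumes the Firestore emulator is running on localhost:8080 and that index.js calls admin.initializeApp():
// Sketch: point admin and functions at the Firestore emulator before loading index.js
process.env.FIRESTORE_EMULATOR_HOST = 'localhost:8080';
process.env.GCLOUD_PROJECT = 'demo-test';

const test = require('firebase-functions-test')({ projectId: 'demo-test' });
const admin = require('firebase-admin');
const assert = require('assert');
const myFunctions = require('../index.js');

describe('getUserInfo()', function () {
  it('increments the stored ID and returns it', async function () {
    // seed the document the transaction reads
    await admin.firestore().doc('UserID/Document').set({ ID: 1001 });

    const wrapped = test.wrap(myFunctions.getUserInfo);
    const result = await wrapped({}, {});

    assert.equal(result.userID, 1002);
  });
});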

Apify - How to Enqueue URL Variations Efficiently

I am creating a new actor in Apify with Cheerio to read an input file of URLs and return primarily two items: (1) the HTTP status code and (2) the HTML title. As part of our process, I would like to be able to try up to 4 variations of each input URL, such as:
HTTP://WWW.SOMEURL.COM
HTTPS://WWW.SOMEURL.COM
HTTP://SOMEURL.COM
HTTPS://SOMEURL.COM
If one of the 4 variations is successful, then the process should ignore the other variations and move to the next input URL.
I read the original input list into a RequestList, and then would like to create the variations in a RequestQueue. Is this the most efficient way to do it? Please see code below, and thank you!
const Apify = require('apify');
const {
utils: { enqueueLinks },
} = Apify;
const urlParse = require('url');
Apify.main(async () => {
const input = await Apify.getInput();
const inputFile = input.inputFile;
console.log('INPUT FILE: ' + inputFile);
const requestList = await Apify.openRequestList('urls', [
{ requestsFromUrl: inputFile, userData: { isFromUrl: true } },
]);
const requestQueue = await Apify.openRequestQueue();
const proxyConfiguration = await Apify.createProxyConfiguration();
const handlePageFunction = async ({ $, request, response }) => {
let parsedHost = urlParse.parse(request.url).host;
let simplifiedHost = parsedHost.replace('www.', '');
const urlPrefixes = ['HTTP://WWW.', 'HTTPS://WWW.', 'HTTP://', 'HTTPS://'];
let i;
for (i = 0; i < urlPrefixes.length; i++) {
let newUrl = urlPrefixes[i] + simplifiedHost;
console.log('NEW URL: ' + newUrl);
await requestQueue.addRequest({ url: newUrl });
}
console.log(`Processing ${request.url}`);
const results = {
inputUrl: request.url,
httpCode: response.statusCode,
title: $('title').first().text().trim(),
responseUrl: response.url
};
await Apify.pushData(results);
};
const crawler = new Apify.CheerioCrawler({
proxyConfiguration,
maxRequestRetries: 0,
handlePageTimeoutSecs: 60,
requestTimeoutSecs: 60,
requestList,
requestQueue,
handlePageFunction,
handleFailedRequestFunction: async ({ request }) => {
await Apify.pushData({ inputUrl: request.url, httpCode: '000', title: '', responseUrl: ''});
}
});
await crawler.run();
});
You should create your URL list beforehand. The handlePageFunction is only used for the actual scraping part, and you should only have the Apify.pushData call there:
//...
const initRequestList = await Apify.openRequestList('urls', [
{ requestsFromUrl: inputFile },
]);
const parsedRequests = [];
let req;
while (req = await initRequestList.fetchNextRequest()) {
const parsedHost = urlParse.parse(req.url).host;
const simplifiedHost = parsedHost.replace('www.', '');
const urlPrefixes = ['HTTP://WWW.', 'HTTPS://WWW.', 'HTTP://', 'HTTPS://'];
for (let i = 0; i < urlPrefixes.length; i++) {
let newUrl = urlPrefixes[i] + simplifiedHost;
console.log('NEW URL: ' + newUrl);
parsedRequests.push({
url: newUrl,
userData: { isFromUrl: true }
});
}
}
const requestList = await Apify.openRequestList('starturls', parsedRequests);
//...
const crawler = new Apify.CheerioCrawler({
proxyConfiguration,
maxRequestRetries: 0,
handlePageTimeoutSecs: 60,
requestTimeoutSecs: 60,
handlePageFunction,
requestList,
handleFailedRequestFunction: async ({ request }) => {
await Apify.pushData({ inputUrl: request.url, httpCode: '000', title: '', responseUrl: ''});
}
});
//...
requestsFromUrl is a greedy function that tries to parse all URLs from the given resource, so you'll have to perform the processing as an additional step.
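One thing this still leaves open (my addition, not part of the answer above): all four variations get crawled regardless. A rough sketch of skipping the extra variations once one has succeeded, by tracking hosts inside handlePageFunction and reusing the urlParse import from the question; the other variations are still fetched, they just stop producing dataset items:
// Sketch (assumption): only push a result for the first variation of each host that succeeds
const succeededHosts = new Set();

const handlePageFunction = async ({ $, request, response }) => {
    const host = urlParse.parse(request.url).host.replace('www.', '');
    if (succeededHosts.has(host)) return; // another variation of this URL already worked
    succeededHosts.add(host);

    await Apify.pushData({
        inputUrl: request.url,
        httpCode: response.statusCode,
        title: $('title').first().text().trim(),
        responseUrl: response.url
    });
};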

How to make an API endpoint target the user's localhost when deployed to Heroku

I have this API which works fine when running locally. But once it is deployed to Heroku, I get a 503 error because it targets localhost on Heroku's server and not the user's localhost. Is there a way to make it target the user's localhost instead?
The frontend is React. Here's the React code that fetches this API every 5 seconds.
axiosFunc = () => {
const { user } = this.props.auth;
console.log(user);
axios.get(`api/avaya/${user.id}`).then((res) => console.log(res));
};
timer = (time) => {
const date = new Date(time);
return `${date.getHours()}:${date.getMinutes()}:${date.getSeconds()}`;
};
componentDidMount() {
this.axiosFunc();
this.interval = setInterval(this.axiosFunc, 5000);
}
componentWillUnmount() {
clearInterval(this.interval);
}
And this is the API on the backend with Express:
const router = require("express").Router();
const xml2js = require("xml2js");
const Avaya = require("../../models/Avaya");
const User = require("../../models/User");
router.route("/:id").get(async (req, res) => {
const user = await User.findById(req.params.id);
const axios = require("axios");
axios({
method: "post",
baseURL: `http://127.0.0.1:60000/onexagent/api/registerclient?name=${user.username}`,
timeout: 2000,
})
.then((reg) => {
xml2js
.parseStringPromise(reg.data, { mergeAttrs: true })
.then((result) => {
if (result.RegisterClientResponse.ResponseCode[0] === "0") {
const clientId = result.RegisterClientResponse.ClientId[0];
user.avayaClientId = clientId;
user.save();
}
const clientId = user.avayaClientId;
axios({
method: "post",
url: `http://127.0.0.1:60000/onexagent/api/nextnotification?clientid=${clientId}`,
}).then((notification) => {
xml2js
.parseStringPromise(notification.data, { mergeAttrs: true })
.then((result) => {
const notifType = [];
const notifDetails = [];
for (let i in result.NextNotificationResponse) {
notifType.push(i);
}
const arranged = {
NotificationType: notifType[1],
ResponseCode:
result.NextNotificationResponse[notifType[0]][0],
};
for (let i in result.NextNotificationResponse[
notifType[1]
][0]) {
notifDetails.push(i);
}
for (let i = 0; i < notifDetails.length; i++) {
arranged[[notifDetails[i]][0]] =
result.NextNotificationResponse[notifType[1]][0][
notifDetails[i]
][0];
}
for (let i in arranged) {
if ("Outbound" in arranged) {
arranged.CallType = "Outbound";
} else if ("Inbound" in arranged)
arranged.CallType = "Inbound";
else {
arranged.CallType = " ";
}
}
if (
arranged.NotificationType === "VoiceInteractionCreated" ||
arranged.NotificationType === "VoiceInteractionMissed" ||
arranged.NotificationType === "VoiceInteractionTerminated"
) {
const newLogs = new Avaya({
notification: arranged,
});
newLogs.owner = user;
newLogs.save();
user.avayaNotifications.push(newLogs),
user
.save()
.then((logs) => res.json(logs))
.catch((err) => res.status(400).json("Error: " + err));
} else {
res.send("Nothing to record");
}
});
});
});
})
.catch((err) => res.status(503).json(err));
});
router.route("/history/:username").get(async (req, res) => {
const user = await User.findOne({ username: [`${req.params.username}`] });
Avaya.find({ owner: [`${await user.id}`] }).then((user) => res.json(user));
});
module.exports = router;
EDIT: I was able to fix this thanks to #Molda. The frontend now calls the local Avaya API (127.0.0.1:60000) directly from the browser, which runs on the user's machine, and only posts the parsed results to the Heroku backend; using fetch for those local calls instead of axios doesn't result in a CORS error.
New frontend code:
getLogs = async () => {
const { user } = this.props.auth;
const reg = await fetch(
`http://127.0.0.1:60000/onexagent/api/registerclient?name=${user.id}`
);
let regData = await reg.text();
let regxml = new XMLParser().parseFromString(regData);
if (regxml.attributes.ResponseCode === "0") {
axios.post(`/api/avaya/register/${user.id}`, regxml);
console.log(regxml.attributes.ResponseCode);
}
let resp = await fetch(`/api/avaya/getid/${user.id}`);
let clientId = await resp.text();
let logs = await fetch(
`http://127.0.0.1:60000/onexagent/api/nextnotification?clientid=${clientId}`
);
let data = await logs.text();
var xml = new XMLParser().parseFromString(data);
axios.post(`/api/avaya/getlogs/${user.id}`, xml);
};
timer = (time) => {
const date = new Date(time);
return `${date.getHours()}:${date.getMinutes()}:${date.getSeconds()}`;
};
componentDidMount() {
this.getLogs();
this.interval = setInterval(this.getLogs, 5000);
}
New backend code:
const router = require("express").Router();
const Avaya = require("../../models/Avaya");
const User = require("../../models/User");
router.route("/register/:id").post(async (req, res) => {
const user = await User.findById(req.params.id);
const clientId = req.body.attributes.ClientId;
user.avayaClientId = clientId;
user.save();
});
router.route("/getid/:id").get(async (req, res) => {
const user = await User.findById(req.params.id);
res.send(user.avayaClientId);
});
router.route("/getlogs/:id").post(async (req, res) => {
const user = await User.findById(req.params.id);
const arranged = {
NotificationType: req.body.children[0].name,
ResponseCode: req.body.attributes.ResponseCode,
CallType: " ",
};
for (let i in req.body.children[0].attributes) {
if (i === "Outbound") {
arranged.CallType = "Outbound";
}
if (i === "Inbound") {
arranged.CallType = "Inbound";
}
arranged[i] = req.body.children[0].attributes[i];
}
console.log(arranged);
if (
arranged.NotificationType === "VoiceInteractionCreated" ||
arranged.NotificationType === "VoiceInteractionMissed" ||
arranged.NotificationType === "VoiceInteractionTerminated"
) {
const newLogs = new Avaya({
notification: arranged,
});
newLogs.owner = user;
newLogs.save();
user.avayaNotifications.push(newLogs),
user
.save()
.then((logs) => res.json(logs))
.catch((err) => res.status(400).json("Error: " + err));
} else {
res.send("Nothing to record");
}
});
router.route("/history/:username").get(async (req, res) => {
const user = await User.findOne({ username: [`${req.params.username}`] });
Avaya.find({ owner: [`${await user.id}`] }).then((user) => res.json(user));
});
module.exports = router;
I don't really get the part about requesting with Axios inside the API. Is this a third-party API?
But I suggest you use a .env file, which is a file in your root folder that contains the development config like base URLs, token expirations, API keys, etc.
When you deploy to Heroku, you have to put the same config on the Heroku app (as config vars) with your production values.
Let's take an example.
In development mode, my .env looks like:
app_url = localhost:4000
port = 4000
db = development_api
db_username = root
db_password =
db_engine = mysql2
In production mode, my .env looks like:
app_url = http://appsomething.heroku.com
port = 80
db = production_api
db_username = root
db_password = 3210LDWAK#AALKQ
db_engine = mysql2
And read more about how to use .env files.
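As an illustration (not from the original answer), this is roughly how those values would be read in Express with the dotenv package, assuming the variable names shown above:
// Sketch: load the .env values with dotenv (variable names taken from the example above)
require('dotenv').config();

const express = require('express');
const app = express();

const port = process.env.port || 3000;  // falls back to 3000 if `port` is not set
const appUrl = process.env.app_url;     // localhost:4000 in development, the Heroku URL in production

app.listen(port, () => console.log(`Server running at ${appUrl}`));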
