I am trying to create a command for a Discord.js bot that, when triggered, builds an array from a folder's contents, selects a random image from it, and sends it.
I can't hard-code the array because the number of images in the folder is always changing.
This is what I have:
if (command.toLowerCase() == "command") {
    const fs = require('fs');
    const imageFolderLocation = "./images/";
    var imageArray = [];
    fs.readdir(imageFolderLocation, (err, files) => {
        imageArray = files;
    });
    let randomImage = Math.floor(Math.random() * imageArray.length) + 1;
    for (var i = 0; i < imageArray.length; i++) {
        message.channel.sendFile(randomImage);
    }
    return;
}
I tried starting the bot with --trace-warnings, but to no avail; no warnings were printed to the console.
Some issues I see:
fs.readdir is async; you cannot use files/imageArray outside of the callback
you send the random int instead of an image
you try to send randomImage imageArray.length times
the + 1 in the random index can run past the end of the array (and index 0 is never picked)
readdir returns bare file names, so the folder path has to be prepended before sending
you ignore err
Corrected code:
if (command.toLowerCase() == "command") {
    const fs = require('fs');
    const imageFolderLocation = "./images/";
    fs.readdir(imageFolderLocation, (err, imageArray) => {
        if (err) return console.error(err);
        let randomIndex = Math.floor(Math.random() * imageArray.length);
        // readdir returns bare file names, so prepend the folder path
        let randomImage = imageFolderLocation + imageArray[randomIndex];
        message.channel.sendFile(randomImage);
    });
    return;
}
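Note: sendFile only exists in older discord.js versions; v12+ removed it in favour of send with a files option. A minimal sketch of the same idea for newer versions (assuming the same ./images/ folder and the same message object in scope):
if (command.toLowerCase() == "command") {
    const fs = require('fs');
    const imageFolderLocation = "./images/";
    fs.readdir(imageFolderLocation, (err, imageArray) => {
        if (err) return console.error(err);
        const randomIndex = Math.floor(Math.random() * imageArray.length);
        // In discord.js v12+, send() accepts file paths through the files option
        message.channel.send({ files: [imageFolderLocation + imageArray[randomIndex]] });
    });
    return;
}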
I am fairly new to Node.js. I have been trying to build an API that classifies an image and returns the result as a JSON object. The result is a score prediction from NSFWJS.
My task: when I pass a JSON object with a {key: value} pair where the key is "Codes" and the value is a list of codes used to fetch image URLs, it should go through every image and return the combined output.
Basic flow of the code:
Input: a JSON object, something like {'Codes': ['Code1', 'Code2', 'Code3']}
For each code in the list:
1. Pass it through a function to store the image
2. Pass the image through the model to get the prediction
3. Store the result in a JSON object
Return the final JSON object.
This is the code of my implementation:
const express = require('express')
const multer = require('multer')
const jpeg = require('jpeg-js')
const tf = require('@tensorflow/tfjs-node')
const nsfw = require('nsfwjs')
const bodyParser=require("body-parser")
const axios = require('axios');
const cheerio = require('cheerio');
var fs = require('fs');
var http = require('http');
var request=require('request');
const app = express()
const upload = multer()
app.use(bodyParser.json());
let _model
let file_content;
let test_data;
function convertBuffer() {
    return new Promise(function (resolve, reject) {
        try {
            file_content = fs.readFileSync('ans1.png');
            resolve(file_content);
        } catch {
            reject('Null String')
        }
    })
}
app.post('/nsfw', async (req, res) => {
    var arrayCodes = req.body.Codes;
    var jsonObj = {};
    var i = 0;
    var iterator = function (i) {
        if (i > arrayCodes.length) {
            console.log("Testing", jsonObj);
            res.json(jsonObj);
            return;
        }
    }
    while (i < arrayCodes.length) {
        var imgUrl = "http://url/to/image" + String(arrayCodes[i])
        //getImageUrl(imgUrl);
        await axios.get(imgUrl)
            .then(response => {
                var result = response.data;
                var resultJson = result.facets.Media.productImage.url;
                var localPath = "ans1.png"
                var fullUrl = resultJson;
                var download = function (uri, filename, callback) {
                    request.head(uri, function (err, res, body) {
                        // console.log('Content-Type:', res.headers['content-type']);
                        // console.log('Content-length:', res.headers['content-length']);
                        request(uri).pipe(fs.createWriteStream(filename).on('close', callback));
                    });
                }
                download(fullUrl, localPath, function () { console.log("Done") });
                var convertBufferdata = convertBuffer();
                convertBufferdata.then(async function (result) {
                    var image = await jpeg.decode(file_content, true)
                    const numChannels = 3
                    const numPixels = image.width * image.height
                    const values = new Int32Array(numPixels * numChannels)
                    for (let i = 0; i < numPixels; i++)
                        for (let c = 0; c < numChannels; ++c)
                            values[i * numChannels + c] = image.data[i * 4 + c]
                    test_data = tf.tensor3d(values, [image.height, image.width, numChannels], 'int32')
                    var predictions = await _model.classify(test_data);
                    jsonObj[String(arrayCodes[i])] = predictions;
                    console.log('JsonObj', jsonObj);
                    i = i + 1;
                    console.log("I going out is", i);
                    iterator(i);
                }, function (err) { console.log(err) })
            })
            .catch(error => {
                i = arrayCodes.length + 1;
                console.log(error);
            })
    }
})
const load_model = async () => {
    _model = await nsfw.load()
}
load_model().then(() => app.listen(8080))
The problem I am facing is that the code does not run fully sequentially, so the resulting JSON object may end up with more keys than it should, or with values that are incorrect because they were overwritten.
Every time a single image is fetched and processed, the next image is downloaded to, and overwrites, the same file.
Any help refactoring the code or making changes to fix the issue will be appreciated.
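One way to make this strictly sequential is a plain for...of loop with await, downloading each image to its own file before classifying it, so nothing gets overwritten mid-run. This is only a rough sketch of that shape (the URL and response fields are reused from the code above; the per-code file name and the promise wrapper around request are assumptions):
app.post('/nsfw', async (req, res) => {
    const jsonObj = {};
    for (const code of req.body.Codes) {
        try {
            const response = await axios.get("http://url/to/image" + String(code));
            const imageUrl = response.data.facets.Media.productImage.url;
            // Download to a unique file per code so nothing is overwritten mid-run
            const localPath = `tmp-${code}.png`;
            await new Promise((resolve, reject) => {
                request(imageUrl)
                    .pipe(fs.createWriteStream(localPath))
                    .on('close', resolve)
                    .on('error', reject);
            });
            const image = jpeg.decode(fs.readFileSync(localPath), true);
            const numPixels = image.width * image.height;
            const values = new Int32Array(numPixels * 3);
            for (let p = 0; p < numPixels; p++)
                for (let c = 0; c < 3; ++c)
                    values[p * 3 + c] = image.data[p * 4 + c];
            const tensor = tf.tensor3d(values, [image.height, image.width, 3], 'int32');
            // Each prediction lands under its own code before the next download starts
            jsonObj[String(code)] = await _model.classify(tensor);
        } catch (err) {
            console.log(err);
        }
    }
    res.json(jsonObj);
});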
I want to access the Shopify API using Node.js with the request module. I get the first 50 items, but I need to send the last id of the products I received so it can loop through all the products until there is no next page (I check whether the last array is less than 50 in length).
So when I get the response I want to feed its lastID back into the same function, until Prarraylength is no longer 50 (or is 0).
The thing is, request works asynchronously, and I don't know how to feed the same function with the resulting lastID in Node.js.
Here is my code:
// (inside the request(...) callback that receives `body`)
let importedData = JSON.parse(body);
//for ( const product in importedData.products ){
//    console.log(`${importedData.products[product].id}`);
//}
lastID = importedData.products[importedData.products.length - 1].id;
let lastIDD = lastID;
console.log(`This is ${lastID}`);
importedData ? console.log('true') : console.log('false');
let Prarraylength = importedData.products.length;
console.log(Prarraylength);
//console.log(JSON.stringify(req.headers));
return lastIDD;
});
You can use a for loop and await to control the flow of your script in this case.
I'd suggest using the request-promise-native module to get items, since it has a promise-based interface, but you could use node-fetch or axios (or any other HTTP client) too.
In this case, to show you the logic, I've created a mock rp which normally you'd create as follows:
const rp = require("request-promise-native");
You can see we're looping through the items, 50 at a time. We're passing the last id as a url parameter to the next rp call. Now this is obviously going to be different in reality, but I believe you can easily change the logic as you require.
const totalItems = 155;
const itemsPerCall = 50;

// Mock items array...
const items = Array.from({ length: totalItems }, (v, n) => { return { id: n + 1, name: `item #${n + 1}` } });

// Mock of request-promise (to show logic..)
// Replace with const rp = require("request-promise-native");
const rp = function (url) {
    let itemPointer = parseInt(url.split("/").slice(-1)[0]);
    return new Promise((resolve, reject) => {
        setTimeout(() => {
            let slice = items.slice(itemPointer, itemPointer + itemsPerCall);
            itemPointer += itemsPerCall;
            resolve({ products: slice });
        }, 500);
    })
}

async function getMultipleRequests() {
    let callIndex = 0;
    let lastID = 0;
    const MAX_CALLS = 20;
    const EXPECTED_ARRAY_LENGTH = 50;
    for (let callCount = 1; callCount < MAX_CALLS; callCount++) {
        // Replace with the actual url..
        let url = "/products/" + lastID;
        let importedData = await rp(url);
        lastID = importedData.products[importedData.products.length - 1].id;
        console.log("Call #: " + ++callIndex + ", Item count: " + importedData.products.length + ", lastID: " + lastID);
        if (importedData.products.length < EXPECTED_ARRAY_LENGTH) {
            console.log("Reached the end of products...exiting loop...");
            break;
        }
    }
}

getMultipleRequests();
I'm trying to process an image collection (~2000 images) with NodeJS. I'm able to extract the information needed, but I'm having a hard time getting the timing right so that I can save the outcome to a JSON file.
Towards the end you'll see
console.log(palette);
// Push single image data to output array.
output.push(palette);
The console.log works fine, but the push method appears to be executed after the empty output array has been written to data.json. I tried having a nested promise where I wouldn't write the file until all images had been processed.
The callback function in getPixels gets executed asynchronously.
The order of the output array is arbitrary.
Any and all help greatly appreciated! Thank you!
// Extract color information from all images in imageDirectory
var convert = require('color-convert'),
    fs = require('fs'),
    getPixels = require("get-pixels"),
    startTime = Date.now();

var processedImages = new Promise((resolve, reject) => {
    var imageDirectory = 'input',
        images = fs.readdirSync(imageDirectory),
        output = [];
    console.log('Found ' + images.length + ' images.');
    for (var image in images) {
        var imageLoaded = new Promise((resolve, reject) => {
            getPixels(imageDirectory + '/' + images[image], function (error, pixels) {
                if (error) {
                    return 'Bad image path';
                }
                resolve(pixels);
            });
        });
        imageLoaded.then((pixels) => {
            var palette = {
                coloredPixels: 0,
                hues: [],
                image: images[image],
                classification: false,
                pixelCount: null
            };
            palette.pixelCount = pixels.shape[0] *
                                 pixels.shape[1] *
                                 pixels.shape[2];
            for (var i = 0; i < 256; i++) {
                palette.hues[i] = 0;
            }
            for (var i = 0; i < palette.pixelCount; i += 4) {
                var rgb = [pixels.data[i],
                           pixels.data[i + 1],
                           pixels.data[i + 2]],
                    hsl = convert.rgb.hsl(rgb),
                    hue = hsl[0],
                    saturation = hsl[1];
                if (saturation) {
                    palette.hues[hue]++;
                    palette.coloredPixels++;
                }
            }
            console.log(palette);
            // Push single image data to output array.
            output.push(palette);
        });
    }
    resolve(output);
});

processedImages.then((output) => {
    // write output array to data.json
    var json = JSON.stringify(output, null, 2);
    fs.writeFileSync('data.json', json);
    // Calculate time spent
    var endTime = Date.now();
    console.log('Finished in ' + (endTime - startTime) / 1000 + ' seconds.');
});
What you want to do is transform an array of "images" to an array of promises and wait for all promises to resolve, and then perform more transformations. Think of it as a series of transformations, because that's what you're doing here. In a nutshell:
const imagePromises = images.map(image => new Promise((resolve, reject) => {
    getPixels(imageDirectory + '/' + image, (error, pixels) => {
        if (error) {
            reject('Bad image path');
            return;
        }
        resolve(pixels);
    });
}));

const output = Promise.all(imagePromises).then(results =>
    results.map(pixels => {
        return {
            // do your crazy palette stuff (build a palette object)
        };
    })
);
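Since output is itself a promise, the JSON write then belongs inside a .then() rather than after the loop, along these lines (reusing startTime from the question's code):
output.then(palettes => {
    // All images are done by the time this runs
    fs.writeFileSync('data.json', JSON.stringify(palettes, null, 2));
    console.log('Finished in ' + (Date.now() - startTime) / 1000 + ' seconds.');
});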
This code gets the data it needs from the Python code, which in turn fetches a list of objects from AWS S3, and reads it via stdout. I've managed to split the string I fetch from Python, tokenize it, and put it in a JavaScript array. The issue arises when I try to put it into the collection using the code below: garbage values end up in the collection.
Here is the Python code:
import sys
import boto
from boto.s3.connection import S3Connection

AWS_KEY = '##################'
AWS_SECRET = '*********************************'
REGION_HOST = 's3.ap-south-1.amazonaws.com'

aws_connection = S3Connection(AWS_KEY, AWS_SECRET, host=REGION_HOST)
bucket = aws_connection.get_bucket('####3')

z = 0
stp = ""
for file_key in bucket.list():
    p = file_key.name
    y = p.split("-")
    for a in y:
        z = z + 1
        if(z % 2 == 0):
            stp = stp + a + " "
    z = 0
print(stp)
sys.stdout.flush()
Here is the JavaScript code.
The Python code is called from the JavaScript, and I get back the string via stdout.
I split the string and store it in a variable in JavaScript.
The problem is that I don't know how exactly I am supposed to insert it into my collection.
Garbage = new Mongo.Collection("garbag");

if (Meteor.isServer) {
    var exec = Npm.require('child_process').exec;
    var Fiber = Npm.require('fibers');
    var Future = Npm.require('fibers/future');
    var out;
    var arr;
    var a = 3
    Meteor.methods({
        callPython: function() {
            var fut = new Future();
            exec('python /home/piyush/pycloud/v1.py', function (error, stdout, stderr) {
                if (error) {
                    throw new Meteor.Error(error, error);
                }
                if (stdout) {
                    console.log(stdout);
                    out = stdout;
                    arr = out.split(" ");
                    for (i = 0; i < 10; i++) {
                        console.log(arr[i]);
                    }
                }
                if (stderr) {
                    console.log("mital");
                }
                new Fiber(function() {
                    fut.return('Python was here');
                }).run();
            });
            console.log("miitttaall");
            console.log(arr);
            if (a < 5) {
                var crr = [{area: "pashan",   amount: arr[0]},
                           {area: "balewadi", amount: arr[1]},
                           {area: "aundh",    amount: arr[2]}];
                _.each(crr, function(doc) {
                    Garbage.insert(doc);
                });
                console.log("willthisprint");
            }
            return arr;
        },
    });
}
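As a side note on the flow above: exec runs asynchronously, so console.log(arr) and the Garbage.insert block execute before the callback has filled arr, which is where the garbage values come from. A minimal sketch of one fix, moving the inserts inside the callback and returning the array through the future (Meteor.bindEnvironment keeps the collection insert inside a fiber):
Meteor.methods({
    callPython: function() {
        var fut = new Future();
        exec('python /home/piyush/pycloud/v1.py', Meteor.bindEnvironment(function (error, stdout, stderr) {
            if (error) {
                fut.throw(new Meteor.Error('python-failed', String(error)));
                return;
            }
            var arr = stdout.split(" ");
            // Insert only after the data actually exists
            var crr = [{area: "pashan",   amount: arr[0]},
                       {area: "balewadi", amount: arr[1]},
                       {area: "aundh",    amount: arr[2]}];
            _.each(crr, function(doc) {
                Garbage.insert(doc);
            });
            fut.return(arr);
        }));
        return fut.wait();
    }
});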
I am trying to select a number of random files from a given directory. Below is my current implementation; however, there are so many files inside the folder that iterating over all of them just to pick a few random ones seems like overkill.
Is there a better solution? My thinking is that knowing all the files inside the folder is a precondition for random selection.
const dirs = fs.readdirSync(IMAGE_BANK_SRC)
    .map(file => {
        return path.join(IMAGE_BANK_SRC, file);
    });

const srcs_dup = [];
dirs.forEach(path => {
    fs.readdirSync(path).forEach(file => {
        srcs_dup.push(file);
    });
});

// Pick a few random ones from `srcs_dup`
Requirements:
The selected random files should be unique
The code should still work if the folder contains fewer files than expected
As fast as possible
Well, readdir and readdirSync return an array, so you can avoid mapping over the entire array of paths by using the length property. We can build a dynamic sample set from a percentage of the length, then store the samples in a new array.
const dirs = fs.readdirSync(IMAGE_BANK_SRC);
const length = dirs.length;
const sampleSet = 25 / 100 * length;
const getRandomIndex = length => Math.floor(Math.random() * length);

let samples = [];
let usedIndices = [];
let randomIndex = undefined;

for (let i = 0; i < sampleSet; i++) {
    do {
        randomIndex = getRandomIndex(length);
    } while (usedIndices.includes(randomIndex));
    usedIndices.push(randomIndex);
    samples.push(dirs[randomIndex]);
}
Basically, in the code below I created randomIndex(), which grabs a random file index. After you get the list of files, I do a while loop to grab a random file from the directory list and add it to the array.
//Grabs a random index between 0 and length - 1
function randomIndex(length) {
    return Math.floor(Math.random() * length);
}

//Read the directory and get the files
const dirs = fs.readdirSync(IMAGE_BANK_SRC)
    .map(file => {
        return path.join(IMAGE_BANK_SRC, file);
    });

const srcs_dup = [];
const hashCheck = {}; //used to check if the file was already added to srcs_dup
var numberOfFiles = dirs.length / 10; //OR whatever # you want

//While we haven't got the number of files we want. Loop.
while (srcs_dup.length < numberOfFiles) {
    var fileIndex = randomIndex(dirs.length); //Math.floor already keeps this in range
    //Check if the file was already added to the array
    if (hashCheck[fileIndex] == true) {
        continue; //Already have that file. Skip it
    }
    //Add the file to the array and object
    srcs_dup.push(dirs[fileIndex]);
    hashCheck[fileIndex] = true;
}
console.log(srcs_dup); //The list of your files
If this doesn't work, let me know.
Here's a simplistic implementation. You should also consider using the path.resolve() method.
const dirs = fs.readdirSync(IMAGE_BANK_SRC)
    .map((e) => { return path.join(IMAGE_BANK_SRC, e); });

// New random list of dirs
const randomList = dirs.slice(0)
    .map((e) => { return Math.random() < .5 ? e : null; })
    .filter((e) => { return e != null; });
First, there is no need to map over everything to join the directory path up front; that is an extra pass over the whole file list.
Second, just loop as many times as the number of files you need:
let result = []
let requiredCount = 3;
let files = fs.readdirSync(IMAGE_BANK_SRC)

while (requiredCount-- && files.length) {
    let length = files.length;
    let selectedIndex = Math.floor(Math.random() * length)
    // splice returns an array, so take its first element
    let selected = files.splice(selectedIndex, 1)[0];
    result.push(path.join(IMAGE_BANK_SRC, selected))
}