Load local dll with node-ffi: No such file - javascript

I try to load a local .dll according the examples on stackoverflow and node-ffi documentation.
But I get the error ENOENT: no such file or directory, open '../test/user32.dll.so'. The file is there (no exception).
The extension '.so' is added automatically. Any idea what I'm doing wrong? Is this code platform dependent? I'm on Debian.
const path = require('path');
const fs = require('fs');
const ffi = require('ffi');
/**
 * Loads user32.dll via node-ffi and moves the mouse cursor to (0, 0).
 * Throws if the dll file cannot be found on disk.
 */
function setCursor() {
    // Anchor the path at this module's directory. A bare relative path like
    // '../test' is resolved against process.cwd(), which depends on where the
    // process was launched from — that is why fs.existsSync and ffi disagreed.
    const dllFile = path.join(__dirname, '..', 'test', 'user32.dll');
    if (!fs.existsSync(dllFile)) {
        throw (new Error('dll does not exist'));
    }
    // NOTE(review): node-ffi appends the host platform's shared-library suffix
    // ('.so' on Linux) when resolving libraries, and user32.dll is a Windows
    // library — so even with a correct path this call is only meaningful on
    // Windows, not on Debian.
    const user32 = ffi.Library(dllFile, {
        "SetCursorPos": [
            "bool", ["int32", "int32"]
        ]
    });
    console.log(user32.SetCursorPos(0, 0));
}
setCursor();

It looks like path doesn't recognize ../test as being the parent folder. I think path.join(__dirname, '..', 'test', 'user32.dll'); should get you to the right place.

Related

Spawn process inside process or detached it pkg builder

I'm not sure what the problem is here: the mongod process does not spawn inside the program.exe created with pkg. Before compiling, I tested that the script can launch the mongod process. After testing the compiled version, it appears spawn can't read the pkg filesystem (snapshot).
const { spawn } = require('child_process');
const { parse, join } = require('path')

// Directory that contains the running executable (works for pkg-ed binaries,
// where process.argv[0] is the packaged .exe).
let processPath = parse(process.argv[0]);
let processDir = processPath.dir;

// Build paths with path.join rather than hand-written '\\' separators so the
// snippet is not hard-wired to Windows.
// NOTE(review): __dirname points inside pkg's /snapshot virtual filesystem;
// mongod is an external binary and cannot read from it, so the config file
// must be shipped on the real filesystem (e.g. next to the executable).
const args = [
    '-f', join(__dirname, 'configs', 'mongodb.yml'),
    '--dbpath', join(processDir, 'database', 'data'),
    '--logpath', join(processDir, 'database', 'log', 'system.log'),
];
const options = {
    cwd: join(processDir, 'bin')
};
const mongod = spawn('mongod', args, options);
mongod.stdout.on('data', chunk => {
    console.log(chunk.toString())
});
mongod.stdout.on('error', chunk => {
    console.log(chunk.toString())
});
mongod.on('spawn', () => {
    console.log('success')
});
mongod.on('error', function(error) {
    console.log(error)
});
Build Dir
build
build/program.exe
build/bin
build/bin/mongod.exe
build/database
build/database/data
build/database/log/system.log
Package.json pkg configurations
"bin": "dist/application.js",
"pkg": {
"targets": ["node16-win-x64"],
"outputPath": "dist/build",
"assets": [
"dist/configs/*"
]
}
Here is my solution to this issue, tested on Linux Ubuntu 22.04 LTS.
Case scenario:
I needed to include an executable file hello_world as an asset into /snapshot/project/bin/hello_world virtual path and based on some conditions execute it inside the Linux environment.
The problem:
I was getting the following error when I've been trying to execute the command via child_process.spawn:
/bin/sh: 1: /snapshot/project/bin/hello_world: not found
So clearly my OS is trying to execute the hello_world command via /bin/sh; however, the system is unable to access the /snapshot virtual filesystem and is therefore not able to execute it.
The workaround:
Clearly, the main file system is unable to access the virtual file system, but we can do the opposite, by copying our executable file from the virtual file system into the main file system and executing it from there, basically, this is what I did:
//node packages
const fs = require('fs');
const os = require('os');
const path = require('path');
const {execSync, spawn} = require('child_process');

// executable file name
const executable = 'hello_world';
//file path to the asset executable file (inside pkg's /snapshot when pkg-ed)
const remoteControlFilePath = path.join(__dirname, `../bin/${executable}`);
let executableFileFullPath = remoteControlFilePath;

// avoid the workaround if the parent process is not the pkg-ed version.
if (process.pkg) {
    // creating a temporary folder for our executable file
    const destination = fs.mkdtempSync(`${os.tmpdir()}${path.sep}`);
    const destinationPath = path.join(destination, executable);
    executableFileFullPath = destinationPath;
    // copy the executable file out of the virtual filesystem into the
    // temporary folder on the real filesystem
    fs.copyFileSync(remoteControlFilePath, destinationPath);
    // on Linux systems you need to manually make the file executable.
    // fs.chmodSync avoids spawning a shell: the original
    // execSync(`chmod +x ${destinationPath}`) breaks on paths containing
    // spaces and is a command-injection hazard.
    fs.chmodSync(destinationPath, 0o755);
}

// using {detached: true}, execute the command independently of its parent process
// to avoid the main parent process' failing if the child process failed as well.
const child = spawn(executableFileFullPath, {detached: true});
child.stdout.on('data', (data) => {
    console.log(`child stdout:\n${data}`);
});
child.stderr.on('data', (data) => {
    console.error(`child stderr:\n${data}`);
});
child.on('exit', (code, signal) => {
    console.log(`child process exited with code ${code} and signal ${signal}`);
});

how could i use require() to read a local json file by just adding the local project json file path?

This is how I managed to be able to read the json file
posts.js file
const PATH = require('/Users/jorgesisco/Dropbox/Programming_Practice/Web_Development/PWJ/Module-8/Blog/pwj-module-8-my-blog-api/exercise/data.json');
// Data-access facade for blog posts. Only readData() is implemented; the
// other methods are stubs still to be filled in.
class Post {
get() {
// get posts
}
getIndividualBlog() {
// get one blog post
}
addNewPost() {
// add new post
}
// Returns the object that require() loaded from data.json at module load
// time. NOTE(review): PATH holds the parsed file *contents* (require()
// parses .json files), not a path string — the name is misleading.
readData() {
return PATH;
}
}
module.exports = Post;
Now in app.js I call the function and I am able to see the json file in postman.
// Express entry point: serves the blog-post JSON through a single GET route.
const express = require('express');
const Post = require('./api/models/posts');

const app = express();
const postsData = new Post();

// Sample in-memory post (not used by the route below).
const posts = [
  {
    id: '1581461442206',
    title: 'This is a New Blog Post',
    content: 'This is the content! ',
    post_image: 'uploads/post-image-1581461442199.jpg',
    added_date: '1581461442206',
  },
];

app.get('/api/posts', function (req, res) {
  // readData() returns the JSON object loaded in posts.js; send it so it is
  // visible in Postman.
  const payload = postsData.readData();
  res.status(200).send(payload);
});

app.listen(3000, function () {
  console.log('listening on http://localhost:3000');
});
I think I shouldn't use the whole absolute file path for the json file, but when I use something like ./data.json an error happens because it can't find the json file.
For accessing the files from the same dir you need to pass './'
Example: require('./data.json');
for accessing the files from one dir out from current dir '../'
Example: require('../data.json');
For accessing the files from two dir out within different folder '../../foldername/data.json'
Example: require('../../dataFolder/data.json');
Note: require() resolves relative paths against the directory of the file that calls require(), not the directory from which you run the main code (app.js).
Move your data.json file inside the directory where your code is located. Let's say main_code/datadir. So, it looks like this now -
-- maincode
-- datadir
-- data.json
-- posts.js
-- app.js
Refer the file in posts.js code as require('./datadir/data.json')(Assuming datadir/ is in the same path/level as your app.js code)
Run app.js

How to import object to another file?

I'm using Node.js to build a web application.
I have two files. Server.js - where I call the server to go online etc. And the other one is a file which includes a big object with data. I imported the file with data into server.js, I get the object in postman when I set the website to be live. But I can't dive inside the object to get the data inside the object. The error I'm getting says that the variable where my data is stored is not defined.
I think the fix is to import the albumsData variable into server.js, but I'm completely stuck and can't find out how to do it. If anyone has an idea, please share.
albumsData.js
const express = require('express');
const router = express.Router();
// In-memory data served by this router; the literal below is a placeholder
// standing in for the real data.
let albumsData = {
filled with data
}
// GET /albumData -> responds with the full albumsData object.
router.get('/albumData', (req, res) => {
res.send(albumsData);
});
// Exports the *router*, not albumsData itself — so server.js never sees the
// albumsData variable directly. This is the root cause of the question's
// "albumsData is not defined" error.
module.exports = router;
Server.js
// Mount the album router (that file exports an Express router).
app.use(require('./api/albumData/unikkatilData'))
// Fallback handler for unmatched routes.
// NOTE(review): albumsData is not defined in this file — it lives inside the
// router module and is never exported — so this handler throws a
// ReferenceError at runtime. That is the error described in the question.
app.use((req, res) => {
res.status(404)
.send(albumsData)
});
app.listen(4000, () => {
console.log('hello worldd')
})
If you want the albumsData object then you can do it like this:
In you albumsData.js file:
// Export the plain data object directly (instead of wrapping it in a router)
// so other modules can require() it and use the data.
const albumsData = {
// Bunch of data
}
module.exports = albumsData
Then in your server.js file:
const albumData = require('./api/albumsData') // Make sure this path points directly to the albumsData.js file
Alternatively, move the albumsData object to a new file (e.g. utils.js)
and export it exports.albumsData = albumsData; then you can call it
with const utils = require('./utils') ; utils.albumsData

Relative path on fs.watchfile using Node.js

I use this method to read data from the file when it changes. I restructured my project and now the data isn't read anymore.
// Re-read Data.json whenever the watched file changes and broadcast the
// parsed widgets to all connected socket clients.
fs.watchFile("???", function (curr, prev) {
  console.log("Änderung auf Data.json");
  fs.readFile("Data.json", "utf8", function (err, data) {
    const parsed = JSON.parse(data);
    widgets = parsed;
    console.log("Daten ausgelesen");
    io.emit("dataUpdate", { widgets });
  });
});
You can see my file paths here. The method above is in client.js and I want to refer to Data.json.
I tried:
"../Data.json" and
"/Server/Data.json"
Thanks for your help!
PS.: I know that there are threads explaining relative paths, but I still couldn't fix my problem.
You can use __dirname https://nodejs.org/docs/latest/api/modules.html#modules_dirname
example:
const path = require('path');
// __dirname is the directory of the current module file, so this resolves
// data.json one level above this file regardless of the process working dir.
const filePath = path.join(__dirname, '..', 'data.json');

Copy a source file to another destination in Nodejs

I'm trying to copy an image from a folder to another using fs-extra module .
var fse = require('fs-extra');
/**
 * Copies mainisp.jpg (next to this module) into the local test/ folder.
 * Errors are reported via console.error, matching fs-extra's callback style.
 */
function copyimage() {
    const path = require('path');
    // Anchor both ends of the copy at this module's directory: a bare relative
    // name like 'mainisp.jpg' is resolved against process.cwd() (E:\ in the
    // reported error), and the destination must include the target *file*
    // name, not just the folder, when copying a single file.
    const src = path.join(__dirname, 'mainisp.jpg');
    const dest = path.join(__dirname, 'test', 'mainisp.jpg');
    fse.copy(src, dest, function (err) {
        if (err)
            return console.error(err)
    });
}
This is my directory
and this is the error I get all the time:
Error {errno: -4058, code: "ENOENT", syscall: "lstat", path:
"E:\mainisp.jpg", message: "ENOENT: no such file or directory, lstat
'E:\mainisp.jpg'"}
and by changing destination to ./test/ I get this error
Error {errno: -4058, code: "ENOENT", syscall: "lstat", path:
"E:\Development\Node apps\Node softwares\Digital_library\mainisp.jpg",
message: "ENOENT: no such file or directory, lstat 'E:\Devel…
apps\Node softwares\Digital_library\mainisp.jpg'"}
Note: I'm not testing this in browser. It's an Nwjs app and the pics of error attached are from Nwjs console.
You can do this using the native fs module easily using streams.
const fs = require('fs');
const path = require('path');

// Source file sits next to this module; destination is a local test/ folder.
let filename = 'mainisp.jpg';
let src = path.join(__dirname, filename);
let destDir = path.join(__dirname, 'test');

// { recursive: true } makes mkdirSync a no-op when the directory already
// exists, replacing the original fs.access -> mkdirSync sequence, which
// raced between the check and the create and treated *any* access error
// (e.g. permissions) as "directory missing".
fs.mkdirSync(destDir, { recursive: true });
copyFile(src, path.join(destDir, filename));
/**
 * Copies src to dest using streams.
 *
 * @param {string} src  - path of the file to read
 * @param {string} dest - path of the file to write
 * @returns {Promise<void>} settles when copying finished or an error was
 *   logged. The promise always resolves (errors are logged and swallowed,
 *   matching the original log-and-continue behavior), so existing
 *   fire-and-forget callers are unaffected while new callers may await it.
 *
 * Fixes over the original: errors on the *write* stream are now observed
 * (the original only watched the read side, so a failed write — e.g.
 * permission denied or disk full — vanished silently), and completion is
 * reported on the writable 'finish' event, which guarantees all data was
 * flushed to dest (the read stream's 'end' does not).
 */
function copyFile(src, dest) {
    return new Promise((resolve) => {
        const readStream = fs.createReadStream(src);
        const writeStream = fs.createWriteStream(dest);
        const onError = (err) => {
            console.log(err);
            resolve();
        };
        readStream.once('error', onError);
        writeStream.once('error', onError);
        writeStream.once('finish', () => {
            console.log('done copying');
            resolve();
        });
        readStream.pipe(writeStream);
    });
}
Try:
const fs = require('fs-extra');
// The original snippet used `path` without requiring it, which throws a
// ReferenceError before the copy even runs.
const path = require('path');
// path.resolve anchors the source at this module's directory; the
// destination includes the target file name, not just the folder.
fs.copySync(path.resolve(__dirname, './mainisp.jpg'), './test/mainisp.jpg');
As you can see in the error message, you're trying to read the file from E:\mainisp.jpg instead of the current directory.
You also need to specify the target path with the file, not only the destination folder.
Try:
const fs = require('fs');
// src and dest are placeholders the caller must define: both should be full
// file paths (ideally anchored with path.join(__dirname, ...)). Copies the
// file at src to dest synchronously.
fs.copyFileSync(src, dest);

Categories