SQLite Filename cannot be null / undefined - javascript

I'm following this tutorial where we're creating an app using next.js. We're using sqlite, and testing a database. In the tutorial we write the following 'database-test.js' file:
const sqlite = require('sqlite');
async function setup() {
const db = await sqlite.open('./mydb.sqlite');
await db.migrate({force: 'last'});
const people = await db.all('SELECT * FROM person');
console.log('ALL PEOPLE', JSON.stringify(people, null, 2));
const vehicles = await db.all('SELECT * FROM vehicle');
console.log('ALL VEHICLES', JSON.stringify(vehicles, null, 2));
}
setup();
I get the following error when I $node database-test.js:
(node:26446) UnhandledPromiseRejectionWarning: Error: sqlite: filename cannot be null / undefined
I don't really understand why we are opening a .sqlite file, and not a .db file. I've made sure I have the correct path to the .sqlite file. What is the cause of this error and how might I fix it? I can't seem to find any other documentation or examples of the .open function.

As @ANimator120 mentioned, but with some tweaks.
Use require because it runs on server side.
Install sqlite3 by npm i sqlite3.
Then add path to your migrations folder if it's not in the project root.
// `sqlite` is the promise-based wrapper; `sqlite3` supplies the driver.
const sqlite3 = require('sqlite3');
const sqlite = require('sqlite');

/**
 * Opens the database. Since sqlite v4, open() takes a config object
 * ({ filename, driver }) — passing a bare path string is what produces
 * "filename cannot be null / undefined".
 * @returns {Promise<import('sqlite').Database>} opened database handle
 */
async function openDb() {
  return sqlite.open({
    filename: './database.db',
    driver: sqlite3.Database,
  });
}

async function setup() {
  const db = await openDb();
  await db.migrate(
    {
      migrationsPath: './src/migrations', // add custom path to your migrations
      force: 'last'
    }
  );

  const people = await db.all('SELECT * FROM Person');
  console.log('all person', JSON.stringify(people, null, 2));

  const vehicle = await db.all(`SELECT a.*, b.* FROM Person as a
LEFT JOIN Vehicle as b
ON a.id = b.ownerId
`);
  console.log('all vehicles', JSON.stringify(vehicle, null, 2));
}

// Surface failures instead of an UnhandledPromiseRejectionWarning.
setup().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});
Everything working fine, at least for me.

You need to npm install both sqlite and sqlite3
The correct way to use the open() method would be.
// You need to npm install BOTH packages: `sqlite` is the promise
// wrapper, `sqlite3` is the underlying driver it delegates to.
const sqlite3 = require('sqlite3');
const { open } = require('sqlite');

/**
 * Opens (creating if missing) the SQLite database file.
 * open() must receive a config object — a bare path string triggers
 * "sqlite: filename cannot be null / undefined".
 * @returns {Promise<import('sqlite').Database>} opened database handle
 */
async function openDB() {
  return open({
    filename: './mydb.sqlite',
    driver: sqlite3.Database
  });
}

async function setup() {
  const db = await openDB();
  // Re-apply the most recent migration on every run.
  await db.migrate({ force: 'last' });
}

// Log failures instead of leaving an unhandled promise rejection.
setup().catch(console.error);

Turns out they were using sqlite 3.0.3 in the tutorial. With sqlite3, the correct way to use open() in this case is:
import { open } from 'sqlite'
import sqlite3 from 'sqlite3'

// Opens the tutorial database; import and invoke this from another file.
// open() expects a config object pairing the file with the sqlite3 driver.
export async function openDB () {
  const db = await open({
    driver: sqlite3.Database,
    filename: './mydb.sqlite'
  })
  return db
}

Related

Electron nedb-promises storage file gets replaced on every app start

I've been trying for a few hours now to understand why this happens. In my Electron app I would like to use the nedb-promises package ("nedb-promises": "^6.2.1"). Installation and configuration work so far, but on every app start (dev & prod) the db file gets replaced by a new / empty one. Shouldn't the package handle that?
I took the code from this example:
https://shivekkhurana.medium.com/persist-data-in-electron-apps-using-nedb-5fa35500149a
// db.js
const {app} = require('electron');
const Datastore = require('nedb-promises');
const dbFactory = (fileName) => Datastore.create({
filename: `${process.env.NODE_ENV === 'development' ? '.' : app.getPath('userData')}/data/${fileName}`,
timestampData: true,
autoload: true
});
const db = {
customers: dbFactory('customers.db'),
tasks: dbFactory('tasks.db')
};
module.exports = db;
import db from './db'
....
// load task should not be important because file is already replaced when arriving here
ipcMain.handle('Elements:Get', async (event, args) => {
// 'Select * from Customers
let data = await db.customers.find({});
console.log(data);
return data;
})
...
// Set an item
ipcMain.handle('Element:Save', async (event, data) => {
console.log(data)
const result = db.customers.insertOne(data.item).then((newDoc) => {
console.log(newDoc)
return newDoc
}).catch((err) => {
console.log("Error while Adding")
console.log(err)
});
console.log(result);
return result;
})
Note: After "adding" newDoc contains the new element and when checking the file manually in the filesystem it is added. When i now close the app and open again the file got replaced.
I've checked the docs up and down - i have no clue what i'm doing wrong - thanks for your help.

Got the error code "TypeError: Cannot read properties of null (reading 'sendTransaction')" while trying to deploy to the goerli test network

Here is my hardhat.config.js code
// hardhat.config.js
require("@nomiclabs/hardhat-ethers");
require("@nomiclabs/hardhat-waffle");
require("dotenv").config()
// You need to export an object to set up your config
// Go to https://hardhat.org/config/ to learn more
const GOERLI_URL = process.env.GOERLI_URL;
const PRIVATE_KEY = process.env.PRIVATE_KEY;
/**
* @type import('hardhat/config').HardhatUserConfig
*/
module.exports = {
solidity: "0.8.4",
networks: {
goerli: {
url: process.env.GOERLI_URL || "https://eth-goerli.alchemyapi.io/v2/PT8BEXCUKwsfvcBz8y3g7A2V7LdhUKQA",
accounts: process.env.PRIVATE_KEY
}
}
};
And my deploy.js script:
// scripts/deploy.js
const hre = require("hardhat");
async function main() {
// We get the contract to deploy.
const BuyMeACoffee = await hre.ethers.getContractFactory("BuyMeACoffee");
const buyMeACoffee = await BuyMeACoffee.deploy();
await buyMeACoffee.deployed();
console.log("BuyMeACoffee deployed to:", buyMeACoffee.address);
}
// We recommend this pattern to be able to use async/await everywhere
// and properly handle errors.
main()
.then(() => process.exit(0))
.catch((error) => {
console.error(error);
process.exit(1);
});
I really do not know how to get it sorted and it pretty much has left me in a loop.
Please help. Thank You
I am pretty much new to using hardhat and npm. My current node v16.13.1 (npm v8.1.2)
There was an error in the hardhat.config file instead of accounts:process.env.PRIVATE_KEY, it should have been accounts:[process.env.PRIVATE_KEY]

Sveltekit & Fleek (IPFS) import syntax problem?

I have managed to use fleek to update IPFS via straight javascript. I am now trying to add this functionality to a clean install of a svelteKit app. I think I am having trouble with the syntax around imports, but am not sure what I am doing wrong. When I click the button on the index.svelte I get the following error
Uncaught ReferenceError: require is not defined
uploadIPFS upload.js:3
listen index.mjs:412..........(I truncated the error here)
A few thoughts
I am wondering if it works in plain JavaScript because there it is called in Node (running on the server), whereas in Svelte it runs on the client?
More Details
The index.svelte file looks like this
<script>
import {uploadIPFS} from '../IPFS/upload'
</script>
<button on:click={uploadIPFS}>
upload to ipfs
</button>
the upload.js file looks like this
export const uploadIPFS = () => {
const fleek = require('@fleekhq/fleek-storage-js');
const apiKey = 'cZsQh9XV5+6Nd1+Bou4OuA==';
const apiSecret = '';
const data = 'pauls test load';
const testFunctionUpload = async (data) => {
const date = new Date();
const timestamp = date.getTime();
const input = {
apiKey,
apiSecret,
key: `file-${timestamp}`,
data
};
try {
const result = await fleek.upload(input);
console.log(result);
} catch (e) {
console.log('error', e);
}
};
testFunctionUpload(data);
};
I have also tried using the other import syntax and when I do I get the following error
500
global is not defined....
import with the other syntax is
import fleekStorage from '@fleekhq/fleek-storage-js';
function uploadIPFS() {
console.log('fleekStorage',fleekStorage)
};
export default uploadIPFS;
*I erased the api secret in the code above. In future I will store these in a .env file.
Even more details (if you need them)
The file below will update IPFS and runs via the command
npm run upload
That file is below. For my version that I used in svelte I simplified the file by removing all the file management and just loading a variable instead of a file (as in the example below)
const fs = require('fs');
const path = require('path');
const fleek = require('@fleekhq/fleek-storage-js');
require('dotenv').config()
const apiKey = process.env.FLEEK_API_KEY;
const apiSecret = process.env.FLEEK_API_SECRET;
const testFunctionUpload = async (data) => {
const date = new Date();
const timestamp = date.getTime();
const input = {
apiKey,
apiSecret,
key: `file-${timestamp}`,
data,
};
try {
const result = await fleek.upload(input);
console.log(result);
} catch(e) {
console.log('error', e);
}
}
// File management not used a my svelte version to keep it simple
const filePath = path.join(__dirname, 'README.md');
fs.readFile(filePath, (err, data) => {
if(!err) {
testFunctionUpload(data);
}
})

Dynamically change database with Knex queries

I have an issue when I am trying to set the Knex database dynamically. I have multiple databases, which increment every hour (e.g. db-1-hour, db-2-hour). When we switch into a new hour I want to use the next database. I created a sample function which returns a new Knex instance based on the new database, but I got a deprecation warning.
My config
import knex from 'knex';
const knexConfig = {
client: 'pg',
connection: {
host: host,
port: port,
user: user,
database: '',
password: password,
},
pool: {
min: 2,
max: 10,
},
timezone: 'UTC',
};
export const getCurrentDb = async () => {
const hourDb = await getLatestDbName()
knexConfig.connection.database = hourDb; // I update the database name
return knex(knexConfig);
}
Usage
import { getCurrentDb } from "./config"
const getSomething = async () => {
const db = await getCurrentDb()
return db.select().from("something")
}
The code is working but I always get this warning message:
calling knex without a tableName is deprecated. Use knex.queryBuilder() instead.
How could I connect to a database dynamically? Thank you in advance!
The warning message is not related to the DB switch mechanism.
Try to change your select statement to something like:
import { getCurrentDb } from "./config"
const getSomething = async () => {
const db = await getCurrentDb()
return db("something").columns('*')
}

async function for retrieving Google Secret Manager variable returns promise pending

I am trying to switch my environment variables to Google Secrets Manager, but I am encountering a problem. I am running an expressjs api trying to establish a database connection. But whatever I try, it only returns Promise { <pending> } instead of waiting for the async function to finish. Any help is highly appreciated!
gcloud.js
// Package name is '@google-cloud/secret-manager' (the '#' in the scraped
// snippet is a transcription artifact).
const { SecretManagerServiceClient } = require('@google-cloud/secret-manager');

const client = new SecretManagerServiceClient();

/**
 * Fetches the latest version of a secret from Google Secret Manager.
 * @param {string} name - the short secret id (not the full resource path)
 * @returns {Promise<string>} the secret payload decoded as a string
 */
async function getSecret(name) {
  // Use a distinct local: `const name = ...` redeclared the parameter and
  // threw "SyntaxError: Identifier 'name' has already been declared".
  const resourceName = `projects/PROJECT-ID/secrets/${name}/versions/latest`;
  const [version] = await client.accessSecretVersion({
    name: resourceName,
  });
  const payload = version.payload.data.toString();
  return payload;
}

module.exports.getSecret = getSecret;
config.js
const gcloud = require('./config/gcloud.js')
const config = {
authSecret: gcloud.getSecret(SECRET_NAME)
}
module.exports = config
You need to await the result of gcloud.getSecret:
// Not actually valid
const config = {
authSecret: await gcloud.getSecret(SECRET_NAME)
}
However, top-level await isn't widely supported yet, so you'll need to build the config inside a function: NodeJS Async / Await - Build configuration file with API call

Categories