I made a WebSocket server using Node.js, and my problem is as follows: two clients connect to the server, and when client 1 sends data to the server, I want to forward it to client 2.
In order to do this, I took inspiration from this kind of solution: https://github.com/websockets/ws/issues/367
Looks great, but when I apply it, it gives me the following error:
TypeError [ERR_INVALID_ARG_TYPE]: The first argument must be of type string or an instance of Buffer, ArrayBuffer, or Array or an Array-like Object. Received an instance of Object
at new NodeError (node:internal/errors:371:5)
at Function.from (node:buffer:323:9)
at toBuffer (/home/ttt/node_modules/ws/lib/buffer-util.js:95:18)
at Sender.send (/home/ttt/node_modules/ws/lib/sender.js:315:14)
at WebSocket.send (/home/ttt/node_modules/ws/lib/websocket.js:467:18)
at WebSocket.<anonymous> (/home/ttt/STT_project/sttproject/IHM_server/websocketServer.js:43:15)
at WebSocket.emit (node:events:526:28)
at Receiver.receiverOnMessage (/home/ttt/node_modules/ws/lib/websocket.js:1137:20)
at Receiver.emit (node:events:526:28)
at Receiver.dataMessage (/home/ttt/node_modules/ws/lib/receiver.js:528:14)
code: 'ERR_INVALID_ARG_TYPE'
I tried different methods, but the result remains the same. Can anyone help me with this, or propose a better solution to my problem?
Here is a sample of my code:
wss.on("connection", ws => {
  ws.id = id++;
  lookup[ws.id] = ws;
  ws.on("message", data => {
    let message;
    if (data.toString() != "\n") {
      message = {
        "message": {
          "text": data.toString(),
          "end": false
        }
      };
    } else {
      message = {
        "message": {
          "text": "",
          "end": true
        }
      };
    }
    lookup[0].send(message);
  });
...
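For reference, the stack trace shows send() receiving a plain object; a minimal sketch of the forwarding step that serializes it first (assuming, as in the snippet, that client 2 is the socket stored under id 0 in lookup):

// ws can only send strings, Buffers, or array-like binary data,
// so serialize the object before forwarding it to the other client
lookup[0].send(JSON.stringify(message));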
Problem
I am trying to create a message object in discord.js in order to send a message from my account. When I do this, I always get this error:
Cannot read property 'slice' of undefined
Code
Here is my code:
client.on("message", async message => {
  if (message.author && message.author.bot) {
    return;
  }
  try {
    const myMessage = new Discord.Message(
      client,
      { author: { id: "myID" }, content: "My new Message" },
      message.channel
    );
    console.log(myMessage);
  } catch (err) {
    console.error(err);
  }
});
Additional Information
I have logged Discord, client and message, all exist and are valid objects. I also replaced the data part of the constructor with this:
{ author: message.author, content: message.content },
but to no avail. The error was the same. So what am I doing wrong here?
Solution
After hacking the source code of discord.js I discovered the culprit in the Message.js file. It is this line:
this.createdTimestamp = SnowflakeUtil.deconstruct(this.id).timestamp;
The problem is that I never passed any id to the constructor function in my data object. Just passing a fake id (a string of random numbers) did the trick.
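For illustration, here is that fix applied to the snippet above (a sketch; the id value is an arbitrary made-up snowflake, any string of random digits works):

const myMessage = new Discord.Message(
  client,
  // the constructor derives createdTimestamp from the id via
  // SnowflakeUtil.deconstruct(this.id), so an id must be present
  { id: "123456789012345678", author: { id: "myID" }, content: "My new Message" },
  message.channel
);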
I am trying to take a Firebase backup through a cloud function. The function was running completely fine when I was using runtime Node.js 8. However, since that runtime is going to be deprecated soon, I now have to use Node.js 10. My cloud function now fails with the error below:
Error: function execution failed. Details:
Cannot read property 'charCodeAt' of undefined
Detailed error log:
{
insertId: "000000-f688386c-8f2b-4146-aaf7-1fe67c656fa2"
labels: {…}
logName: "projects/firestore-249705/logs/cloudfunctions.googleapis.com%2Fcloud-functions"
receiveTimestamp: "2020-05-27T05:31:31.171084310Z"
resource: {…}
severity: "ERROR"
textPayload: "TypeError: Cannot read property 'charCodeAt' of undefined
at peg$parsetemplate (/workspace/node_modules/google-gax/build/src/pathTemplateParser.js:304:17)
at Object.peg$parse [as parse] (/workspace/node_modules/google-gax/build/src/pathTemplateParser.js:633:18)
at new PathTemplate (/workspace/node_modules/google-gax/build/src/pathTemplate.js:55:54)
at segments.forEach.segment (/workspace/node_modules/google-gax/build/src/pathTemplate.js:120:29)
at Array.forEach (<anonymous>)
at PathTemplate.render (/workspace/node_modules/google-gax/build/src/pathTemplate.js:114:23)
at FirestoreAdminClient.databasePath (/workspace/node_modules/@google-cloud/firestore/build/src/v1/firestore_admin_client.js:904:57)
at exports.scheduledFirestoreBackup (/workspace/index.js:6:31)
at Promise.resolve.then (/layers/google.nodejs.functions-framework/functions-framework/node_modules/@google-cloud/functions-framework/build/src/invoker.js:330:28)
at process._tickCallback (internal/process/next_tick.js:68:7)"
timestamp: "2020-05-27T05:31:21.820Z"
trace: "projects/firestore-249705/traces/74e27700d135763bc4e7892ebb1a2333"
}
My index.js is as below:
const firestore = require('@google-cloud/firestore');
const client = new firestore.v1.FirestoreAdminClient();
// Replace BUCKET_NAME
const bucket = 'gs://gcp_firestore_ae2/firestore_export';

exports.scheduledFirestoreBackup = (event, context) => {
  const databaseName = client.databasePath(
    process.env.GCLOUD_PROJECT,
    '(default)'
  );

  return client
    .exportDocuments({
      name: databaseName,
      outputUriPrefix: bucket,
      // Leave collectionIds empty to export all collections
      // or define a list of collection IDs:
      // collectionIds: ['users', 'posts']
      collectionIds: ['most_valuable_items', 'nric_img', 'pay_slip', 'pic_of_ofc_entrance'],
    })
    .then(responses => {
      const response = responses[0];
      console.log(`Operation Name: ${response['name']}`);
      return response;
    })
    .catch(err => {
      console.error(err);
    });
};
I had the same issue. I fixed it by changing:
process.env.GCLOUD_PROJECT
to my actual Project ID (e.g. "my_app_37274")
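In code, that change would look like this (a sketch; "my_app_37274" stands in for your real project ID):

const databaseName = client.databasePath('my_app_37274', '(default)');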
I had the same issue. It seems that in previous versions we didn't need to pass GCLOUD_PROJECT as an environment variable, i.e. it was detected automatically, but from Node 10 onwards we need to pass it explicitly.
That's how I resolved it.
So instead of hardcoding it, try passing GCLOUD_PROJECT in the cloud function's environment variables.
Note: GCLOUD_PROJECT is the project ID, not the project name.
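For example, one way to set that variable at deploy time with the gcloud CLI (a sketch; the trigger, runtime, and project ID are placeholders for your own setup):

gcloud functions deploy scheduledFirestoreBackup \
  --runtime nodejs10 \
  --trigger-topic firestore-backup \
  --set-env-vars GCLOUD_PROJECT=my-project-id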
I've got something really weird happening in Lambda. I have a JS function that interprets a string input. This code is covered by unit tests that confirm the validation logic works as expected. But once it's deployed to Lambda, I get different behavior. Is there a really noob error that I'm overlooking, or is there some setting in Lambda that might explain why my code is being interpreted differently?
Here is my function:
public async processMessage(message: string): Promise<void> {
  logger.info(`Message: ${message}`);
  const params = JSON.parse(message);
  if ('fileId' in params && 'location' in params && 'fileType' in params) {
    return this.fileLoader.load(params.fileId, params.location, fromString(params.fileType));
  } else {
    throw new InvalidArgumentError('A required field on the message is missing.');
  }
}
The validation works as expected and the function is invoked correctly. Everything seems great!
Then I deploy it to Lambda and call it there, and I get this error:
Cannot use 'in' operator to search for 'fileId' in {\"fileId\":\"1234\",\"fileType\": \"TEST_FILE\",\"location\": \"https://d4-file-proc-staging.s3.amazonaws.com/testFile.csv\"}
Here are the CloudWatch logs:
Message: "{\"fileId\":\"1234\",\"fileType\": \"TEST_FILE\",\"location\": \"https://d4-file-proc-staging.s3.amazonaws.com/testFile.csv\"}"
ERROR Unhandled Promise Rejection
{
"errorType": "Runtime.UnhandledPromiseRejection",
"errorMessage": "TypeError: Cannot use 'in' operator to search for 'fileId' in {\"fileId\":\"1234\",\"fileType\": \"TEST_FILE\",\"location\": \"https://d4-file-proc-staging.s3.amazonaws.com/testFile.csv\"}",
"reason": {
"errorType": "TypeError",
"errorMessage": "Cannot use 'in' operator to search for 'fileId' in {\"fileId\":\"1234\",\"fileType\": \"TEST_FILE\",\"location\": \"https://d4-file-proc-staging.s3.amazonaws.com/testFile.csv\"}",
"stack": [
"TypeError: Cannot use 'in' operator to search for 'fileId' in {\"fileId\":\"1234\",\"fileType\": \"TEST_FILE\",\"location\": \"https://d4-file-proc-staging.s3.amazonaws.com/testFile.csv\"}",
" at FileLoaderMessageHandler.processMessage (/var/task/dist/infrastructure/fileLoaderMessageHandler.js:14:22)",
" at /var/task/dist/aws/fileLoader.js:23:57",
" at Array.forEach (<anonymous>)",
" at Runtime.exports.handle [as handler] (/var/task/dist/aws/fileLoader.js:23:18)",
" at Runtime.handleOnce (/var/runtime/Runtime.js:66:25)"
]
},
"promise": {},
"stack": [
"Runtime.UnhandledPromiseRejection: TypeError: Cannot use 'in' operator to search for 'fileId' in {\"fileId\":\"1234\",\"fileType\": \"TEST_FILE\",\"location\": \"https://d4-file-proc-staging.s3.amazonaws.com/testFile.csv\"}",
" at process.<anonymous> (/var/runtime/index.js:35:15)",
" at process.emit (events.js:210:5)",
" at process.EventEmitter.emit (domain.js:476:20)",
" at processPromiseRejections (internal/process/promises.js:201:33)",
" at processTicksAndRejections (internal/process/task_queues.js:94:32)"
]
}
I tried refactoring it to use Object.keys as well, but it's still performing oddly.
function isFileLoaderParams(params: any): params is FileLoaderParams {
  logger.info(`INPUT: ${params}`);
  if (!params.fileId) {
    logger.warn('FileId not found');
    logger.info(`DoubleCheck: ${JSON.stringify(params)}`);
    logger.info(`Value Is: ${JSON.stringify(params.fileId)}`);
    logger.info(`ArrayAccess?: ${JSON.stringify(params['fileId'])}`);
    return false;
  }
  if (!params.location) {
    logger.warn('Location not found');
    return false;
  }
  if (!params.fileType) {
    logger.warn('FileType not found');
    return false;
  }
  return true;
}
Which generates this output:
INPUT: {"fileId":"1234","fileType": "TEST_FILE","location": "https://d4-file-proc-staging.s3.amazonaws.com/testFile.csv"}
FileId not found
DoubleCheck: "{\"fileId\":\"1234\",\"fileType\": \"TEST_FILE\",\"location\": \"https://d4-file-proc-staging.s3.amazonaws.com/testFile.csv\"}"
Value Is: undefined
ArrayAccess?: undefined
Configuration/environment details
Written in typescript
Compiled by tsc using TypeScript 8.3.0
Deployed using Serverless framework
Lambda Runtime: Node.js 12.x (Also ran it in Node 10, but upgraded to 12 trying to fix this)
That error means that params is a string, not an object. Perhaps your message is a double-encoded JSON string (a JSON string containing a JSON string). I'm not sure how it got there, but look for logic that is doing a JSON.stringify on something that's already JSON. Or, as @Titus mentioned, do a double-decode:
const params = JSON.parse(JSON.parse(message));
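A more defensive variant of that double-decode (my own sketch, not from the original answer) works whether the payload arrives single- or double-encoded:

let params = JSON.parse(message);
// A double-encoded payload yields a string on the first parse,
// so parse once more in that case.
if (typeof params === 'string') {
  params = JSON.parse(params);
}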
I was developing with ipfs-api when I encountered the following error: adding an image file to the IPFS node does not work.
Looking at the details of the error, it seems that protocol is treated as undefined in if (protocol.indexOf('https') === 0) { in request.js.
This is the error description:
Uncaught (in promise) TypeError: Cannot read property 'indexOf' of undefined
at webpackJsonp../node_modules/ipfs-api/src/utils/request.js.module.exports (request.js:7)
at requestAPI (send-request.js:165)
at send (send-request.js:196)
at send-files-stream.js:99
at Function.promisify (add.js:41)
at index.js:32
at Object.add (add.js:60)
at VueComponent._callee$ (HaikuCompose.vue?0664:118)
at tryCatch (runtime.js:62)
at Generator.invoke [as _invoke] (runtime.js:296)
This is the code I wrote:
import IPFS from "ipfs-api"

const ipfsConf = { host: process.env.IPFSHOST, port: process.env.IPFSPORT, protocol: process.env.IPFSPROTCOL }
const ipfs = new IPFS(ipfsConf)

export default {
  name: 'ipfstest',
  data() {
    return {
      file: null,
      buffer: null,
      ipfsHash: null,
    }
  },
  methods: {
    async addipfs() {
      await ipfs.add(this.buffer, (err, ipfsHash) => {
        console.log(err, ipfsHash);
        this.ipfsHash = ipfsHash[0].hash;
      })
    },
From the module's source, indexOf on line 7 of the request.js file is called on the variable storing the protocol, which is undefined in your case.
And from your code, I think I can safely assume that your environment variable process.env.IPFSPROTCOL is undefined.
TL;DR: I think you meant to write IPFSPROTOCOL instead of IPFSPROTCOL.
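A sketch of the corrected configuration, with fallbacks in case a variable is still unset (the fallback values are my assumptions, not from the answer):

const ipfsConf = {
  host: process.env.IPFSHOST || 'localhost',
  port: process.env.IPFSPORT || 5001,
  protocol: process.env.IPFSPROTOCOL || 'http' // note: IPFSPROTOCOL, not IPFSPROTCOL
}
const ipfs = new IPFS(ipfsConf)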
I have a local instance of MongoDB, started with --master, and I can see in the start log that a replica set oplog (file: /data/db/local.1) is created. It appears local.1 is updated every time I perform an insert().
I am trying to tail / stream the oplog with the following code...
MongoDB.connect('mongodb://localhost:27017/test', (e, db) => {
  if (e) { return console.log('Connect MongoDB Error', e); }
  console.log('Connected MongoDB');
  Server.DB = db;

  db.collection('oplog.rs').find({}).toArray((e, d) => {
    console.log('oplog.rs');
    console.log(e);
    console.log(d);
  })

  var updates = db.collection('oplog.rs').find({}, {
    tailable: true,
    awaitData: true,
    noCursorTimeout: true,
    oplogReplay: true,
    numberOfRetries: Number.MAX_VALUE
  }).stream();

  updates.on('data', (data) => { console.log('data', data); });
  updates.on('error', (e) => { console.log('error', e); });
  updates.on('end', (end) => { console.log('end', end); });
});
The code logs the following on start...
oplog.rs
null
[]
However, there is no output from the stream. If I set numberOfRetries to a lower value, I get this error:
error { MongoError: No more documents in tailed cursor
at Function.MongoError.create (/Users/peter/node_modules/mongodb-core/lib/error.js:31:11)
at nextFunction (/Users/peter/node_modules/mongodb-core/lib/cursor.js:637:50)
at /Users/peter/node_modules/mongodb-core/lib/cursor.js:588:7
at queryCallback (/Users/peter/node_modules/mongodb-core/lib/cursor.js:211:18)
at Callbacks.emit (/Users/peter/node_modules/mongodb-core/lib/topologies/server.js:116:3)
at Connection.messageHandler (/Users/peter/node_modules/mongodb-core/lib/topologies/server.js:282:23)
at Socket.<anonymous> (/Users/peter/node_modules/mongodb-core/lib/connection/connection.js:273:22)
at emitOne (events.js:96:13)
at Socket.emit (events.js:189:7)
at readableAddChunk (_stream_readable.js:176:18)
name: 'MongoError',
message: 'No more documents in tailed cursor',
tailable: true,
awaitData: true }
end undefined
oplog.rs is used by replica sets. By starting mongod with --master you are using the long-deprecated master-slave replication, which uses the local.oplog.$main collection, so there is nothing in oplog.rs.
You need to start mongod with --replSet to benefit from the replica set oplog. Read more on how to deploy and configure replica sets.
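For example, converting the standalone instance to a one-node replica set might look like this (a sketch; the set name and dbpath are placeholders):

# restart mongod as a one-node replica set instead of --master
mongod --replSet rs0 --dbpath /data/db

# then initialize the set once from the mongo shell
mongo --eval "rs.initiate()"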