const stream = require('getstream');
// newsfeed stream
const client = stream.connect('YOUR_API_KEY', 'YOUR_API_SECRET');
var user1 = client.feed('user', 'user1');
// Add an activity; 'message' is a custom field - tip: you can add unlimited custom fields!
var now = new Date();
user1.addActivity({
  actor: 'user1',
  verb: 'add',
  object: 'picture:10',
  foreign_id: 'picture:10',
  message: 'Beautiful bird!', // example custom field
  time: now.toISOString(),
});
// jack's 'timeline' feed follows user1's 'user' feed:
var jack = client.feed('timeline', 'jack');
jack.follow('user', 'user1');
// Read jack's 'timeline' - the post by user1 will show up:
jack.get({ limit: 10 }).then(function(response) {
  var activityData = response.results;
  // Read the next page, using id filtering for optimal performance:
  jack.get({ limit: 10, id_lte: activityData[activityData.length - 1].id }).then(function(response) {
    var nextActivityData = response.results;
  });
});
// Remove activity by referencing foreign_id:
user1.removeActivity({ foreign_id: 'picture:10' });
In this example I'm using the code provided to me to create a newsfeed with getstream.io. I haven't done anything like this before, so I don't know where to start.
We have a Node-specific "Pinterest" example app you can use to get started: https://github.com/GetStream/Stream-Example-Nodejs
I also recommend you check out our blog post with tips for making a news feed that's really engaging: https://getstream.io/blog/13-tips-for-a-highly-engaging-news-feed/
How can I get a notice when a tweet is deleted using the Twitter API?
In version 1.0 of the API, I was able to get the notification in a stream using this:
var Twit = require("twit");
var T = new Twit({
consumer_key: "555",
consumer_secret: "555",
access_token: "555",
access_token_secret: "555",
timeout_ms: 60 * 1000,
strictSSL: true
});
var userIds = [ "123", "456" ];
var stream = T.stream("statuses/filter", { follow: userIds.join(",") });
stream.on("delete", (x) => console.log("Tweet was deleted", x));
However, the delete events stopped being streamed without notice.
So now I'm trying to do it with v2 of the Twitter API, like this:
const BEARER_TOKEN = "555";
const { ETwitterStreamEvent, TweetStream, TwitterApi, ETwitterApiError } = require("twitter-api-v2");
const appClient = new TwitterApi(BEARER_TOKEN);
const stream = await appClient.v2.getStream("tweets/compliance/stream", { partition: 1 });
stream.on(ETwitterStreamEvent.Data, (x) => console.log("Got data", x));
The call to getStream() throws the following error:
data: {
  client_id: '555',
  detail: 'When authenticating requests to the Twitter API v2 endpoints, you must use keys and tokens from a Twitter developer App that is attached to a Project. You can create a project via the developer portal.',
  registration_url: 'https://developer.twitter.com/en/docs/projects/overview',
  title: 'Client Forbidden',
  required_enrollment: 'Standard Basic',
  reason: 'client-not-enrolled',
  type: 'https://api.twitter.com/2/problems/client-forbidden'
}
I also tried using an app-only login, like this:
const TWITTER_CLIENT = new TwitterApi({
  appKey: CONSUMER_KEY,
  appSecret: CONSUMER_SECRET,
  accessToken: ACCESS_TOKEN,
  accessSecret: ACCESS_TOKEN_SECRET
});
var appClient = await TWITTER_CLIENT.appLogin();
That throws the same error as above.
Using v2's getStream() with /tweets/search/stream/ does return an event when a tweet is created, but not when it is deleted. It is also limited to 5 rules per stream, with each rule at most 512 characters long, which won't cover all the screen names I currently track with the v1.0 API.
I also tried using compliance jobs, but they take a very long time and end up returning an empty array anyway, instead of any info about the tweet IDs I provided:
var job = await appClient.v2.sendComplianceJob({
  type: "tweets",
  ids: [
    // the ids are not from my dev account or from
    // an account that authed my app
    "555", // id of a tweet I deleted
    "123", // id of a tweet I deleted
    "456", // id of a tweet I didn't delete
  ]
});
// takes 5-10 minutes to say it's complete
var jobResults = await appClient.v2.complianceJobResult(job.data);
// echoes: jobResults []
console.log("jobResults", jobResults);
How can I get a stream event when a tweet is deleted (by any specific user I choose) using the v2 Twitter API?
Unfortunately, Twitter has deprecated the "User deletes a Tweet" event type.
The only other option is to save all tweets for the accounts you are tracking in your database, then compare them against the current tweets using the lookup API.
However, you can only check 100 tweets per request, so you will need a job that loops over your saved IDs 100 at a time and reports any tweet that was deleted.
ids: A comma separated list of Tweet IDs. Up to 100 are allowed in a single request. Make sure to not include a space between commas and fields.
source: https://developer.twitter.com/en/docs/twitter-api/tweets/lookup/api-reference/get-tweets#tab0
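If it helps, here is a rough sketch of that polling job using twitter-api-v2's tweet lookup endpoint. Assumptions: trackedTweetIds comes from your own database, and an ID missing from the lookup response is treated as deleted (strictly, it can also mean the tweet became unavailable for another reason, e.g. a protected or suspended account):
const { TwitterApi } = require("twitter-api-v2");
const appClient = new TwitterApi(BEARER_TOKEN); // app-only bearer token

// trackedTweetIds: the tweet IDs previously saved in your database
async function findDeletedTweets(trackedTweetIds) {
  const deleted = [];
  // the lookup endpoint accepts at most 100 IDs per request
  for (let i = 0; i < trackedTweetIds.length; i += 100) {
    const batch = trackedTweetIds.slice(i, i + 100);
    const result = await appClient.v2.tweets(batch);
    const foundIds = new Set((result.data || []).map((t) => t.id));
    for (const id of batch) {
      if (!foundIds.has(id)) deleted.push(id); // missing -> deleted (or unavailable)
    }
  }
  return deleted;
}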
I am a beginner in backend development, and I have this array called movies. I use Express.js, and I want to save the array to MongoDB. I will use MongoDB Atlas for my database. I appreciate your help.
I tried to follow this instruction on this website:
https://medium.com/@lavitr01051977/node-express-js-aea19636a500
I ignored the first steps and started from the "Connecting to the Database" section, but it doesn't work.
var express = require('express')
var app = express()

app.get('/', (req, res) => {
  res.send('ok')
});

// movies array
const movies = [
  { title: 'Jaws', year: 1975, rating: 8 },
  { title: 'Avatar', year: 2009, rating: 7.8 },
  { title: 'Brazil', year: 1985, rating: 8 },
  { title: 'الإرهاب والكباب', year: 1992, rating: 6.2 }
]

// read the movies array
app.get('/movies/read/', (req, res) => {
  res.send({ status: 200, data: movies })
})

// add an element to the movies array
app.get('/movies/add', (req, res) => {
  var t = req.query.title
  var y = req.query.year
  var r = req.query.rating
  if (t == undefined || y == undefined || y.length > 4 || isNaN(y)) {
    // return here so we don't fall through and send a second response
    return res.send({ status: 403, error: true, message: 'you cannot create a movie without providing a title and a year' })
  }
  if (r == undefined || r == "") {
    r = 4
  }
  movies.push({ title: t, year: y, rating: r })
  res.send({ status: 200, data: movies })
})

// delete an element from the movies array
app.get('/movies/delete/:ID', (req, res) => {
  var d = req.params.ID
  // IDs are 1-based, so the last valid ID equals movies.length
  if (d > 0 && d <= movies.length) {
    movies.splice(d - 1, 1)
    res.send({ status: 200, message: movies })
  } else {
    res.send({ status: 404, error: true, message: 'the movie <ID> does not exist' })
  }
})

// update an element in the movies array
app.get('/movies/update/:ID', (req, res) => {
  let c = req.params.ID
  let x = req.query.title
  let y = req.query.year
  let z = req.query.rating
  // only overwrite a field when the query parameter was provided and non-empty
  function update(a, b) {
    if (a != undefined && a != "") {
      movies[c - 1][b] = a
    }
  }
  if (c > 0 && c <= movies.length) {
    update(x, 'title')
    update(y, 'year')
    update(z, 'rating')
    res.send({ status: 200, message: movies })
  } else {
    res.send({ status: 404, error: true, message: 'the movie <ID> does not exist' })
  }
})

app.listen(3000, () => console.log('listening on port 3000'))
I expect the result to look like the example in the medium.com link above.
mongoose is a library that facilitates interacting with MongoDB. You basically never want to write all the validation, casting, and logic boilerplate on your own, so why reinvent the wheel?
And since you're a beginner, don't be afraid of frameworks. There are many useful frameworks for many areas of backend and frontend development that make life easier for you.
The article you shared is self-explanatory, but I will sum up only the database part for you (I won't go deep into your code, no donkey work; the rest is up to you):
1) First of all, install mongoose:
npm install mongoose
The article uses --save, which there is no need to add anymore, as "npm install saves any specified packages into dependencies by default." (ref.)
2) To be able to access and use mongoose, you need to import it, the Node way, that is with require():
const express = require('express');
const mongoose = require('mongoose');
const bodyParser = require('body-parser');

const app = express();
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: true }));
And what's body-parser there for?
When dealing with a database in Express, you'll sooner or later come across body-parsing errors.
The reason we need one at all is that Express does not parse incoming request bodies by default, so req.body would otherwise be undefined.
Also, recent versions of Express have their own body parser now, so you can use app.use(express.json()) instead of app.use(bodyParser.json()).
Important: the body parser must be registered before your routes.
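For instance, a minimal sketch of that ordering, using the built-in parsers mentioned above:
const express = require('express');
const app = express();

// body parsing first...
app.use(express.json());
app.use(express.urlencoded({ extended: true }));

// ...then any routes that read req.body
app.post('/movies/add', (req, res) => {
  res.send({ status: 200, data: req.body });
});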
3) Use mongoose.connect(url).
The url argument is what you find in your MongoDB Atlas dashboard:
Location: Clusters tab -> Connect -> Connect your application -> Driver: Node.js
which gives you something like this:
mongodb+srv://<user>:<password>@<cluster>.mongodb.net/test?retryWrites=true&w=majority
Important: use the username and password of the database user you created under the Database Access tab, not your own Atlas login.
You can set up environment variables to secure sensitive and changeable data, but I prefer using a config.js for simplicity, which usually resides in the root of the app.
Not only can you secure such values (e.g. by adding the file to .gitignore), but you can also easily modify them, since they tend to change from one environment to another; they live in one place that's easy to find, instead of being scattered all over your app.
For .env file approach, read this article.
Important: in case you want to put your code on GitHub or anywhere online (which is one reason we use config.js), don't forget to add this file to .gitignore so such sensitive data doesn't get leaked and exposed to others online.
In config.js you can do so:
exports.username = 'your user';
exports.pass = 'your pass';
exports.myCluster = "your cluster's name";
Then import them like so:
const { username, pass, myCluster } = require('./config'); <- the path might be different for you!
Tip: you can use back-ticks (` `) to easily insert those variables into const url through interpolation.
That is:
const url = `mongodb+srv://${username}:${pass}@${myCluster}.mongodb.net/test?retryWrites=true&w=majority`
Important: make sure to whitelist your IP on the MongoDB (Atlas) side, otherwise you will get a connection error.
Under Security: Network Access -> IP Whitelist
You could use 0.0.0.0/0 to whitelist all IPs.
Also, when using a VPN, your IP will change too.
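Putting steps 2 and 3 together, a minimal connection sketch might look like this (the option flags match the mongoose 5 era of the article; newer versions simply ignore them):
const mongoose = require('mongoose');
const { username, pass, myCluster } = require('./config');

const url = `mongodb+srv://${username}:${pass}@${myCluster}.mongodb.net/test?retryWrites=true&w=majority`;

mongoose
  .connect(url, { useNewUrlParser: true, useUnifiedTopology: true })
  .then(() => console.log('connected to Atlas'))
  .catch(err => console.error('connection failed:', err.message));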
4) Last but not least, after connecting to the database, you need to define your schema:
const moviesSchema = new mongoose.Schema({
title: String,
year: Number,
rating: Number
});
And
const Movies = mongoose.model("Movies", moviesSchema);
Tip: a common mistake many newbies make is that they forget to use new:
new mongoose.Schema({...})
If you want to create your model in a separate file (which is the best practice), you will need to modify your const Movies like so:
module.exports = mongoose.model("Movies", moviesSchema);
Don't forget to add const mongoose = require('mongoose'); in that separate model file.
And wherever you want to use this model, you import it like so:
const Movies = require('../models/movies'); (the path may be different for your app)
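From there, saving one of the movies from the question could look roughly like this (a sketch, not a full route handler):
const Movies = require('../models/movies'); // path may differ for your app

const jaws = new Movies({ title: 'Jaws', year: 1975, rating: 8 });

jaws.save()
  .then(doc => console.log('saved with id', doc._id))
  .catch(err => console.error(err.message));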
The rest, my friend, is up to you. What you want to do with your database and how to use it.
Note to others: I put a lot of time and thought into writing this. Please, if you see something wrong or think you can add something, feel free to edit and improve my answer.
I would suggest you take a look at the mongoose library to interact with a Mongo database from Node.js.
However, in the code you've provided you're not interacting with any database. You would need to define a schema, and then you could save a new doc or perform any other action on your collection. Please follow a "Get started" guide on how to do it.
Hope it helps!
I'll explain step by step. NOTE THAT THIS PROCESS SHOULD BE RUN ONLY ONCE, SO YOU SHOULD ADD THIS CODE TO A SEPARATE MODULE AND RUN IT ONCE; otherwise you will keep adding more items to the db. Let's name this module
movies.js
//you need to connect to mongodb through mongoose.
const mongoose = require("mongoose");
mongoose
.connect("mongodb://127.0.0.1:27017/movies", { //will create movies db automatically
useNewUrlParser: true,
useCreateIndex: true
})
.catch(err => {
console.log(err.message);
process.exit(1);
})
.then(() => {
console.log("connected");
});
//next create a schema and model:
const movieSchema = new mongoose.Schema({
title: String,
year: Number,
rating: Number
});
const Movie = mongoose.model("Movie", movieSchema);
// create the movies array based on the Movie model so you can use the save() method
const movies = [
new Movie({ title: "Jaws", year: 1975, rating: 8 }),
new Movie({ title: "Avatar", year: 2009, rating: 7.8 }),
new Movie({ title: "Brazil", year: 1985, rating: 8 }),
new Movie({ title: "الإرهاب والكباب", year: 1992, rating: 6.2 })
];
// Last step: save them.
movies.map((p, index) => {
  p.save((err, result) => {
    // once the callback for the last item fires, disconnect from the db
    if (index === movies.length - 1) {
      console.log("DONE!");
      mongoose.disconnect();
    }
  });
});
map is an array method: it iterates over the array, and we save each item inside it. Once every item in the array is saved, we need to disconnect from the db. The callback passed to map takes 2 arguments: the first is each item inside the array, the second is that item's index. Indexes start from 0, so the last item's index in the movies array is 3 while the length of the array is 4; once index === 4 - 1 = 3, we know every item in the array has been saved.
Now run the code
node movies.js
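For what it's worth, newer mongoose versions can also insert the whole batch in one call with Model.insertMany, which avoids the index bookkeeping above (a sketch, assuming the same Movie model and db connection):
// insertMany casts and validates each item against the schema,
// so plain objects work here too - no need for new Movie(...)
Movie.insertMany(movies)
  .then(() => {
    console.log("DONE!");
    return mongoose.disconnect();
  })
  .catch(err => console.log(err.message));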
I'm working on a simple registration system using Firebase as a backend. I am successfully authenticating users and writing to the database. I have an index of courses and users with the following structure:
{
  courses: { // index of all the available courses
    key1: {
      title: "Course 1",
      desc: "This is a description string.",
      date: "2018-01-01T12:00:00Z",
      members: {
        user1: true,
        ...
      }
    },
    key2: { ... },
  },
  users: { // track individual user registrations
    user1: {
      key1: true,
      ...
    },
    user2: { ... }
  }
}
I have a cloud function that watches for the user to add a course; it builds an array of the corresponding course IDs, which will be used to look up the appropriate items in the courses node.
exports.listenForUserClasses = functions.database.ref('users/{userId}')
  .onWrite(event => {
    var userCourses = [];
    var ref = functions.database.ref('users/{userId}');
    for (var i = 0; i < ref.length; i++) {
      userCourses.push(ref[i]);
    }
    console.log(userCourses); // an array of ids under the user's node
  });
So, my question has two parts:
How can I build the updated object when the page is loaded?
How do I return the function to the client script?
Question 1: From the client side you want to get a reference to the database path, then listen for the child_added event. Keep the results in memory; the callback will fire whenever a child is added, and you can then update your UI.
var ref = db.ref("path/to/courses");
ref.on("child_added", function(snapshot, prevChildKey) {
  var newClass = snapshot.val();
});
If you are completely refreshing the page, you can always grab the data again from the database path by using the value event and calling once():
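For example, a minimal sketch:
var ref = db.ref("path/to/courses");
ref.once("value", function(snapshot) {
  snapshot.forEach(function(child) {
    var course = child.val(); // rebuild your course list from the full snapshot
  });
});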
Question 2: You don't. This is considered an asynchronous function. If you want a response from a function, set up an HTTP trigger instead and wait for the response from that function.
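If you go that route, a rough sketch of such an HTTP trigger (the endpoint name, query parameter, and response shape are all assumptions):
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp();

// hypothetical endpoint that returns the course IDs under a user's node
exports.getUserCourses = functions.https.onRequest((req, res) => {
  const userId = req.query.userId; // assumption: caller passes ?userId=...
  admin.database().ref(`users/${userId}`).once('value')
    .then(snapshot => {
      const userCourses = Object.keys(snapshot.val() || {});
      res.json({ userCourses });
    })
    .catch(err => res.status(500).send(err.message));
});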
I'm creating an app with a "Card stack" similar to Tinder, with a Firebase Realtime DB backend. Each card will be a new unread post, if the user runs out of new posts they will run out of cards. However I don't know the best way to structure the data for this. Could I store the ID of the read posts under the user, then as I watch the Posts feed I could filter out read posts client side?
That seems a bit messy and not a very good option performance wise. Are there better options?
EDIT: Rough code of what I'm thinking:
Data Example
posts:
  "-KibasdkjbSAdASd": {
    title: 'New Post',
    body: { ... }
  },
  "-KisadBVsdadSd": {
    title: 'New Post 2',
    body: { ... }
  },
  "-KibaFNQsicaASd": {
    title: 'New Post 3',
    body: { ... }
  }

users:
  "-KisadBVsdadSd": {
    name: 'Tom',
    readPosts: {
      "-KibasdkjbSAdASd": {
        title: 'New Post',
        body: { ... }
      },
      "-KisadBVsdadSd": {
        title: 'New Post 2',
        body: { ... }
      }
    }
  }
Code
const rootRef = firebase.database().ref();
const postRef = rootRef.child("posts");
const readPostRef = rootRef.child("users/" + uid + "/readPosts");

let readPosts = [];

// Get the initial list of read posts
readPostRef.once("value", function(snapshot) {
  readPosts = Object.keys(snapshot.val() || {});
});

// Update the read posts when one is added
readPostRef.on("child_added", function(snapshot) {
  readPosts.push(snapshot.key);
});

// Get the list of posts, filtered against the read-post array
postRef.on("value", function(snapshot) {
  snapshot.forEach(function(child) {
    if (!readPosts.includes(child.key)) {
      // Unread post
    }
  });
});
It depends on the order in which you show the cards to the user.
If you show them in a predictable order (e.g. from newest to oldest) you can just remember the ID of the last card the user saw.
If you show them in a random or personalized order you might indeed have to track precisely what cards each user has already seen.
I'm not sure why that would be messy or perform badly. So if you want a better option, you'll have to show how you'd implement the messy/slow option.
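For the predictable-order case, here's a sketch of what "remember the last card" could look like as a Realtime Database query (lastSeenKey is assumed to be saved under the user's node; push IDs sort chronologically, so ordering by key works):
var postsRef = firebase.database().ref("posts");

postsRef
  .orderByKey()
  .startAt(lastSeenKey) // startAt is inclusive, so fetch one extra
  .limitToFirst(11)
  .once("value", function(snapshot) {
    snapshot.forEach(function(child) {
      if (child.key !== lastSeenKey) {
        // unread post -> render a card for it
      }
    });
  });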
I'm running into this same design problem. I only see two options; perhaps there are others!
1) Download every post and ignore the ones that have been read. Expensive when you have a lot of posts.
2) Save a copy of every post to every user account and allow them to delete them once they have been read. Expensive when you have a lot of users.
Sorry if I'm not getting the terminology right. Here's what I currently have in my MongoDB user docs (db.users):
"liked" : [
"EBMKgrD4DjZxkxvfY",
"WJzAEF5EKB5aaHWC7",
"beNdpXhYLnKygD3yd",
"RHP3hngma9bhXJQ2g",
"vN7uZ2d6FSfzYJLmm",
"NaqAsFmMmnhqNbqbG",
"EqWEY3qkeJYQscuZJ",
"6wsrFW5pFdnQfoWMs",
"W4NmGXyha8kpnJ2bD",
"8x5NWZiwGq5NWDRZX",
"Qu8CSXveQxdYbyoTa",
"yLLccTvcnZ3D3phAs",
"Kk36iXMHwxXNmgufj",
"dRzdeFAK28aKg3gEX",
"27etCj4zbrKhFWzGS",
"Hk2YpqgwRM4QCgsLv",
"BJwYWumwkc8XhMMYn",
"5CeN95hYZNK5uzR9o"
],
And I am trying to migrate them to a new key that also captures the time the user liked the post:
"liked_time" : [
{
"postId" : "5CeN95hYZNK5uzR9o",
"likedAt" : ISODate("2015-09-23T08:05:51.957Z")
}
],
I am wondering if it might be possible to do this within the MongoDB shell, with a command that iterates over each user doc, then iterates over the liked array and $pushes each new postId and time.
Or would it be better to do this in JavaScript? I am using Meteor.
I almost got it working for individual users, but I want to know if I can do it for all users at once.
var user = Meteor.users.findOne({ username: "atestuser" });
var userLiked = user.liked;

userLiked.forEach(function(entry) {
  Meteor.users.update({ username: "atestuser" },
    { $push: { liked_times: { postId: entry, likedAt: new Date() } } });
  console.log(entry);
});
Still a bit of a newbie to MongoDB obviously......
Here is something I made real quick. You should run this on the server side: just put it into a file, e.g. "migrate.js", in the root of the meteor app and run the meteor app.
if (Meteor.isServer) {
  Meteor.startup(function () {
    var users = Meteor.users.find().fetch();
    users.forEach(function (doc) {
      doc.liked.forEach(function (postId) {
        Meteor.users.update(doc._id, { $push: { liked_times: { postId: postId, likedAt: new Date() } } });
      });
    });
    console.log('finished migrating');
  });
}
P.S. I didn't test it.
If this is a one-time migration, I would do something like this in a one-time js script:
Get all users
Iterate over each user
Get all likes
Iterate over them, get likedAt
var liked_times = _.collect(likes, function (likeId) {
  return {
    'postId': likeId,
    'likedAt': // get post liked time from like id.
  };
});
Insert the above in the collection of choice.
Note:
The above example makes use of lodash
I would rather just save likedAt as a timestamp.
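And if you do want the pure mongo-shell route the question asks about, a rough one-time sketch (note the original like times were never stored, so this stamps the migration time):
db.users.find({ liked: { $exists: true } }).forEach(function (user) {
  var likedTimes = user.liked.map(function (postId) {
    return { postId: postId, likedAt: new Date() };
  });
  // $set writes the whole array at once instead of one $push per like
  db.users.update({ _id: user._id }, { $set: { liked_time: likedTimes } });
});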