I am trying to deploy my Node.js application to DigitalOcean. Locally my app works fine when I run:
node server.js
I cloned my repository from GitLab over SSH and tried doing the same thing, but the page just gets stuck loading and eventually says that my IP address took too long to respond:
ERR_CONNECTION_TIMED_OUT
Eventually I plan to use something to keep it running permanently, but I will ask about that in a separate post.
I normally use my IP address and port number (ip:port) to view my application.
This is my server.js file:
const express = require('express'),
app = express(),
path = require('path'),
socket = require('socket.io'),
emailModule = require('./email.js'),
formValidationModule = require('./formValidation.js'),
vimeoModule = require('./vimeo.js'),
port = process.env.PORT || 3000;
let server = app.listen(port, function(){
console.log('listening to requests...');
}),
io = socket(server);
app.get('/', function(req, res) {
res.sendFile(path.join(__dirname, 'index.html'));
});
io.on('connection', (socket)=>{
console.log('made a connection!');
socket.on('getShortFilmsInfo', ()=>{
vimeoModule.videos.short_films.forEach((y)=>{
vimeoModule.getVideoThumbnail(y).then((data)=>{
socket.emit('shortFilmsInfo', data);
});
})
});
socket.on('getCommercials',()=>{
vimeoModule.videos.commercials.forEach((x)=>{
vimeoModule.getVideoThumbnail(x).then((data)=>{
socket.emit('commercialInfo', data);
})
});
});
socket.on('email', (data)=>{
if(formValidationModule.checkEmptyContact(data.client, data.email, data.name,
data.title, data.message)){
socket.emit('invalidData');
}
else {
/**
* Set the email options here: the client's email, the destination email,
* the subject, and the text (the email content).
*/
emailModule.setMailOptions(data.email, /*'Info@project-gorilla.co.uk'*/ 'salay777@hotmail.co.uk',
data.client + ' (' + data.name + ') ' + data.title, data.message).then((mailOpts)=>{
emailModule.send(mailOpts);
});
console.log('email has been sent!');
}
});
});
The port on which you are running the Node application on DigitalOcean is blocked from outside access. Configure DigitalOcean to route any request on port 80 or 443 (the default ports for HTTP and HTTPS) to your internal Node.js application.
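If you just want to confirm that quickly, here is a minimal sketch, assuming you are willing to bind Node directly to port 80 for a test: ports below 1024 need root (e.g. sudo node server.js), and the droplet's firewall (ufw or a DigitalOcean cloud firewall) must allow the port. In production, a reverse proxy such as Nginx in front of the app on port 3000 is the more usual setup.
const port = process.env.PORT || 80;

// Bind explicitly to all interfaces so the app is reachable from outside the droplet
const server = app.listen(port, '0.0.0.0', function () {
    console.log('listening to requests on port ' + port + '...');
});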
Related
I'm trying to connect to a Parse server hosted on a VPS from a website served with Apache.
The website is https://example.com. At first, when I tried to connect to the Parse server in my JavaScript code, I did:
Parse.initialize("myAppId");
Parse.serverURL = 'http://ipOfVPS:1337/parse'
But I got a mixed content error: the page at '' was loaded over HTTPS ...
Then I changed the Parse server URL in the JavaScript to https://ipOfVPS:1337/parse, and on the backend I run the Parse server with HTTPS. Now when I load https://example.com, I get this error in Chrome:
net::ERR_CERT_AUTHORITY_INVALID and this error in Firefox:
Cross-Origin Request Blocked: The Same Origin Policy disallows reading
the remote resource at.
I would be thankful if anybody could help me with this issue.
Below I have pasted my index.js:
// Example express application adding the parse-server module to expose Parse
// compatible API routes.
var express = require('express');
var ParseServer = require('parse-server').ParseServer;
var path = require('path');
var databaseUri = process.env.DATABASE_URI || process.env.MONGODB_URI;
if (!databaseUri) {
console.log('DATABASE_URI not specified, falling back to localhost.');
}
var api = new ParseServer({
databaseURI: databaseUri || 'mongodb://localhost:27017/dev',
cloud: process.env.CLOUD_CODE_MAIN || __dirname + '/cloud/main.js',
appId: process.env.APP_ID || 'XXXX',
masterKey: process.env.MASTER_KEY || 'XXXX', //Add your master key here. Keep it secret!
serverURL: process.env.SERVER_URL || 'https://localhost:1337/parse', // Don't forget to change to https if needed
liveQuery: {
classNames: ["Message","Chats"] // List of classes to support for query subscriptions
},
push: {
android: {
apiKey: 'XXX'
}
}
});
// Client-keys like the javascript key or the .NET key are not necessary with parse-server
// If you wish to require them, you can set them as options in the initialization above:
// javascriptKey, restAPIKey, dotNetKey, clientKey
var fs = require('fs'); // needed below to read the TLS key and certificate
var options = {
key: fs.readFileSync('key.pem'),
cert: fs.readFileSync('cert.pem'),
requestCert: true,
//ca: fs.readFileSync('/etc/ssl/certs/ca.crt'),
rejectUnauthorized: false
};
var app = express();
// Serve static assets from the /public folder
app.use('/public', express.static(path.join(__dirname, '/public')));
// Serve the Parse API on the /parse URL prefix
var mountPath = process.env.PARSE_MOUNT || '/parse';
app.use(mountPath, api);
// Parse Server plays nicely with the rest of your web routes
app.get('/', function(req, res) {
res.status(200).send('I dream of being a website. Please star the parse-server repo on GitHub!');
});
// There will be a test page available on the /test path of your server url
// Remove this before launching your app
app.get('/test', function(req, res) {
res.sendFile(path.join(__dirname, '/public/test.html'));
});
var port = process.env.PORT || 1337;
var httpsServer = require('https').createServer(options,app);
httpsServer.listen(port, function() {
console.log('parse-server-example running on port ' + port + '.');
});
// This will enable the Live Query real-time server
ParseServer.createLiveQueryServer(httpsServer);
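One common way around both the mixed content and the certificate errors (an assumption based on typical setups, not something stated in the question) is to keep the Parse API on the same origin as the website, so the browser reuses the site's valid HTTPS certificate: have Apache reverse proxy a path such as /parse on https://example.com to the local Parse server on port 1337, and point the JavaScript client at that same-origin URL instead of the VPS IP.
// Hypothetical client-side configuration, assuming Apache proxies
// https://example.com/parse to http://localhost:1337/parse on the VPS
Parse.initialize("myAppId");
Parse.serverURL = 'https://example.com/parse';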
So I deployed my portfolio website to Heroku, but my contact form (which uses nodemailer) is not working. It's weird because when I try it from my computer I receive an email, but I hear from others that it is not working on their end. This is the code of my index.js:
var express = require('express');
var router = express.Router();
var nodemailer = require('nodemailer');
var cors = require('cors');
const creds = require('./config');
var transport = {
host: 'smtp.gmail.com',
port: 465,
auth: {
user: creds.USER,
pass: creds.PASS
}
}
var transporter = nodemailer.createTransport(transport)
transporter.verify((error, success) => {
if (error) {
console.log(error);
} else {
console.log('Server is ready to take messages');
}
});
router.post('/send', (req, res, next) => {
var name = req.body.name
var email = req.body.email
var message = req.body.message
var content = ` name: ${name} \n email: ${email} \n message: ${message} `
var mail = {
from: name,
to: 'js5360@columbia.edu',
subject: 'New Message from Contact Form',
text: content
}
transporter.sendMail(mail, (err, data) => {
if (err) {
res.json({
status: 'fail'
})
} else {
res.json({
status: 'success'
})
}
})
})
const app = express()
app.use(cors())
app.use(express.json())
app.use('/', router)
app.listen(3002)
Here's the handler function I used:
constructor(props) {
super(props);
this.state = {
name: "",
email: "",
message: "",
}
}
handleSubmit(e){
e.preventDefault();
axios({
method: "POST",
url:"http://localhost:3002/send",
data: this.state
}).then((response)=>{
if (response.data.status === 'success'){
alert("Message Sent.");
this.resetForm()
}else if(response.data.status === 'fail'){
alert("Message failed to send.")
}
})
}
resetForm(){
this.setState({name: "", email: "", message: ""})
}
Previously when I was working on localhost, I had already enabled access to less secure apps so it was working fine locally.
Admittedly, I don't know much about Express or nodemailer and followed the instructions outlined here: https://blog.mailtrap.io/react-contact-form/. I have a feeling that the URL I am sending GET/POST requests to is a local one, which prevents it from working on computers other than my own.
Could anyone provide some input into what I have to fix? Any help would be greatly appreciated :)
Try this,
https://accounts.google.com/b/0/DisplayUnlockCaptcha
I received an email from Google saying that my account was being accessed from the server's location; if you get one too, just tell Google not to block it.
This will allow machines to access your Gmail remotely.
Note: this will only work for a short period of time.
I'm actually having this same issue right now, but there are a few things that you might need to fix before getting to that point; the big one is environment variables.
Is your page still deploying to Heroku despite being served on port 3002? That is, does your content show up after Heroku builds it? I had that error, and if you're facing it you can solve it by replacing the hard-coded server port with process.env.PORT, which Heroku automatically assigns internally in production. See the server file I've pasted below.
const port = process.env.PORT || 3000;
...
app.listen(port, () => {
console.log(`Server is up on port ${port}!`);
});
That will take care of it.
Secondly, when deployed, the URL isn't going to be localhost anymore. It has to point to your domain suffixed by the API route: axios.post('www.example.com/send', object).
You can have that taken care of during the build by using dotenv environment variables, e.g.:
let URI = process.env.NODE_ENV === 'development' ? process.env.REACT_APP_DEV_URI : process.env.REACT_APP_PROD_URI;
and having a .env file in your root directory
REACT_APP_DEV_URI=http://localhost:3000
REACT_APP_PROD_URI=https://www.example.com
Then, in your React client:
axios.post(`${URI}/send-email`, data)
Look up the dotenv npm module and use it to pull different environment variables depending on whether the app is in development or production mode, via process.env.NODE_ENV.
Best practice is to .gitignore this file, as it can expose credentials publicly on GitHub. Heroku will ignore it anyway; environment variables have to be set via the CLI (you can copy and paste each one in, one at a time) or on Heroku's config page for adding these settings: https://devcenter.heroku.com/articles/config-vars
The same applies on the server side.
/**
* express.router() option? : https://stackoverflow.com/questions/61852261/nodemailer-not-working-on-heroku-deployment
*/
const express = require('express'),
bodyParser = require('body-parser'),
nodemailer = require('nodemailer'),
cors = require('cors'),
path = require('path'),
port = process.env.PORT || 3000;
require('dotenv').config();
let directory = process.env.NODE_ENV === 'development' ? 'public' : 'build',
publicPath = path.join(__dirname, '..', directory);
const app = express();
console.log(process.env.NODE_ENV)
app.use(cors());
app.use(express.static(publicPath));
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: true }));
app.get('*', (req, res) => {
res.sendFile(path.join(publicPath, 'index.html'));
});
app.post('/send-email', (req, res) => {
console.log('request: ', req.body)
let data = req.body;
let transporter = nodemailer.createTransport({
service: 'gmail',
port: 465,
auth: {
user: process.env.EMAIL,
pass: process.env.PASSWORD
}
});
let mailOptions = {
from: data.email,
to: process.env.EMAIL,
subject: `${data.subject}`,
html: `<p>${data.name}</p>
<p>${data.email}</p>
<p>${data.message}</p>`
};
// console.log('mailOptions are', mailOptions)
// sendMail expects the callback itself as the second argument, not an array
transporter.sendMail(mailOptions, (err, info) => {
if(err) {
res.send(err)
} else {
res.send(info.messageId)
}
transporter.close();
});
})
app.listen(port, () => {
console.log(`Server is up on port ${port}!`);
});
I'll keep you updated as to what I'm doing to get mine to work as well.
I was searching for a similar issue. When I sent contact mail from my local backend server on port 3000 it worked fine, but after pushing to Heroku it didn't work. The issue was related to the .env file.
By adding the .env values directly to "Config Vars" in the settings panel of the Heroku dashboard, I was able to successfully send an email.
EMAIL_ID=your@email.com
PASSWORD=yourpassword
i.e. key: EMAIL_ID
value: your@email.com
etc.
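For reference, the values set under Config Vars are read at runtime through process.env, exactly like values from a local .env file. A minimal sketch using the same EMAIL_ID and PASSWORD names as above:
const nodemailer = require('nodemailer');

// Credentials come from Heroku config vars (or a local .env file in development)
const transporter = nodemailer.createTransport({
    service: 'gmail',
    auth: {
        user: process.env.EMAIL_ID,
        pass: process.env.PASSWORD
    }
});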
All the best
I have socket.io set up and working, and now I need to send updates to the users via the same sockets. The updates come from a different server: the other server makes an HTTP GET request to my Node.js server, and I need to take the data from that request and emit it to a certain user via sockets.
Here's my code. Emitting from inside the socket handler works fine, but emitting from inside the API call doesn't work.
var express = require("express");
var app = express();
var http = require("http").createServer(app);
var io = require("socket.io")(http);
app.set('socketIo', io);
io.on("connection", (socket) => {
console.log("User connected: ", socket.id);
io.to(socket.id).emit("new_message", {id: "999", msg: "You are connected, your Id is " + socket.id});
})
app.get("/send/:id/:message", (request, result) => {
const ioEmitter = request.app.get("socketIo");
ioEmitter.to(request.params.id).emit({ id: "999", msg: request.params.message });
result.send("Sending message to socket Id: " + request.params.id)
console.log("Sending message to socket Id: " + request.params.id);
})
const port = 3001;
http.listen(port, () => {
console.log("Listening to port " + port);
});
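One thing worth checking in the route above (a hedged observation, since the client code isn't shown): Socket.IO's emit expects an event name as its first argument, so calling .emit() with only a payload object means no named event reaches the client. Reusing the "new_message" event from the connection handler would look like this:
app.get("/send/:id/:message", (request, result) => {
    const ioEmitter = request.app.get("socketIo");
    // emit(eventName, payload): the event name is what the client's
    // socket.on("new_message", ...) listener is waiting for
    ioEmitter.to(request.params.id).emit("new_message", {
        id: "999",
        msg: request.params.message
    });
    result.send("Sending message to socket Id: " + request.params.id);
    console.log("Sending message to socket Id: " + request.params.id);
});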
I want to make an API that will serve files with any extension.
Like this: http://localhost/download/[file].[extension]
Here is my code, but it intermittently gives this message: Can't set headers after they are sent.
var express = require('express');
var app = express();
app.get('/download/:fileName/:extension', function(req, res){
var file = __dirname + '/' + req.params.fileName + '.' + req.params.extension;
res.download(file, function(err){
if (err) {
res.sendStatus(404);
}
res.end();
});
});
var server = app.listen(3000, function () {
var host = server.address().address;
var port = server.address().port;
console.log('app listening at http://%s:%s', host, port);
});
res.download has already sent a response (though not always, in the case of an error).
You can fix this by doing:
res.download(file, function(err){
    if (err) {
        // Check if headers have been sent
        if (res.headersSent) {
            // You may want to log something here or do something else
        } else {
            return res.sendStatus(SOME_ERR); // 404, maybe 500 depending on err
        }
    }
    // Don't need res.end() here since already sent
});
Other changes called out in the comments above:
res.download uses res.sendFile internally, so you don't need res.end() after it.
res.download's documentation warns that you need to check res.headersSent when handling errors, as the headers may already have been sent, which would mean you can't change the status.
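Putting that together with the original route, a complete handler might look like this (a sketch; 404 is used here as one reasonable status for a missing file):
app.get('/download/:fileName/:extension', function (req, res) {
    var file = __dirname + '/' + req.params.fileName + '.' + req.params.extension;
    res.download(file, function (err) {
        if (err) {
            // Only change the status if nothing has been written to the response yet
            if (!res.headersSent) {
                res.sendStatus(404);
            }
        }
        // No res.end() here: res.download has already ended the response
    });
});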
I have the following code:
/* WEB SERVER AND SOCKET */
var express = require("express");
var app = express();
var port = process.env.PORT || 3010;
var io = require('socket.io').listen(app.listen(port));
io.configure(function () {
io.set("transports", [
'websocket'
, 'xhr-polling'
, 'flashsocket'
, 'htmlfile'
, 'jsonp-polling'
]);
io.set("polling duration", 10);
});
/* TWITTER STUFF */
t.track('hashtag');
t.track('sometag');
t.on('tweet', function (tweet) {
console.log('Tweet received from ' + tweet.user.name + ' (@' + tweet.user.screen_name + '): ' + tweet.text);
io.sockets.emit('tweet', {
user: {
name: tweet.user.name,
screen_name: tweet.user.screen_name,
profile_pic: tweet.user.profile_image_url
},
text: tweet.text,
time: tweet.created_at,
photo: tweet.media.media_url
});
});
When running this, if I tweet using the hashtag, I get the log but no socket message is sent. Also, any following tweets do nothing, which suggests that the emit function is hanging. In my log when starting the server, at the beginning I see:
info - socket.io started
info - FlashPolicyFileServer received an error event: listen EADDRINUSE
However, connections can still be made. Running netstat -tulpn | grep :3010 doesn't return anything, so I don't really get what the issue is. I'm running socket.io 0.9.16 and express 3.5.1.
Change this line:
var port = process.env.PORT || 3010;
to:
var port = 8080; // for testing
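If changing the port alone doesn't help, one other thing stands out in the question's tweet handler (a hedged observation, not a verified fix): assuming t delivers raw streaming API payloads, media lives under tweet.entities.media rather than tweet.media, so tweet.media.media_url throws right after the console.log and the emit never runs. A guarded version of the handler might look like this:
t.on('tweet', function (tweet) {
    console.log('Tweet received from ' + tweet.user.name +
        ' (@' + tweet.user.screen_name + '): ' + tweet.text);
    // Not every tweet carries media; guard the lookup so the handler never throws
    var media = tweet.entities && tweet.entities.media;
    io.sockets.emit('tweet', {
        user: {
            name: tweet.user.name,
            screen_name: tweet.user.screen_name,
            profile_pic: tweet.user.profile_image_url
        },
        text: tweet.text,
        time: tweet.created_at,
        photo: (media && media.length) ? media[0].media_url : null
    });
});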