I am building a Quasar and Vue.js app and I want to add a MongoDB API with an Express server. There is a /src-ssr/ directory with an index.js file containing basic Express app routing:
/*
* This file runs in a Node context (it's NOT transpiled by Babel), so use only
* the ES6 features that are supported by your Node version. https://node.green/
*
* WARNING!
* If you import anything from node_modules, then make sure that the package is specified
* in package.json > dependencies and NOT in devDependencies
*
* Note: This file is used only for PRODUCTION. It is not picked up while in dev mode.
* If you are looking to add common DEV & PROD logic to the express app, then use
* "src-ssr/extension.js"
*/
console.log("got here!") // I added
const express = require("express"),
  compression = require("compression");
const ssr = require("quasar-ssr"),
  extension = require("./extension"),
  app = express(),
  port = process.env.PORT || 3000;

const serve = (path, cache) =>
  express.static(ssr.resolveWWW(path), {
    maxAge: cache ? 1000 * 60 * 60 * 24 * 30 : 0
  });
// gzip
app.use(compression({ threshold: 0 }));
// serve this with no cache, if built with PWA:
if (ssr.settings.pwa) {
  app.use("/service-worker.js", serve("service-worker.js"));
}
// serve "www" folder
app.use("/", serve(".", true));
// we extend the custom common dev & prod parts here
extension.extendApp({ app, ssr });
// this should be last get(), rendering with SSR
app.get("*", (req, res) => {
res.setHeader("Content-Type", "text/html");
// SECURITY HEADERS
// read more about headers here: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers
// the following headers help protect your site from common XSS attacks in browsers that respect headers
// you will probably want to use .env variables to drop in appropriate URLs below,
// and potentially look here for inspiration:
// https://ponyfoo.com/articles/content-security-policy-in-express-apps
// https://developer.mozilla.org/en-us/docs/Web/HTTP/Headers/X-Frame-Options
// res.setHeader('X-frame-options', 'SAMEORIGIN') // one of DENY | SAMEORIGIN | ALLOW-FROM https://example.com
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/X-XSS-Protection
// res.setHeader('X-XSS-Protection', 1)
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/X-Content-Type-Options
// res.setHeader('X-Content-Type-Options', 'nosniff')
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Origin
// res.setHeader('Access-Control-Allow-Origin', '*') // one of '*', '<origin>' where origin is one SINGLE origin
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/X-DNS-Prefetch-Control
// res.setHeader('X-DNS-Prefetch-Control', 'off') // may be slower, but stops some leaks
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Security-Policy
// res.setHeader('Content-Security-Policy', 'default-src https:')
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Security-Policy/sandbox
// res.setHeader('Content-Security-Policy', 'sandbox') // this will lockdown your server!!!
// here are a few that you might like to consider adding to your CSP
// object-src, media-src, script-src, frame-src, unsafe-inline
ssr.renderToString({ req, res }, (err, html) => {
if (err) {
if (err.url) {
res.redirect(err.url);
} else if (err.code === 404) {
console.log(404,'!!')
res.status(404).send("404 | Page Not Found foo bar"); // I added foo bar
} else {
// Render Error Page or Redirect
res.status(500).send("500 | Internal Server Error");
if (ssr.settings.debug) {
console.error(`500 on ${req.url}`);
console.error(err);
console.error(err.stack);
}
}
} else {
res.send(html);
}
});
});
app.listen(port, () => {
  console.log(`Server listening at port ${port}`);
});
But none of my logs or changes show up when I run $ quasar dev -m ssr.
Also, the `Server listening at port ${port}` message is not showing.
Need your help!
quasar version 1.0.7
debian 10
src-ssr/index.js
From the comment notes, it seems this file is for production only and is not picked up in dev mode:
"Note: This file is used only for PRODUCTION"
You may want to use src-ssr/extension.js instead.
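For instance, a minimal sketch of adding an API route in src-ssr/extension.js; the /api/hello endpoint and its response are hypothetical placeholders, not part of the original setup:
module.exports.extendApp = function ({ app, ssr }) {
  // routes registered here run in BOTH dev and prod,
  // so they are picked up by `quasar dev -m ssr` too
  app.get("/api/hello", (req, res) => {
    res.json({ ok: true }); // hypothetical endpoint for illustration
  });
};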
I was trying to use the "gmail-tester" library to verify the account creation message.
https://www.npmjs.com/package/gmail-tester
It seems that I set everything up as it was supposed to be done. When my test finishes, I am supposed to get an assertion in Cypress such as this.
Instead, Cypress waits for a message for 30 seconds, then the browser crashes and I get this.
Does anyone know what would cause the problem?
I have managed to complete all steps mentioned in this tutorial:
https://levz0r.medium.com/how-to-poll-a-gmail-inbox-in-cypress-io-a4286cfdb888
cypress/plugins/index.js
/// <reference types="cypress" />
// ***********************************************************
// This example plugins/index.js can be used to load plugins
//
// You can change the location of this file or turn off loading
// the plugins file with the 'pluginsFile' configuration option.
//
// You can read more here:
// https://on.cypress.io/plugins-guide
// ***********************************************************
// This function is called when a project is opened or re-opened (e.g. due to
// the project's config changing)
/**
* @type {Cypress.PluginConfig}
*/
// eslint-disable-next-line no-unused-vars
const path = require("path");
const gmail = require("gmail-tester");
module.exports = (on, config) => {
  // `on` is used to hook into various events Cypress emits
  // `config` is the resolved Cypress config
  // ...
  on("task", {
    "gmail:check": async args => {
      const { from, to, subject } = args;
      const email = await gmail.check_inbox(
        path.resolve(__dirname, "credentials.json"), // credentials.json is inside plugins/ directory.
        path.resolve(__dirname, "gmail_token.json"), // gmail_token.json is inside plugins/ directory.
        subject,
        from,
        to,
        10, // Poll interval (in seconds)
        12 // Maximum poll interval (in seconds). If reached, return null, indicating the completion of the task().
      );
      return email;
    }
  });
};
testCase.spec.js
import Navigation from '../../../utils/navigation.spec'
import LoginPage from '../../../pageobject/login/login-page'

describe("New user registration", async function() {
  beforeEach(() => {
    cy.visit(Navigation.Login)
  })

  it.only("Reset Form: Email is delivered", function() {
    const test_id = new Date().getTime();
    const incoming_mailbox = `userautomatedtest+${test_id}@gmail.com`;
    // const password = uuidv1().split("-")[0];
    const login = new LoginPage();
    const username = "Cypress" + test_id;
    const password = "111#wZOO";

    login.registerButton()
      .usernameInput(username)
      .emailInput(incoming_mailbox)
      .firstNameInput("Name")
      .lastNameInput("Surname")
      .passwordInput(password)
      .repeatPasswordInput(password)
      .registerButton()

    // assert
    cy.contains('Registration succeeded').should('be.visible')

    cy.task("gmail:check", {
      from: "dev.mailer.no.reply@gmail.com",
      to: incoming_mailbox,
      subject: "Registration confirmation"
    })
    .then(email => {
      assert.isNotNull(email, `Email was not found`);
    });
  });
});
By the way, the documentation mentions that by changing this number we can adjust the wait time for checking email. In my case, I'm changing this value and nothing is happening.
This is some problem with the OAuth consent screen; probably the access granted is not correct, or the Gmail API isn't enabled.
Using the most recent version of this package, I had the same issue with the plugins/index.js crashing.
I solved this by adjusting the options parameter to match the gmail-tester package's check_inbox function.
module.exports = (on, config) => {
  on("task", {
    "gmail:check": async (args) => {
      const { from, to, subject } = args;
      const email = await gmail.check_inbox(
        path.resolve(__dirname, "credentials.json"),
        path.resolve(__dirname, "gmail_token.json"),
        {
          subject: subject,
          from: from,
          to: to,
          wait_time_sec: 10,
          max_wait_time_sec: 30,
        }
      );
      return email;
    },
  });
};
I have a Node.js API running on port 81, and want to hit the endpoint from JavaScript like this:
function fetchFromApi() {
  const axios = require('axios');
  console.log('using port 81', axios.defaults);
  axios.request({
    method: 'get',
    url: '/api/getAccountList',
    port: 81, // the port option is not valid - this does not have the desired result
  })
  .then(response => {
    console.log(response);
    const data = response.data;
    const errors = (data.errors) ? data.errors : false;
    if (errors) {
      setErrors(errors);
    }
  })
  .catch(reason => {
    console.log(reason);
  });
}
The Network tab in Chrome developer tools shows this request still went to port 80.
When I try to specify the entire protocol, port, host and URL in the axios request, I get a CORS error:
axios.get('http://localhost:81/api/getAccountList')
Error is:
Access to XMLHttpRequest at 'http://localhost:81/api/getAccountList'
from origin 'http://localhost' has been blocked by CORS policy: No
'Access-Control-Allow-Origin' header is present on the requested
resource.
My API server is a simple Node.js server:
const express = require('express');
const app = express();
const port = 81;
app.get('/api/getAccountList', (req, res) => {
  const userIdBy = req.params.userIdBy;
  const apiToken = req.params.apiToken;
  if (!(userIdBy && apiToken)) {
    res.status(200).json({ errors: ['Missing credentials'] });
    return true;
  }
  // check the user id and api token match up:
  console.log('Hello');
});
app.listen(port);
How can I make my client query the API using HTTP on port 81?
CORS is a security feature in most browsers that blocks cross-origin requests, i.e. requests from a different origin. To get past it, install the cors dependency on your Express server via npm:
npm install cors
Then add the cors() middleware to every route (for each HTTP method) you want to allow other domains to make requests to.
Try editing your code like this:
const express = require('express');
const cors = require('cors')
const app = express();
const port = 81;
app.get('/api/getAccountList', cors(), (req, res) => {})
On the client side, to get axios to GET from port 81 on the same host the JavaScript is running on, I used:
import axios from 'axios';
//...
//...
axios.defaults.baseURL = window.location.protocol + "//" + window.location.hostname + ":81";
const result = await axios('/your/endpoint');
//...
//...
Can you try to add this to your Node.js server?
// Add headers
app.use(function (req, res, next) {
  // Website you wish to allow to connect (the origin of the requesting page,
  // which per the error message above is http://localhost, not the API's own origin)
  res.setHeader('Access-Control-Allow-Origin', 'http://localhost');
  // Request methods you wish to allow
  res.setHeader('Access-Control-Allow-Methods', 'GET, POST, OPTIONS, PUT, PATCH, DELETE');
  // Request headers you wish to allow
  res.setHeader('Access-Control-Allow-Headers', 'X-Requested-With,content-type');
  // Set to true if you need the website to include cookies in the requests sent
  // to the API (e.g. in case you use sessions)
  res.setHeader('Access-Control-Allow-Credentials', true);
  // Pass to next layer of middleware
  next();
});
You can try adding only the Access-Control-Allow-Origin header, or modify the others to your needs.
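For comparison, a sketch of the same thing using the cors package's documented options; the origin value here assumes your page is served from http://localhost, as the error message above suggests:
const cors = require('cors');

// allow only the requesting page's origin; credentials enables cookies/sessions
app.use(cors({
  origin: 'http://localhost',
  methods: ['GET', 'POST', 'OPTIONS', 'PUT', 'PATCH', 'DELETE'],
  credentials: true
}));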
To achieve the required CORS protection AND avoid hard-coding the server's FQDN/hostname, I used this code in my node API server:
const express = require('express');
const app = express();
const apiProviderPort = 81;
const allowedApiConsumerPort = 80;
app.use(function (req, res, next) {
  const host = req.get('host'); // NOTE: host is fqdn:port
  const hostSplit = host.split(':');
  var fqdn;
  if (hostSplit.length == 1) {
    // I am not sure this is needed; it will be if host is fqdn[:port]
    fqdn = host;
  } else if (hostSplit.length == 2) {
    fqdn = hostSplit[0];
  } else {
    console.log('Error: the host contained multiple colons!');
  }
  console.log('protocol:', req.protocol, 'host:', host, 'fqdn:' + fqdn);
  // next line edited March 2020 - I changed + '//' + to + '://' +
  // as the developer tools console showed
  // The 'Access-Control-Allow-Origin' header contains the invalid value 'http//localhost:3000'.
  const allowableOrigin = req.protocol + '://' + fqdn + ':' + allowedApiConsumerPort;
  console.log('allowableOrigin:', allowableOrigin);
  res.setHeader('Access-Control-Allow-Origin', allowableOrigin);
  next();
});
app.get('/api/userDocReportData/', (req, res) => {
  const userIdBy = req.params.userIdBy;
  const apiToken = req.params.apiToken;
  if (!(userIdBy && apiToken)) {
    res.status(200).json({ errors: ['Missing credentials'] });
    return true;
  }
  // check the user id and api token match up:
  // ...
  // get your payload etc.
  res.status(200).json({ errors: false, payload: {} });
});

app.listen(apiProviderPort);
I enhanced @webprogrammer's answer above as I wanted code that could work in any environment (localhost, test.example.com, live.example.com, etc.)
I have deployed 2 Ubuntu servers on Azure. On the first, I installed Parse Server, and on the second, MongoDB. (I have also put a ready db there from my previous server via mongorestore.)
Everything works fine! Both the Parse Server and the MongoDB server. They also communicate well. The thing is, when I run my iOS app, it fetches all data correctly, except images. I print the URL of an image and here's what it returned: http://localhost:1337/parse/files/filename.jpeg
If I replace localhost with my server's IP, the image is fetched nicely!
Here's what I have on my index.js:
var express = require('express');
var ParseServer = require('parse-server').ParseServer;
var ParseDashboard = require('parse-dashboard');
var allowInsecureHTTP = true;
var path = require('path');
var databaseUri = process.env.DATABASE_URI || process.env.MONGODB_URI;
if (!databaseUri) {
  console.log('DATABASE_URI not specified, falling back to localhost.');
}

var api = new ParseServer({
  databaseURI: databaseUri || 'mongodb://IP:27017/db',
  cloud: './cloud/main.js',
  appId: process.env.APP_ID || 'xxx',
  masterKey: process.env.MASTER_KEY || 'xxx', // Add your master key here. Keep it secret!
  fileKey: 'xxx',
  serverURL: process.env.SERVER_URL || 'http://localhost:1337/parse', // Don't forget to change to https if needed
  // Enable email verification
  verifyUserEmails: false,
  // The public URL of your app.
  // This will appear in the link that is used to verify email addresses and reset passwords.
  // Set the mount path as it is in serverURL
  publicServerURL: 'http://localhost:1337/parse',
});
// Client-keys like the javascript key or the .NET key are not necessary with parse-server
// If you wish to require them, you can set them as options in the initialization above:
// javascriptKey, restAPIKey, dotNetKey, clientKey
var app = express();
// Serve static assets from the /public folder
app.use('/public', express.static(path.join(__dirname, '/public')));
// Serve the Parse API on the /parse URL prefix
var mountPath = process.env.PARSE_MOUNT || '/parse';
app.use(mountPath, api);
// Parse Server plays nicely with the rest of your web routes
app.get('/', function(req, res) {
  res.status(200).send('Make sure to star the parse-server repo on GitHub!');
});

// There will be a test page available on the /test path of your server url
// Remove this before launching your app
app.get('/test', function(req, res) {
  res.sendFile(path.join(__dirname, '/public/test.html'));
});

var port = process.env.PORT || 1337;
var httpServer = require('http').createServer(app);
httpServer.listen(port, function() {
  console.log('parse-server-example running on port ' + port + '.');
});
// Set up parse dashboard
var config = {
  "allowInsecureHTTP": true,
  "apps": [
    {
      "serverURL": "http://localhost:1337/parse",
      "appId": "xxx",
      "masterKey": "xxx",
      "appName": "name",
      "production": true
    }
  ],
  "users": [
    {
      "user": "username",
      "pass": "pass"
    }
  ]
};
var dashboard = new ParseDashboard(config, config.allowInsecureHTTP);
var dashApp = express();
// make the Parse Dashboard available at /dashboard
dashApp.use('/dashboard', dashboard);
// Parse Server plays nicely with the rest of your web routes
dashApp.get('/', function(req, res) {
  res.status(200).send('Parse Dashboard App');
});

var httpServerDash = require('http').createServer(dashApp);
httpServerDash.listen(4040, function() {
  console.log('dashboard-server running on port 4040.');
});
One thing I noticed in Parse's documentation is this: "When using files on Parse, you will need to use the publicServerURL option in your Parse Server config. This is the URL that files will be accessed from, so it should be a URL that resolves to your Parse Server. Make sure to include your mount point in this URL."
The thing is, this documentation was written assuming that MongoDB is on the same server as Parse, which in my case it isn't.
Any ideas on what to do?
I had to replace the publicServerURL of parse server's config, from http://localhost:1337/parse to http://publicIP:1337/parse and everything worked out great!
If you want to work with files (images) and download them, just use publicServerURL as mentioned by @Sotiris Kaniras.
I would add that the config.json is in ~/stack/parse/config.json. Also, here is the difference between serverURL and publicServerURL:
Difference between serverURL and publicServerURL on ParseServer
In my case, I needed to add the publicServerURL parameter alongside serverURL because it didn't exist yet.
So the two parameters (publicServerURL & serverURL) are complementary, not mutually exclusive; use them both.
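Putting the two answers together, a minimal sketch of the relevant part of the config (YOUR_PUBLIC_IP is a placeholder, not a value from the original posts):
var api = new ParseServer({
  // ...other options as above...
  serverURL: 'http://localhost:1337/parse',           // internal server-to-server calls
  publicServerURL: 'http://YOUR_PUBLIC_IP:1337/parse' // used to build file URLs returned to clients
});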
I'm using Yeoman, Grunt, and Bower to construct a platform for building a frontend independently of a backend. The idea is that all of my (AngularJS) controllers, services, factories, etc. live in this project, and get injected afterwards into my server-side codebase based on the result of grunt build.
My question is:
How can I mock endpoints so that the Grunt server responds to the same endpoints as my (Rails) App will?
At the moment I am using:
angular.module('myApp', ['ngResource'])
  .run(['$rootScope', function ($rootScope) {
    $rootScope.testState = 'test';
  }]);
And then in each of my individual services:
mockJSON = {'foo': 'myMockJSON'}
And on every method:
if ($rootScope.testState == 'test') {
  return mockJSON;
} else {
  // real service logic with $q/$http goes here
}
Then after grunt build, testState = 'test' gets removed.
This is clearly a relatively janky architecture. How can I avoid it? How can I have Grunt respond to the same endpoints as my app (some of which have dynamic params), apply some logic (if necessary), and serve out a JSON file (possibly dependent on path params)?
I've fixed this issue by using express to write a server that responds with static json.
First I created a directory in my project called 'api'. Within that directory I have the following files:
package.json:
{
  "name": "mockAPI",
  "version": "0.0.0",
  "dependencies": {
    "express": "~3.3.4"
  }
}
Then I run npm install in this directory.
index.js:
module.exports = require('./lib/server');
lib/server.js:
var express = require('express');
var app = express();

app.get('/my/endpoint', function(req, res) {
  res.json({'foo': 'myMockJSON'});
});

module.exports = app;
and finally in my global Gruntfile.js:
connect: {
  options: {
    port: 9000,
    hostname: 'localhost',
  },
  livereload: {
    options: {
      middleware: function (connect, options) {
        return [
          lrSnippet,
          mountFolder(connect, '.tmp'),
          mountFolder(connect, yeomanConfig.app),
          require('./api')
        ];
      }
    }
  }
},
Then the services make the requests, and the express server serves the correct JSON.
After grunt build, the express server is simply replaced by a rails server.
As of grunt-contrib-connect v0.7.0 you can also just add your custom middleware to the existing middleware stack without having to rebuild it manually.
livereload: {
  options: {
    open: true,
    base: [
      '.tmp',
      '<%= config.app %>'
    ],
    middleware: function(connect, options, middlewares) {
      // inject a custom middleware into the array of default middlewares
      middlewares.push(function(req, res, next) {
        if (req.url !== '/my/endpoint') {
          return next();
        }
        res.writeHead(200, { 'Content-Type': 'application/json' });
        res.end('{"foo": "myMockJSON"}'); // note: JSON requires double quotes
      });
      return middlewares;
    }
  }
},
See https://github.com/gruntjs/grunt-contrib-connect#middleware for the official documentation.
Alternatively, you can use grunt-connect-proxy to proxy everything that is missing from your test server to an actual backend.
It's quite easy to install; the one thing to remember when adding the proxy to your livereload connect middleware is to add it last, like this:
middleware: function (connect) {
  return [
    lrSnippet,
    mountFolder(connect, '.tmp'),
    mountFolder(connect, yeomanConfig.app),
    proxySnippet
  ];
}
grunt-connect-prism is similar to the Ruby project VCR. It provides an easy way for front end developers to record HTTP responses returned by their API (or some other remote source) and replay them later. It's basically an HTTP cache, but for developers working on a Single Page Application (SPA). You can also generate stubs for API calls that don't exist, and populate them the way you want.
It's useful for mocking complex & high latency API calls during development. It's also useful when writing e2e tests for your SPA only, removing the server from the equation. This results in much faster execution of your e2e test suite.
Prism works by adding a custom connect middleware to the connect server provided by the grunt-contrib-connect plugin. While in 'record' mode it will generate a file per response on the filesystem with content like the following:
{
  "requestUrl": "/api/ponies",
  "contentType": "application/json",
  "statusCode": 200,
  "data": {
    "text": "my little ponies"
  }
}
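For orientation, a rough sketch of what the Gruntfile configuration for prism might look like; the option names (mode, mocksPath, context, host, port) are my recollection of the plugin's README, so treat them as assumptions and check the project docs:
// in Gruntfile.js, next to the connect config (option names assumed, see note above)
prism: {
  options: {
    mode: 'record',       // 'record', 'mock' or 'proxy'
    mocksPath: './mocks', // where recorded responses are stored
    context: '/api',      // only requests under this path are intercepted
    host: 'localhost',
    port: 8090            // the real API being recorded/proxied
  }
}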
DISCLAIMER: I'm the author of this project.
You can use an Apache proxy to connect your REST server with gruntjs.
Apache would do this:
proxy / -> gruntjs
proxy /service -> REST server
Your application would hit Apache, and the angular.js application would think it is talking with itself, so there is no cross-domain problem.
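A minimal sketch of such an Apache vhost using mod_proxy; the ports (9000 for grunt, 8080 for the REST server) are assumptions, adjust to your setup:
<VirtualHost *:80>
    # /service goes to the REST server (the more specific path must come first)
    ProxyPass        /service http://localhost:8080/service
    ProxyPassReverse /service http://localhost:8080/service

    # everything else goes to the gruntjs connect server
    ProxyPass        / http://localhost:9000/
    ProxyPassReverse / http://localhost:9000/
</VirtualHost>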
Here is a great tutorial on how to set this up:
http://alfrescoblog.com/2014/06/14/angular-js-activiti-webapp-with-activiti-rest/
Just my alternative way, based on Abraham P's answer. It does not need express installed within the 'api' folder, and I can separate the mock services into individual files. For example, my 'api' folder contains 3 files:
api/
  index.js   // assign all the "modules" and then simply require that.
  user.js    // all mocking for user
  product.js // all mocking for product
file user.js
var user = function(req, res, next) {
  if (req.method === 'POST' && req.url.indexOf('/user') === 0) {
    res.end(
      JSON.stringify({
        'id': '5463c277-87c4-4f1d-8f95-7d895304de12',
        'role': 'admin'
      })
    );
  } else {
    next();
  }
};

module.exports = user;
file product.js
var product = function(req, res, next) {
  if (req.method === 'POST' && req.url.indexOf('/product') === 0) {
    res.end(
      JSON.stringify({
        'id': '5463c277-87c4-4f1d-8f95-7d895304de12',
        'name': 'test',
        'category': 'test'
      })
    );
  } else {
    next();
  }
};

module.exports = product;
index.js just assigns all the "modules" and we simply require that.
module.exports = {
  product: require('./product.js'),
  user: require('./user.js')
};
My Gruntfile.js file
connect: {
  options: {
    port: 9000,
    // Change this to '0.0.0.0' to access the server from outside.
    hostname: 'localhost',
    livereload: 35729
  },
  livereload: {
    options: {
      open: true,
      middleware: function (connect) {
        return [
          connect.static('.tmp'),
          connect().use(
            '/bower_components',
            connect.static('./bower_components')
          ),
          connect.static(appConfig.app),
          require('./api').user,
          require('./api').product
        ];
      }
    }
  }
}
I have an application up and running on Heroku with Express.js on Node.js with https. How do I identify the protocol to force a redirect to https with Node.js on Heroku?
My app is just a simple http-server, it doesn't (yet) realize Heroku is sending it https-requests:
// Heroku provides the port they want you on in this environment variable (hint: it's not 80)
app.listen(process.env.PORT || 3000);
As of today, 10th October 2014, using Heroku Cedar stack, and ExpressJS ~3.4.4, here is a working set of code.
The main thing to remember here is that we ARE deploying to Heroku. SSL termination happens at the load balancer, before encrypted traffic reaches your node app. It is possible to test whether https was used to make the request with req.headers['x-forwarded-proto'] === 'https'.
We don't need to concern ourselves with having local SSL certificates inside the app etc. as you might in other hosting environments. However, you should get an SSL Add-On via Heroku Add-ons first if using your own certificate, sub-domains, etc.
Then just add the following to do the redirect from anything other than HTTPS to HTTPS.
This is very close to the accepted answer above, but:
- Ensures you use "app.use" (for all actions, not just get)
- Explicitly externalises the forceSsl logic into a declared function
- Does not use '*' with "app.use" - this actually failed when I tested it.
Here, I only want SSL in production. (Change as suits your needs.)
Code:
var express = require('express'),
  env = process.env.NODE_ENV || 'development';

var app = express(); // note: the app must be created before app.configure is called

var forceSsl = function (req, res, next) {
  if (req.headers['x-forwarded-proto'] !== 'https') {
    return res.redirect(['https://', req.get('Host'), req.url].join(''));
  }
  return next();
};

app.configure(function () {
  if (env === 'production') {
    app.use(forceSsl);
  }
  // other configurations etc. for express go here...
});
Note for SailsJS (0.10.x) users. You can simply create a policy (enforceSsl.js) inside api/policies:
module.exports = function (req, res, next) {
  'use strict';
  if ((req.headers['x-forwarded-proto'] !== 'https') && (process.env.NODE_ENV === 'production')) {
    return res.redirect([
      'https://',
      req.get('Host'),
      req.url
    ].join(''));
  } else {
    next();
  }
};
Then reference from config/policies.js along with any other policies, e.g:
'*': ['authenticated', 'enforceSsl']
The answer is to use the 'x-forwarded-proto' header that Heroku passes forward as it does its proxy thingamabob. (Side note: they pass several other x- variables too that may be handy; check them out.)
My code:
/* At the top, with other redirect methods before other routes */
app.get('*', function(req, res, next) {
  if (req.headers['x-forwarded-proto'] != 'https')
    res.redirect('https://mypreferreddomain.com' + req.url)
  else
    next() /* Continue to other routes if we're not redirecting */
})
Thanks Brandon, was just waiting for that 6 hour delay thing that wouldn't let me answer my own question.
The accepted answer has a hardcoded domain in it, which isn't too good if you have the same code on several domains (e.g. dev-yourapp.com, test-yourapp.com, yourapp.com).
Use this instead:
/* Redirect http to https */
app.get("*", function (req, res, next) {
if ("https" !== req.headers["x-forwarded-proto"] && "production" === process.env.NODE_ENV) {
res.redirect("https://" + req.hostname + req.url);
} else {
// Continue to other routes if we're not redirecting
next();
}
});
https://blog.mako.ai/2016/03/30/redirect-http-to-https-on-heroku-and-node-generally/
I've written a small node module that enforces SSL on express projects. It works both in standard situations and in case of reverse proxies (Heroku, nodejitsu, etc.)
https://github.com/florianheinemann/express-sslify
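For reference, a minimal usage sketch following the module's README; trustProtoHeader tells it to trust Heroku's x-forwarded-proto header rather than inspecting the connection itself:
var express = require('express');
var enforce = require('express-sslify');

var app = express();

// Heroku terminates TLS at the router, so trust the x-forwarded-proto header
app.use(enforce.HTTPS({ trustProtoHeader: true }));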
If you want to test out the x-forwarded-proto header on your localhost, you can use nginx to setup a vhost file that proxies all of the requests to your node app. Your nginx vhost config file might look like this
NginX
server {
    listen 80;
    listen 443;
    server_name dummy.com;

    ssl on;
    ssl_certificate /absolute/path/to/public.pem;
    ssl_certificate_key /absolute/path/to/private.pem;

    access_log /var/log/nginx/dummy-access.log;
    error_log /var/log/nginx/dummy-error.log debug;

    # node
    location / {
        proxy_pass http://127.0.0.1:3000/;
        proxy_set_header Host $http_host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
    }
}
The important bits here are that you are proxying all requests to localhost port 3000 (this is where your node app is running) and you are setting up a bunch of headers including X-Forwarded-Proto
Then in your app detect that header as usual
Express
var app = express()
  .use(function (req, res, next) {
    if (req.header('x-forwarded-proto') == 'http') {
      res.redirect(301, 'https://' + 'dummy.com' + req.url)
      return
    }
    next()
  })
Koa
var app = koa()
app.use(function* (next) {
  if (this.request.headers['x-forwarded-proto'] == 'http') {
    this.response.redirect('https://' + 'dummy.com' + this.request.url)
    return
  }
  yield next
})
Hosts
Finally you have to add this line to your hosts file
127.0.0.1 dummy.com
You should take a look at heroku-ssl-redirect. It works like a charm!
var sslRedirect = require('heroku-ssl-redirect');
var express = require('express');
var app = express();
// enable ssl redirect
app.use(sslRedirect());
app.get('/', function(req, res) {
  res.send('hello world');
});
app.listen(3000);
If you are using cloudflare.com as CDN in combination with heroku, you can enable automatic ssl redirect within cloudflare easily like this:
- Login and go to your dashboard
- Select Page Rules
- Add your domain, e.g. www.example.com, and switch "always use https" to on
Loopback users can use a slightly adapted version of arcseldon's answer as middleware:
server/middleware/forcessl.js
module.exports = function() {
  return function forceSSL(req, res, next) {
    var FORCE_HTTPS = process.env.FORCE_HTTPS || false;
    if (req.headers['x-forwarded-proto'] !== 'https' && FORCE_HTTPS) {
      return res.redirect(['https://', req.get('Host'), req.url].join(''));
    }
    next();
  };
};
server/server.js
var forceSSL = require('./middleware/forcessl.js');
app.use(forceSSL());
This is a more Express specific way to do this.
app.enable('trust proxy');
app.use('*', (req, res, next) => {
  if (req.secure) {
    return next();
  }
  res.redirect(`https://${req.hostname}${req.url}`);
});
I am using Vue and Heroku and had the same problem.
I updated my server.js as below, and I am not touching it anymore because it is working :)
const serveStatic = require('serve-static')
const sts = require('strict-transport-security');
const path = require('path')
var express = require("express");
require("dotenv").config();
var history = require("connect-history-api-fallback");
const app = express()
const globalSTS = sts.getSTS({'max-age':{'days': 365}});
app.use(globalSTS);
app.use(
  history({
    verbose: true
  })
);

app.use((req, res, next) => {
  if (req.header('x-forwarded-proto') !== 'https') {
    res.redirect(`https://${req.header('host')}${req.url}`);
  } else {
    next();
  }
});

app.use('/', serveStatic(path.join(__dirname, '/dist')));

app.get(/.*/, function (req, res) {
  res.sendFile(path.join(__dirname, '/dist/index.html'));
});
const port = process.env.PORT || 8080
app.listen(port)
console.log(`app is listening on port: ${port}`)
app.all('*', function(req, res, next) {
  if (req.headers['x-forwarded-proto'] != 'https') {
    res.redirect(`https://${req.get('host')}` + req.url);
  } else {
    next(); /* Continue to other routes if we're not redirecting */
  }
});
With app.use and a dynamic URL. Works both locally and on Heroku for me:
app.use(function (req, res, next) {
  if (req.header('x-forwarded-proto') === 'http') {
    res.redirect(301, 'https://' + req.hostname + req.url);
    return;
  }
  next();
});
Checking the protocol in the X-Forwarded-Proto header works fine on Heroku, just like Derek has pointed out. For what it's worth, here is a gist of the Express middleware that I use and its corresponding test.