Redirect Strapi admin index to admin login

I am currently looking for a way to create a permanent 301 redirect from the default Strapi index route (i.e. strapidomain.com) to the configured admin route (i.e. strapidomain.com/admin).
I have explored using a custom middleware by configuring the admin package:
Path: ./admin/middlewares/redirect/index.js
const path = require('path');

module.exports = strapi => {
  return {
    initialize: function (cb) {
      strapi.router.get('/', (ctx) => {
        ctx.redirect(strapi.config.get('server.admin.url', '/admin'));
      });
    }
  };
};
I then activated the custom middleware with:
Path: ./admin/config/middleware.js
module.exports = {
  settings: {
    redirect: {
      enabled: true
    }
  }
};
Unfortunately, I can still hit the index route without being redirected to the admin panel. Based on everything I have read this should be possible; however, I have not been able to get it working.
Thoughts?

For newer versions (v4+):
src/middlewares/redirector.js
module.exports = (config, { strapi }) => {
  return async (ctx, next) => {
    if (ctx.path === '/') {
      ctx.redirect(strapi.config.get('server.admin.url', '/admin'));
      return;
    }
    await next();
  };
};
config/middlewares.js
module.exports = [
  { name: 'global::redirector' },
  // ...
];
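Note that the question asks for a permanent 301, while Koa's ctx.redirect() responds with a 302 by default. A minimal sketch of how the status could be overridden inside the middleware above (same redirect target assumed):
if (ctx.path === '/') {
  ctx.redirect(strapi.config.get('server.admin.url', '/admin'));
  ctx.status = 301; // ctx.redirect() sets 302; override it for a permanent redirect
  return;
}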

The only issue here is that you placed the redirect middleware inside an admin folder, which is not required. The middlewares folder should reside directly at the root of your project.
Correct the path from:
./admin/middlewares/redirect/index.js
To this:
./middlewares/redirect/index.js
I can show you what I've tried personally, below:
My implementation:
1. Create a directory in the root of your project
$ mkdir -p ./middlewares/redirector/
2. Create an index.js file in ./middlewares/redirector/ with the following content:
module.exports = () => {
  return {
    initialize() {
      strapi.router.get('/', (ctx) => {
        ctx.redirect(strapi.config.get('server.admin.url', '/admin'));
      });
    },
  };
};
3. Finally enable the redirector middleware in the config/middleware.js file:
module.exports = {
  settings: {
    redirector: {
      enabled: true,
    },
  },
};

Related

Strapi email designer plugin reference template to record

I'm currently developing a multi-tenant API with Strapi. For one part I use the Strapi email designer plugin because I want to send some emails, but I want them to be custom designed for each tenant. The problem is that the plugin's table is not accessible in the content manager of Strapi, so I can only hard-code the template to a specific endpoint. Is there a way to have the plugin table in the content manager, or for it to be referenced from a content manager table, something like:
(table)tenant->(field)templateId => (ref-table)plugin-email-designer->(ref-field)templateId
so that I can switch and set templates dynamically from the Strapi panel rather than with hard-coded endpoints?
I've checked your issue briefly, and there is an option you are going to like, but it involves using patch-package...
So, let's assume you have a Strapi project created, you have added strapi-plugin-email-designer, and you are using yarn v1.x:
yarn add patch-package postinstall-postinstall
Go to node_modules/strapi-plugin-email-designer/server/content-types/email-template/schema.json
change the following fields:
{
  ...
  "pluginOptions": {
    "content-manager": {
      "visible": true
    },
    "content-type-builder": {
      "visible": true
    }
  },
  ...
}
Now run:
yarn patch-package strapi-plugin-email-designer
Now open your project's package.json and add to scripts:
{
  "scripts": {
    ...
    "postinstall": "patch-package"
  }
}
Then run:
yarn build
yarn develop
Head to the admin UI and you should see the new collection.
So now you can do the following:
Sending Email
Let's assume you added a has one relation called email_template to your model.
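As a hedged sketch, that attribute in the tenant content type might look roughly like this in src/api/tenant/content-types/tenant/schema.json (the target UID is an assumption and depends on the plugin's content-type name):
"attributes": {
  "email_template": {
    "type": "relation",
    "relation": "oneToOne",
    "target": "plugin::email-designer.email-template"
  }
}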
Next we need to add a custom route, so in /src/api/tenant/routes/ create a file called routes.js:
/src/api/tenant/routes/routes.js
module.exports = {
  routes: [
    {
      method: 'POST',
      path: `/tenants/:id/send`,
      handler: `tenant.send`
    }
  ]
};
Now we need to add a handler to the controller:
/src/api/tenant/controllers/tenant.js
"use strict";
/**
* tenant controller
*/
const { createCoreController } = require("#strapi/strapi").factories;
module.exports = createCoreController("api::tenant.tenant", ({ strapi }) => ({
async send(ctx) {
const { id } = ctx.params;
const { data } = ctx.request.body;
// notice, if you need extra validation you add it here
// if (!data) return ctx.badRequest("no data was provided");
const { to, subject } = data;
const { email_template, ...tenant } = await strapi.db
.query("api::tenant.tenant")
// if you have extra relations it's better to populate them directly here
.findOne({ where: { id }, populate: ["email_template"] });
console.log(email_template);
try {
await strapi
.plugin("email-designer")
.service("email")
.sendTemplatedEmail(
{
to,
//from, < should be set in /config/plugins.js email.settings.defaultFrom
//replayTo < should be set in /config/plugins.js email.settings.defaultReplyTo
},
{
templateReferenceId: email_template.templateReferenceId,
subject,
},
{
...tenant,
// this equals to apply all the data you have in tenant
// this may need to be aligned between your tenant and template
}
);
return { success: `Message sent to ${to}` };
} catch (e) {
strapi.log.debug("📺: ", e);
return ctx.badRequest(null, e);
}
},
}));
Don't forget to enable access to /api/tenants/:id/send in the admin panel, under Settings - Roles.
POST http://localhost:1337/api/tenants/1/send
{
  "data": {
    "to": "email@example.com",
    "subject": "Hello World"
  }
}
response:
{
  "success": "Message sent to email@example.com"
}
Please note there is no template validation, e.g. if you give it a wrong template reference the call will simply fail.
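If you want a basic guard, here is a hedged addition to the controller above (not part of the original answer), placed before sendTemplatedEmail is called:
// bail out early if the tenant has no template linked
if (!email_template || !email_template.templateReferenceId) {
  return ctx.badRequest("tenant has no email template configured");
}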

Why does my redirects() in NextJS not work?

What am I doing wrong? I'm building an accessible website with NextJS and want to redirect pages to their fitting plain-language counterparts. But since they are a different kind of language, their URLs are different too.
My routes are built like this:
Standard language = my-website.com/about
Plain language = my-website.com/plain-language/about
And I have a switch where I can just change the /plain-language/ part
Now I have these routes:
my-website.com/accessible-webdesign
my-website.com/plain-language/for-disabled-persons
And if I click the switch on the first one, it will link me to my-website.com/plain-language/accessible-webdesign, which doesn't exist! So I used redirects() and also restarted my server to fix this, but it doesn't work. It doesn't redirect and I get a 404 just as before.
Can you check my code and tell me what I should change to make it work?
Thank you!
This is my next.config.js:
const withBundleAnalyzer = require('@next/bundle-analyzer')({
  enabled: process.env.ANALYZE === 'true',
});

/** @type {import('next').NextConfig} */
const path = require('path');

const withPWA = require('next-pwa')({
  dest: 'public',
  disable: process.env.NODE_ENV === 'development',
  sw: 'sw.js'
});
const nextConfig = {
  async redirects() {
    return [
      {
        source: '/plain-language/accessible-webdesign',
        destination: '/plain-language/for-disabled-persons',
        permanent: 'true'
      }
    ]
  },
  reactStrictMode: true,
  swcMinify: true,
  trailingSlash: false,
  webpackDevMiddleware: config => {
    config.watchOptions = {
      poll: 1000,
      aggregateTimeout: 300
    }
    return config
  },
  sassOptions: {
    includePaths: [path.join(__dirname, 'styles')]
  },
  experimental: {
    images: {
      layoutRaw: true
    }
  },
  images: {
    /* unoptimized: true - for static export! */
    /* deviceSizes: [640, 750, 828, 1080, 1200, 1920, 2048, 3840],
    formats: ['image/webp'] */
  }
}
module.exports = withBundleAnalyzer(withPWA({nextConfig}));
My working solution came from here: https://stackoverflow.com/a/58182678/
I put a middleware.ts in the root folder (right next to package.json, next.config.js, etc.).
And I wrote this inside:
import { NextResponse } from 'next/server';
import type { NextRequest } from 'next/server';

export async function middleware(request: NextRequest) {
  /* /accessible-webdesign --> /for-disabled-persons */
  if (request.nextUrl.pathname.startsWith('/plain-language/accessible-webdesign')) {
    return NextResponse.redirect(new URL('/plain-language/for-disabled-persons', request.url));
  }

  /* /another-url --> /another-redirect */
  if (request.nextUrl.pathname.startsWith('/plain-language/another-url')) {
    return NextResponse.redirect(new URL('/plain-language/another-redirect', request.url));
  }
}
Not as beautiful, but working.
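For comparison, here is a config-only sketch of redirects() (not the solution above): `permanent` must be a boolean rather than the string 'true', and in the question's next.config.js the config is exported as withPWA({nextConfig}), which nests it under a nextConfig key, so the top-level redirects are never seen. A minimal sketch, with the PWA/bundle-analyzer wrappers omitted for clarity:
// next.config.js (sketch)
/** @type {import('next').NextConfig} */
const nextConfig = {
  async redirects() {
    return [
      {
        source: '/plain-language/accessible-webdesign',
        destination: '/plain-language/for-disabled-persons',
        permanent: true, // boolean, not the string 'true'
      },
    ];
  },
};

module.exports = nextConfig;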

How to solve Vercel 500 Internal Server Error?

I have created a project that uses MongoDB to store user info and Next-Auth to authenticate users. On localhost this is all working seamlessly. Previously I had a couple of errors with my next-auth config, but that seems to be working fine now on the Vercel live site. Once the user logs in they are redirected to "my-project/suggestions". On this page I am using getServerSideProps to identify whether there is a valid session token. If so, data is pulled from a local JSON file.
On the live site, when the user logs in, the page is redirected to "/suggestions", yet I am receiving a 500 Internal Server Error page. In the function logs I am getting this error message:
[GET] /_next/data/KpsnuV9k44lUAhQ-0rK-B/suggestions.json
10:10:57:12
2022-05-05T14:10:59.270Z 5b7a7375-045f-4518-864b-7968c3c9385f ERROR [Error: ENOENT: no such file or directory, open '/var/task/public/data/data.json'] {
errno: -2,
syscall: 'open',
path: '/var/task/public/data/data.json',
page: '/suggestions'
}
RequestId: 5b7a7375-045f-4518-864b-7968c3c9385f Error: Runtime exited with error: exit status 1
Runtime.ExitError
This is my first project using MongoDB and Next-Auth, so I'm not sure what the issue is in this case. In my .env.local file I only have these two variables:
NEXTAUTH_SECRET="MUNKNATION"
NEXTAUTH_URL=http://localhost:3000
How I am pulling the data on localhost:
export const getServerSideProps = async (context) => {
  const session = await getSession({ req: context.req });

  if (!session) {
    return {
      redirect: {
        destination: "/",
        permanent: false,
      },
    };
  } else {
    let filePath = path.join(process.cwd(), "public", "data", "data.json");
    let jsonData = await fs.readFile(filePath);
    const data = JSON.parse(jsonData);

    const inProgressStatusData = data.productRequests.filter(
      (item) => item.status == "in-progress"
    );
    const liveStatusData = data.productRequests.filter(
      (item) => item.status == "live"
    );
    const plannedStatusData = data.productRequests.filter(
      (item) => item.status == "planned"
    );

    let filterData = filteredData(data, "suggestion");

    let feedbackData = {
      suggestions: filterData,
      progress: inProgressStatusData,
      planned: plannedStatusData,
      live: liveStatusData,
    };

    return {
      props: { session, feedbackData },
    };
  }
};
Folder structure:
A simple solution to this problem would be to call readFileSync instead of readFile inside your getServerSideProps, as follows:
export const getServerSideProps = async (context) => {
  ...
  const file = readFileSync(
    join(process.cwd(), "public", "data", "data.json"),
    "utf8"
  );
  const data = JSON.parse(file);
...
I have tested this solution with Vercel and it works correctly, in development and production mode.
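Put together with its imports, a minimal self-contained sketch of that getServerSideProps (the next-auth import path and the returned props shape are assumptions; the question's data shaping is elided):
import { readFileSync } from "fs";
import { join } from "path";
import { getSession } from "next-auth/react"; // path depends on your next-auth version

export const getServerSideProps = async (context) => {
  const session = await getSession({ req: context.req });
  if (!session) {
    return { redirect: { destination: "/", permanent: false } };
  }

  // read the JSON at request time, as in the answer above
  const file = readFileSync(
    join(process.cwd(), "public", "data", "data.json"),
    "utf8"
  );
  const data = JSON.parse(file);

  return { props: { session, data } };
};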

Firebase rules test does not pass even when using lockdown

I'm trying to test my Firebase rules, but they do not pass even when I use lockdown mode. I followed the guide at https://firebase.google.com/docs/firestore/security/test-rules-emulator
const firebase = require('@firebase/rules-unit-testing');
const fs = require('fs');

const projectId = 'test-judge';

function getAuthedFirestore(auth) {
  return firebase.initializeAdminApp({
    projectId: projectId,
    auth: auth
  }).firestore();
}

beforeEach(async () => {
  await firebase.clearFirestoreData({ projectId });
});

before(async () => {
  const rules = fs.readFileSync('firestore.rules', 'utf8');
  await firebase.loadFirestoreRules({
    projectId: projectId,
    rules: rules
  });
});

after(async () => {
  await Promise.all(firebase.apps().map(app => app.delete()));
});

describe('locked down', () => {
  it("require users to log in before creating a profile", async () => {
    const db = getAuthedFirestore(null);
    const profile = db.collection("users").doc("alice");
    await firebase.assertFails(profile.set({ birthday: "January 1" }));
  });
});
here is my firebase.json
{
  "firestore": {
    "rules": "firestore.rules",
    "indexes": "firestore.indexes.json"
  }
}
and my package.json
{
  "devDependencies": {
    "firebase-admin": "^9.11.0",
    "@firebase/app": "^0.6.29",
    "@firebase/rules-unit-testing": "^1.3.12",
    "mocha": "^9.0.3",
    "fs-extra": "^10.0.0"
  },
  "scripts": {
    "test": "mocha"
  }
}
and here is firestore.rules
rules_version = '2';
service cloud.firestore {
  match /databases/{database}/documents {
    match /{document=**} {
      allow read, write: if false;
    }
  }
}
It doesn't seem to me like I'm doing anything wrong, but when I run npm test the test fails. I expect it to pass since assertFails is used and the rules return false, so I should not be able to set the value and the test should pass.
here is my output
Warning: FIRESTORE_EMULATOR_HOST not set, using default value localhost:8080
locked down
1) require users to log in before creating a profile
0 passing (324ms)
1 failing
1) locked down
require users to log in before creating a profile:
Error: Expected request to fail, but it succeeded.
at C:\Users\Moneer\Desktop\judge_rules\node_modules\@firebase\rules-unit-testing\dist\index.cjs.js:581:31
at async Context.<anonymous> (test\test.js:33:9)
npm ERR! Test failed. See above for more details.
This is expected behavior. Security Rules are what protect your backend services from malicious client requests, while the "AdminApp" relates to the Admin SDK, a service tool that interacts with the Firebase services directly, behind (and therefore not subject to) the Security Rules.
For reference, you will notice that the Admin SDK would normally require Service Account credentials, which allow the SDK to authenticate with the GCP IAM services.
Just realized I should be using initializeTestApp, not initializeAdminApp.
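A minimal sketch of that change to the helper above, so the client is actually subject to the Security Rules (auth: null means an unauthenticated user):
function getAuthedFirestore(auth) {
  // initializeTestApp enforces firestore.rules; initializeAdminApp bypasses them
  return firebase.initializeTestApp({
    projectId: projectId,
    auth: auth
  }).firestore();
}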

Using Grunt to Mock Endpoints

I'm using Yeoman, Grunt, and Bower to construct a platform for building a frontend independently of a backend. The idea is that all of my (AngularJS) controllers, services, factories, etc. live in this project and get injected afterwards into my server-side codebase based on the result of grunt build.
My question is:
How can I mock endpoints so that the Grunt server responds to the same endpoints as my (Rails) App will?
At the moment I am using:
angular.module('myApp', ['ngResource'])
  .run(['$rootScope', function ($rootScope) {
    $rootScope.testState = 'test';
  }]);
And then in each of my individual services:
mockJSON = {'foo': 'myMockJSON'}
And on every method:
if ($rootScope.testState == 'test') {
  return mockJSON;
} else {
  // real service logic with $q/$http goes here
}
Then after grunt build, testState = 'test' gets removed.
This is clearly a relatively janky architecture. How can I avoid it? How can I have Grunt respond to the same endpoints as my app (some of which have dynamic params) apply some logic (if necessary), and serve out a json file (possibly dependent on path params)?
I've fixed this issue by using Express to write a server that responds with static JSON.
First I created a directory in my project called 'api'. Within that directory I have the following files:
package.json:
{
  "name": "mockAPI",
  "version": "0.0.0",
  "dependencies": {
    "express": "~3.3.4"
  }
}
Then I run npm install in this directory.
index.js:
module.exports = require('./lib/server');
lib/server.js:
var express = require('express');
var app = express();

app.get('/my/endpoint', function (req, res) {
  res.json({ 'foo': 'myMockJSON' });
});

module.exports = app;
and finally in my global Gruntfile.js:
connect: {
  options: {
    port: 9000,
    hostname: 'localhost',
  },
  livereload: {
    options: {
      middleware: function (connect, options) {
        return [
          lrSnippet,
          mountFolder(connect, '.tmp'),
          mountFolder(connect, yeomanConfig.app),
          require('./api')
        ];
      }
    }
  },
Then the services make the requests, and the express server serves the correct JSON.
After grunt build, the express server is simply replaced by a rails server.
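If some of the endpoints take dynamic params, as the question mentions, Express route params cover that; a small hedged addition to the same lib/server.js (the :id route is made up for illustration):
// respond based on a dynamic path segment
app.get('/my/endpoint/:id', function (req, res) {
  res.json({ 'id': req.params.id, 'foo': 'myMockJSON' });
});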
As of grunt-contrib-connect v.0.7.0 you can also just add your custom middleware to the existing middleware stack without having to manually rebuild the existing middleware stack.
livereload: {
  options: {
    open: true,
    base: [
      '.tmp',
      '<%= config.app %>'
    ],
    middleware: function (connect, options, middlewares) {
      // inject a custom middleware into the array of default middlewares
      middlewares.push(function (req, res, next) {
        if (req.url !== '/my/endpoint') {
          return next();
        }
        res.writeHead(200, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify({ foo: 'myMockJSON' }));
      });
      return middlewares;
    }
  }
},
See https://github.com/gruntjs/grunt-contrib-connect#middleware for the official documentation.
Alternatively you can use grunt-connect-proxy to proxy everything that is missing from your test server to an actual backend.
It's quite easy to install; the one thing to remember when adding the proxy to your livereload connect middleware is to add it last, like this:
middleware: function (connect) {
  return [
    lrSnippet,
    mountFolder(connect, '.tmp'),
    mountFolder(connect, yeomanConfig.app),
    proxySnippet
  ];
}
grunt-connect-prism is similar to the Ruby project VCR. It provides an easy way for front end developers to record HTTP responses returned by their API (or some other remote source) and replay them later. It's basically an HTTP cache, but for developers working on a Single Page Application (SPA). You can also generate stubs for API calls that don't exist, and populate them the way you want.
It's useful for mocking complex & high latency API calls during development. It's also useful when writing e2e tests for your SPA only, removing the server from the equation. This results in much faster execution of your e2e test suite.
Prism works by adding a custom connect middleware to the connect server provided by the grunt-contrib-connect plugin. While in 'record' mode it will generate a file per response on the filesystem with content like the following:
{
  "requestUrl": "/api/ponies",
  "contentType": "application/json",
  "statusCode": 200,
  "data": {
    "text": "my little ponies"
  }
}
DISCLAIMER: I'm the author of this project.
You can use Apache proxy and connect your REST server with gruntjs.
Apache would do this:
proxy / -> gruntjs
proxy /service -> REST server
You would point your application at Apache, and the angular.js application would think it is talking to itself, so there is no cross-domain problem.
Here is a great tutorial on how to set this up:
http://alfrescoblog.com/2014/06/14/angular-js-activiti-webapp-with-activiti-rest/
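A minimal sketch of the Apache side, assuming grunt serves on port 9000 and the REST server on port 8080 (both ports are assumptions; adjust to your setup). The more specific /service rule has to come before the catch-all:
# inside the virtual host (mod_proxy and mod_proxy_http enabled)
ProxyPass        /service http://localhost:8080/service
ProxyPassReverse /service http://localhost:8080/service
ProxyPass        /        http://localhost:9000/
ProxyPassReverse /        http://localhost:9000/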
Just my alternative way, based on Abraham P's answer. It does not need Express installed within the 'api' folder, and I can split the mock services into separate files. For example, my 'api' folder contains 3 files:
api\
  index.js    // assigns all the "modules" so we can simply require that
  user.js     // all mocking for user
  product.js  // all mocking for product
file user.js
var user = function (req, res, next) {
  if (req.method === 'POST' && req.url.indexOf('/user') === 0) {
    res.end(
      JSON.stringify({
        'id': '5463c277-87c4-4f1d-8f95-7d895304de12',
        'role': 'admin'
      })
    );
  } else {
    next();
  }
};

module.exports = user;
file product.js
var product = function (req, res, next) {
  if (req.method === 'POST' && req.url.indexOf('/product') === 0) {
    res.end(
      JSON.stringify({
        'id': '5463c277-87c4-4f1d-8f95-7d895304de12',
        'name': 'test',
        'category': 'test'
      })
    );
  } else {
    next();
  }
};

module.exports = product;
index.js just assigns all the "modules" so we can simply require it:
module.exports = {
  product: require('./product.js'),
  user: require('./user.js')
};
My Gruntfile.js:
connect: {
  options: {
    port: 9000,
    // Change this to '0.0.0.0' to access the server from outside.
    hostname: 'localhost',
    livereload: 35729
  },
  livereload: {
    options: {
      open: true,
      middleware: function (connect) {
        return [
          connect.static('.tmp'),
          connect().use(
            '/bower_components',
            connect.static('./bower_components')
          ),
          connect.static(appConfig.app),
          require('./api').user,
          require('./api').product,
        ];
      }
    }
  }