Joi validate individual fields of nested schema - joi

I have a nested schema setup like this:
// Schema under test: two required top-level strings plus a nested,
// explicitly required `address` object.
var schema = Joi.object().keys({
first_name: Joi.string().required(),
last_name: Joi.string().required(),
address: Joi.object().keys({
street: Joi.string().required(),
zip: Joi.number().required(),
state: Joi.string().required(),
country: Joi.string().required()
}).required(),
})
// abortEarly: false collects every validation error instead of stopping at the first.
var options = { abortEarly: false };
// Validating an empty object: Joi reports `address` as missing as a whole
// and does not descend into its keys (the behavior this question asks about).
var result = Joi.validate({}, schema, options);
When running this, result.error will return the message:
[
{message: '"first_name" is required', path: 'first_name' },
{message: '"last_name" is required', path: 'last_name' },
{message: '"address" is required', path: 'address' }
]
As you see, the address field is not expanded. I don't want to get a message that the whole address field is missing. Instead, I want to be informed about the individual items in the address schema missing. When I don't make address explicitly required, the items in it don't get validated against required() at all. The nested field validation seems to work only on the entire nested schema object level, not on the individual items within the schema level. How do I make it work with the individual nested schema items?

Adding a "skeleton" object works.
Working example here.
// Default export plus the named builders used below.
// NOTE(review): assumes this joi build re-exports string/number/object as
// named exports — verify against the installed joi version.
import Joi, {
string,
number,
object
} from "joi";
var schema = object({
first_name: string().required(),
last_name: string().required(),
address: object({
street: string().required(),
zip: number().required(),
state: string().required(),
country: string().required()
})/*.required(), <= Not needed */
})
// "Skeleton" default: guarantees `address` exists as an (empty) object, so
// Joi descends into it and reports each missing nested key individually.
var skeleton = { address: {} };
var options = { abortEarly: false };
var validate = data => {
// Object.assign is shallow: a caller-supplied `address` replaces the
// skeleton's empty object rather than being merged into it.
var newData = Object.assign({}, skeleton, data);
return Joi.validate( newData, schema, options);
}
console.log(
JSON.stringify(validate({}).error.details, null, 2)
);
Result:
[
{
"message": "\"first_name\" is required",
"path": "first_name",
"type": "any.required",
"context": {
"key": "first_name"
}
},
{
"message": "\"last_name\" is required",
"path": "last_name",
"type": "any.required",
"context": {
"key": "last_name"
}
},
{
"message": "\"street\" is required",
"path": "address.street",
"type": "any.required",
"context": {
"key": "street"
}
},
{
"message": "\"zip\" is required",
"path": "address.zip",
"type": "any.required",
"context": {
"key": "zip"
}
},
{
"message": "\"state\" is required",
"path": "address.state",
"type": "any.required",
"context": {
"key": "state"
}
},
{
"message": "\"country\" is required",
"path": "address.country",
"type": "any.required",
"context": {
"key": "country"
}
}
]

Related

Fetch data using where clause on relational model

In loopback 3.0, I am trying to fetch data from multiple relational models while applying where clause on a relation.
Below is the example JSON for the models:
modelA.json
{
"properties": {
"id": {
"type": "string",
"required": true,
"length": 20
},
"modelBid": {
"type": "string"
},
...
}
"relations": {
"modelB-rel": {
"type": "belongsTo",
"model": "modelB",
"foreignKey": "modelBid",
"primaryKey": "id"
},
...
}
}
modelB.json
{
"properties": {
"id": {
"type": "string",
"required": "false",
"length": "20"
},
"name": {
"type": "string",
"length": "255"
},
...
}
}
SQL equivalent of query which I want to perform:
SELECT * FROM modelA a
LEFT JOIN modelB b
ON a.modelBid = b.id
WHERE b.name = 'abcd'
The LoopBack filter object I am using, which is not fetching the intended results:
// NOTE(review): as written this does not parse — `modelB-rel` contains a
// hyphen and must be quoted ('modelB-rel') to be a valid object key, and
// the find(...) call is missing its closing parenthesis (snippet truncated).
modelA.find({
include: [
{
relation: 'modelB-rel',
scope: {
fields: ['id', 'name'],
},
}
],
// A top-level `where` cannot reference an included relation; LoopBack
// ignores it here — the accepted fix moves the filter into the include's scope.
where: {
modelB-rel: {
name: 'abcd',
},
}
}
...
Please help me to correct the filter object to get the same results as of the mentioned SQL equivalent.
In order to apply filters on related models, you have to put the filter inside the scope of the included model. You can use something like this:
// Filter on the related model by putting `where` inside the include's scope.
modelA.find({
  include: [{
    relation: 'modelB-rel',
    scope: {
      where: { name: 'abcd' },
      fields: ['id', 'name'],
    },
  }],
})

Error with migration postgress, i have two columns over, why?

I have a migration with only four columns — id, name, last_name and email — but when I run a query from Postman it shows two extra columns: SELECT \"id\", \"name\", \"lastName\", \"email\", \"createdAt\", \"updatedAt\" FROM \"Users\" AS \"User\". What is wrong?
module.exports = {
// NOTE(review): three mismatches here explain the error shown below:
// 1) the table is created as 'User' but the failing SQL queries "Users"
//    (42P01 = relation does not exist — presumably the model pluralizes
//    its table name; verify the model's tableName/freezeTableName options);
// 2) the query selects "createdAt"/"updatedAt", which this migration never
//    creates — either add those columns here or disable timestamps on the
//    model (see answer below);
// 3) the column is last_name while the query selects "lastName" — map it
//    with a field option on the model attribute, or rename one side.
up: (queryInterface, Sequelize) => {
return queryInterface.createTable('User', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
name: {
type: Sequelize.STRING,
allowNull: false,
},
last_name: {
type: Sequelize.STRING,
allowNull: false,
},
email: {
type: Sequelize.STRING,
allowNull: false,
},
});
},
down: (queryInterface) => {
return queryInterface.dropTable('User');
}
};
And when I use my service:
// Fetches every User row. NOTE(review): the try/catch is redundant — it
// only rethrows, so `return database.User.findAll();` alone is equivalent.
static async getAllUsers() {
try {
const users = await database.User.findAll();
console.log('COnsOLE ', users)
return users
} catch (error) {
throw error;
}
}
i get this error from postman:
{
"status": "error",
"message": {
"name": "SequelizeDatabaseError",
"parent": {
"length": 104,
"name": "error",
"severity": "ERROR",
"code": "42P01",
"position": "73",
"file": "parse_relation.c",
"line": "1180",
"routine": "parserOpenTable",
"sql": "SELECT \"id\", \"name\", \"lastName\", \"email\", \"createdAt\", \"updatedAt\" FROM \"Users\" AS \"User\";"
},
"original": {
"length": 104,
"name": "error",
"severity": "ERROR",
"code": "42P01",
"position": "73",
"file": "parse_relation.c",
"line": "1180",
"routine": "parserOpenTable",
"sql": "SELECT \"id\", \"name\", \"lastName\", \"email\", \"createdAt\", \"updatedAt\" FROM \"Users\" AS \"User\";"
},
"sql": "SELECT \"id\", \"name\", \"lastName\", \"email\", \"createdAt\", \"updatedAt\" FROM \"Users\" AS \"User\";"
}
}
I have run these commands many times before: sequelize db:migrate and sequelize db:migrate:undo
this is my git repository: https://github.com/x-rw/basePostgresExpressjs
you should situate in server directory and write npm run dev
That's because your Sequelize model instance has updatedAt and createdAt fields, so it queries the database for those two columns as well. But your migration file defines no updatedAt and createdAt fields, so your database table does not have these columns.
You have two options, if you really don't want to use updatedAt and createdAt you should specify that while initializing your sequelize model instance. Check the api reference. You should see options.timestamps. You can set it to false.
// Model that opts out of createdAt/updatedAt via the init options.
class YourModel extends Sequelize.Model { }

const attributes = {
  name: {
    type: Sequelize.DataTypes.STRING(100),
    allowNull: false,
    validate: { notNull: true, notEmpty: true, len: [2, 100] },
  },
};

YourModel.init(attributes, {
  sequelize: sequelizeInstance,
  timestamps: false, // This is what you need.
});
If you want to use it however, check my answer here to generate correct migrations.

How to control document do not get or add additional property in loopbak?

I have a registration model in LoopBack over MongoDB with the following properties:
"properties": {
"Fname": {
"type": "string",
"required": true
},
"Lname": {
"type": "string",
"required": true
},
"phone": {
"type": "string",
"required": true
},
"date": {
"type": "string",
"required": true
},
"time": {
"type": "string",
"required": true
}
}
In the application I post some additional data along with the model's required data, for control and processing on the server side:
// Posts the registration form as JSON and resolves with the parsed response.
// NOTE(review): `uid` is sent along with the model fields, which is why it
// ends up persisted unless the server strips it (the point of this question).
submitForm() {
let headers = new Headers(
{
'Content-Type': 'application/json'
});
let options = new RequestOptions({ headers: headers });
let data = JSON.stringify({
Fname: this.form.Fname,
Lname: this.form.Lname,
phone: this.form.phone,
time: this.form.time,
date: this.form.date,
uid: this.form.uid
});
//console.log(data);
let url = 'http://localhost:3000/api/registrations';
// NOTE(review): wrapping an existing promise in `new Promise` is the
// explicit-construction anti-pattern; returning the .then()/.catch()
// chain directly would behave the same.
return new Promise((resolve, reject) => {
this.http.post(url, data, options)
.toPromise()
.then((response) => {
console.log('API Response : ', response.status);
resolve(response.json());
})
.catch((error) => {
console.error('API Error : ', error.status);
console.error('API Error : ', JSON.stringify(error));
reject(error.json());
});
});
}
In server side I have this code:
// Runs before every save; ctx.instance is set for full creates (POST),
// while updates carry only the changed fields in ctx.data.
Registration.observe('before save', function (ctx, next) {
if (ctx.instance) {
// When Create (POST)
// ctx.instance has the json properties
console.log("Triggers when create");
// NOTE(review): loose equality (==) — prefer === . Also nothing here
// removes extra posted properties such as `uid`, so they get persisted.
if (checkUID(ctx.instance) ==200 ){
console.log('ok');
}
} else {
// When Update (UPDATE)
// ctx.data has the json properties
console.log("Triggers when update");
}
next();
});
but after successfully registration I saw that uid that was added into document and regardless to model's properties, document contains addition properties.
{
"Fname": "Eram",
"Lname": "SA",
"phone": "1234567890",
"date": "2017/10/06",
"time": "17:37:46",
"id": "59d78e3f5e5e6704205038aa",
"uid": "38bc3241a43073a7b40d186f24923cc5"
},

how to access postgresql database view using strongloop

Please help me access a PostgreSQL database view using StrongLoop.
I am able to access a table:
{"name": "test",
"options": {
"idInjection": false,
"postgresql": {
"schema": "public",
"table": "test_data_v"
}
},
"properties": {
"assetid": {
"type": "String",
"required": false,
"length": 40,
"precision": null,
"scale": null,
"id": 1,
"postgresql": {
"columnName": "asset_id",
"dataType": "character varying",
"dataLength": 40,
"dataPrecision": null,
"dataScale": null,
"nullable": "YES"
}
}
}}
In the same way, please suggest how to access a view.
Thanks
Divya
I don't have PostgreSQL installed, but I tried it in MySQL and it works fine.
In your model you can query the view directly — see this example.
In database I have created view that is
CREATE VIEW shareviews AS
SELECT id,name
FROM share where id = 1;
In model you can call viewname directly like this example
module.exports = function(Share) {
var server = require('../../server/server');
var ds = server.dataSources.MySQL; // use server.dataSources.postgres;
Share.list = function(optionalparam, cb) {
var sql = 'select * from shareviews';
ds.connector.execute(sql, function(err, data)
{
if(err) return err;
console.log(err);
console.log("data",data);
cb(null, data);
});
}
Share.remoteMethod(
'list',
{
accepts: {arg: 'param', type: 'string'},
returns: {arg: 'result', type: 'object'},
http: {path: '/list', verb: 'get'}
}
);
};
You need to set data source in datasource.json
{
"db": {
"name": "db",
"connector": "memory"
},
"postgres": {
"host": "localhost",
"port": 5432,
"database": "postgres",
"username": "postgres",
"password": "*******",
"name": "postgres",
"connector": "postgresql"
}
}
Then in model-config.json you need to assign data source name to each
model.
That is
{
"_meta": {
"sources": [
"loopback/common/models",
"loopback/server/models",
"../common/models",
"./models"
],
"mixins": [
"loopback/common/mixins",
"loopback/server/mixins",
"../common/mixins",
"./mixins"
]
},
"User": {
"dataSource": "db"
},
"AccessToken": {
"dataSource": "db",
"public": false
},
"ACL": {
"dataSource": "db",
"public": false
},
"RoleMapping": {
"dataSource": "db",
"public": false
},
"Role": {
"dataSource": "db",
"public": false
},
"yourmodelname": {
"dataSource": "postgres",
"public": true
},
"anothermodelname": {
"dataSource": "postgres",
"public": true
}
}
Then you can access the database from your model.js or via a REST call (e.g. localhost:3000/explorer). For example, my
model name is Grocerylist:
module.exports = function(Grocerylist) {
Grocerylist.beforeRemote('create', function(context, user, next) {
var req = context.req;
req.body.date = Date.now();
req.body.shopperId = req.accessToken.userId;
next();
});
Grocerylist.complete = function(shopperId, cb) {
Grocerylist.find({
where: {
purchased:false,
shopperId: shopperId,
}
}, function(err, list) {
var response;
if (typeof list === 'undefined' || list.length === 0) {
response = "All done shopping!"
}
else {
response = "Shopping is not done.";
}
cb(null, response);
});
};
Grocerylist.remoteMethod(
'complete',
{
accepts: {
arg: 'shopperId', type: 'string'
},
http: {
path: '/complete',
verb: 'get'
},
returns: {
arg: 'complete',
type: 'string'
}
}
);
};

MongoDB - How to define multiple datatypes for a field in Mongoose?

This is my JSON:
{
"title": "This an item",
"date":1000123123,
"data": [
{
"type": "html",
"content": "<h1>Hi there, this is a H1</h1>"
},
{
"type":"img",
"content": [
{
"title": "Image 1",
"url": "www.google.com/1.jpg",
"description":"This is the first image"
}
]
},
{
"type": "map",
"content": [
{
"lat":323434555,
"lng":4444343434,
"description":"this is just a place"
}
]
}
]
}
As you can see, the "data" field stores an array of objects where the "content" field is variable.
How should I model that in Mongoose?
This is how I defined my schema:
// NOTE(review): `type` is a reserved key in Mongoose schema definitions —
// a field literally named "type" makes Mongoose read the object as a path
// declaration rather than a subdocument, which is why `content` comes back
// stringified. Use `{ type: { type: String }, ... }`, a custom `typeKey`,
// or Schema.Types.Mixed for `content` (see answer below).
module.exports = mongoose.model('TestObject', new Schema({
title: String,
date: Date,
data: [
{
type: String,
content: Object
}
]
}));
And this is the response for the "data" field:
"data": [
{
"type":"img",
"content": [ "[object Object]" ]
},
{
"type":"map",
"content": [ "[object Object]" ]
}
]
What is the correct way to define a varying datatype for an object in Mongoose?
Maybe the Mixed type could meet your requirement
An "anything goes" SchemaType, its flexibility comes at a trade-off of it being harder to maintain. Mixed is available either through Schema.Types.Mixed or by passing an empty object literal.
// `Mixed` is Schema.Types.Mixed (an "anything goes" type) — assumes it has
// been destructured from mongoose.Schema.Types in the surrounding module.
data: [
{
type: String,
content: Mixed
}
]