Sails.js Model Object Not Returning Data For PostgreSQL

I have the following in my Sails.js config/adapter.js:
module.exports.adapters = {
  'default': 'postgres',

  postgres: {
    module   : 'sails-postgresql',
    host     : 'xxx.compute-1.amazonaws.com',
    port     : 5432,
    user     : 'xxx',
    password : 'xxx',
    database : 'xxx',
    ssl      : true,
    schema   : true
  }
};
And in models/Movie.js:
Movie = {
  attributes: {
    tableName: 'movies.movies',
    title: 'string',
    link: 'string'
  }
};
module.exports = Movie;
In my controller:
Movie.query("SELECT * FROM movies.movies", function(err, movies) {
  console.log('movies', movies.rows);
});
movies.rows DOES return the correct data
However:
Movie.find({ title: 'Frozen' }, function(err, movies) {
  console.log('movies', movies);
});
movies returns an EMPTY ARRAY
So it seems all connections are good because the raw query works perfectly.
Could there be something I am doing wrong with setting up the Movie.find() or with models/Movie.js?
Does the tableName attribute not support postgresql schema_name.table_name?

First off, you need to move tableName out of attributes, since it's a class-level property. Second, sails-postgresql does have some (very undocumented) support for schemas, using the meta.schemaName option:
Movie = {
  tableName: 'movies',
  meta: {
    schemaName: 'movies'
  },
  attributes: {
    title: 'string',
    link: 'string'
  }
};
module.exports = Movie;
You can give that a try, and if it doesn't work, either move your table into the public schema, or nudge the author of the schemaName support for help.
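If the find call still comes back empty after that change, a quick sanity check (reusing the same raw-query escape hatch from the question; the exact output shape depends on the pg driver) is to see what search_path the adapter's connection is actually using:

Movie.query("SHOW search_path", function(err, result) {
  // the schema your table lives in should either be on the search_path
  // or be targeted explicitly via meta.schemaName
  if (err) { return console.error(err); }
  console.log(result.rows);
});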


How to make Mongoose model.insertMany insert documents with numerical and ordered ids?

I have this route in my backend Express server:
router.route('/fillInformationAssetsSeverityEvaluation').post((req, res) => {
  informationAssetsSeverityEvaluationRow.remove({}, (err) => {
    if (err)
      console.log(err);
    else
      // res.json("informationAssets Collection has been dropped!");
      res.json('information Assets Severity Evaluation data has been received on the server side');

    informationAssetsSeverityEvaluationRow.insertMany([req.body[0]], {
      multi: true
    }).then(documentsInserted => {
      console.log('[req.body[0]]: ', [req.body[0]]);
      console.log('documentsInserted: ', documentsInserted);
      console.log('You have successfully inserted ', documentsInserted.length, ' documents in informationAssetsSeverityEvaluation collection');
    });
  });
});
For the sake of simplicity, I am inserting only one document.
[req.body[0]]:
{ REF: 'REFSHIT',
  confFin: 'A',
  confRep: 'A' }
But in the real application, I am inserting multiple documents similar to that.
This console.log:
console.log('documentsInserted: ', documentsInserted);
logs:
documentsInserted:  [ { _id: 5d3453afc302d718e4870b53,
    REF: 'REFSHIT',
    confFin: 'A',
    confRep: 'A' } ]
As you can see, the _id is automatically generated:
_id: 5d3453afc302d718e4870b53
What I would like is for the ids of the different documents to be numerically ordered, i.e.:
Document 0 would have id 0
Document 1 would have id 1
Document 2 would have id 2
And so on and so forth.
After doing some research, I found out that I can do this manually by setting the id myself inside the objects passed to insertMany.
However, since I receive the documents objects from the request body, this is not a viable solution.
Any help?
Finally, after trying four modules and a couple of days of effort on something that should be native to MongoDB, I have found a simple solution. I hope it helps someone.
1/ Install mongoose-plugin-autoinc
2/
import mongoose from 'mongoose';
import { autoIncrement } from 'mongoose-plugin-autoinc';

const connection = mongoose.createConnection('mongodb://localhost/myDatabase');

const BookSchema = new mongoose.Schema({
  author: { type: mongoose.Schema.Types.ObjectId, ref: 'Author' },
  title: String,
  genre: String,
  publishDate: Date
});

BookSchema.plugin(autoIncrement, 'Book');
const Book = connection.model('Book', BookSchema);
3/ In my case I have the models defined in models.js and the connection defined in server.js, so I had to write this:
BookSchema.plugin(autoIncrement, 'Book');
in models.js
and instead of
const Book = connection.model('Book', BookSchema);
I have:
module.exports = {
  informationAssetsRow: mongoose.model('informationAssetsRow', informationAssetsRow),
};
And in server.js:
const {
  informationAssetsRow,
} = require('./models/models');
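For what it's worth, a quick test like the one below should then show numeric, sequential _id values instead of ObjectIds (a sketch, assuming the plugin's default behaviour of auto-incrementing _id starting at 0):

// Sketch: with mongoose-plugin-autoinc registered on BookSchema, _id becomes a
// sequential number (assuming the plugin defaults).
Book.create([
  { title: 'First book' },
  { title: 'Second book' }
]).then((books) => {
  console.log(books.map((b) => b._id)); // e.g. [ 0, 1 ]
});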

How to connect PostgreSQL with GraphQL [duplicate]

GraphQL has mutations, Postgres has INSERT; GraphQL has queries, Postgres has SELECTs; etc., etc. I haven't found an example showing how you could use both in a project, for example passing all the queries from the front end (React, Relay) through GraphQL, but actually storing the data in Postgres.
Does anyone know what Facebook is using as DB and how it's connected with GraphQL?
Is the only option of storing data in Postgres right now to build custom "adapters" that take the GraphQL query and convert it into SQL?
GraphQL is database agnostic, so you can use whatever you normally use to interact with the database, and use the query or mutation's resolve method to call a function you've defined that will get/add something to the database.
Without Relay
Here is an example of a mutation using the promise-based Knex SQL query builder, first without Relay to get a feel for the concept. I'm going to assume that you have created a userType in your GraphQL schema that has three fields: id, username, and created (all required), and that you have a getUser function already defined which queries the database and returns a user object. In the database I also have a password column, but since I don't want that queried I leave it out of my userType.
// db.js
// take a user object and use knex to add it to the database, then return the newly
// created user from the db.
const addUser = (user) => (
  knex('users')
    .returning('id') // returns [id]
    .insert({
      username: user.username,
      password: yourPasswordHashFunction(user.password),
      created: Math.floor(Date.now() / 1000), // Unix time in seconds
    })
    .then((id) => (getUser(id[0])))
    .catch((error) => (
      console.log(error)
    ))
);
// schema.js
// the resolve function receives the query inputs as args, then you can call
// your addUser function using them
const mutationType = new GraphQLObjectType({
  name: 'Mutation',
  description: 'Functions to add things to the database.',
  fields: () => ({
    addUser: {
      type: userType,
      args: {
        username: {
          type: new GraphQLNonNull(GraphQLString),
        },
        password: {
          type: new GraphQLNonNull(GraphQLString),
        },
      },
      resolve: (_, args) => (
        addUser({
          username: args.username,
          password: args.password,
        })
      ),
    },
  }),
});
Since Postgres creates the id for me and I calculate the created timestamp, I don't need them in my mutation query.
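For reference, a client could then exercise this mutation like so (a sketch calling graphql-js directly; the schema variable is assumed to be built from the query and mutation types shown here):

const { graphql } = require('graphql');

// Sketch: run the addUser mutation against the schema defined above.
const mutation = `
  mutation {
    addUser(username: "jane", password: "s3cret") {
      id
      username
      created
    }
  }
`;

graphql(schema, mutation).then((result) => {
  console.log(result.data.addUser); // the newly created user, read back from Postgres
});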
The Relay Way
Using the helpers in graphql-relay and sticking pretty close to the Relay Starter Kit helped me, because it was a lot to take in all at once. Relay requires you to set up your schema in a specific way so that it can work properly, but the idea is the same: use your functions to fetch from or add to the database in the resolve methods.
One important caveat is that the Relay way expects that the object returned from getUser is an instance of a class User, so you'll have to modify getUser to accommodate that.
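For example, getUser could wrap the row it fetches in such a class (a sketch, assuming the same Knex setup as above):

// Sketch: nodeDefinitions resolves an object to its GraphQL type via instanceof,
// so getUser wraps the raw database row in a User instance.
class User {
  constructor(row) {
    this.id = row.id;
    this.username = row.username;
    this.created = row.created;
  }
}

const getUser = (id) => (
  knex('users')
    .where({ id })
    .first()
    .then((row) => (row ? new User(row) : null))
);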
The final example using Relay (fromGlobalId, globalIdField, mutationWithClientMutationId, and nodeDefinitions are all from graphql-relay):
/**
 * We get the node interface and field from the Relay library.
 *
 * The first method defines the way we resolve an ID to its object.
 * The second defines the way we resolve an object to its GraphQL type.
 *
 * All your types will implement this nodeInterface
 */
const { nodeInterface, nodeField } = nodeDefinitions(
  (globalId) => {
    const { type, id } = fromGlobalId(globalId);
    if (type === 'User') {
      return getUser(id);
    }
    return null;
  },
  (obj) => {
    if (obj instanceof User) {
      return userType;
    }
    return null;
  }
);

// a globalId is just a base64 encoding of the database id and the type
const userType = new GraphQLObjectType({
  name: 'User',
  description: 'A user.',
  fields: () => ({
    id: globalIdField('User'),
    username: {
      type: new GraphQLNonNull(GraphQLString),
      description: 'The username the user has selected.',
    },
    created: {
      type: GraphQLInt,
      description: 'The Unix timestamp in seconds of when the user was created.',
    },
  }),
  interfaces: [nodeInterface],
});

// The "payload" is the data that will be returned from the mutation
const userMutation = mutationWithClientMutationId({
  name: 'AddUser',
  inputFields: {
    username: {
      type: GraphQLString,
    },
    password: {
      type: new GraphQLNonNull(GraphQLString),
    },
  },
  outputFields: {
    user: {
      type: userType,
      resolve: (payload) => getUser(payload.userId),
    },
  },
  mutateAndGetPayload: ({ username, password }) =>
    addUser(
      { username, password }
    ).then((user) => ({ userId: user.id })), // passed to resolve in outputFields
});

const mutationType = new GraphQLObjectType({
  name: 'Mutation',
  description: 'Functions to add things to the database.',
  fields: () => ({
    addUser: userMutation,
  }),
});

const queryType = new GraphQLObjectType({
  name: 'Query',
  fields: () => ({
    node: nodeField,
    user: {
      type: userType,
      args: {
        id: {
          description: 'ID number of the user.',
          type: new GraphQLNonNull(GraphQLID),
        },
      },
      resolve: (root, args) => getUser(args.id),
    },
  }),
});
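As an aside, a globalId really is just a base64 encoding of "Type:id", which the graphql-relay helpers make easy to inspect:

const { toGlobalId, fromGlobalId } = require('graphql-relay');

toGlobalId('User', 42);       // "VXNlcjo0Mg==" (base64 of "User:42")
fromGlobalId('VXNlcjo0Mg=='); // { type: 'User', id: '42' }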
We address this problem in Join Monster, a library we recently open-sourced to automatically translate GraphQL queries to SQL based on your schema definitions.
This GraphQL Starter Kit can be used for experimenting with GraphQL.js and PostgreSQL:
https://github.com/kriasoft/graphql-starter-kit - Node.js, GraphQL.js, PostgreSQL, Babel, Flow
(disclaimer: I'm the author)
Have a look at graphql-sequelize for how to work with Postgres.
For mutations (create/update/delete) you can look at the examples in the relay repo for instance.
PostGraphile (https://www.graphile.org/postgraphile/) is open source:
Rapidly build highly customisable, lightning-fast GraphQL APIs
PostGraphile is an open-source tool to help you rapidly design and
serve a high-performance, secure, client-facing GraphQL API backed
primarily by your PostgreSQL database. Delight your customers with
incredible performance whilst maintaining full control over your data
and your database. Use our powerful plugin system to customise every
facet of your GraphQL API to your liking.
You can use an ORM like Sequelize if you're using JavaScript, or TypeORM if you're using TypeScript.
FB is probably using MongoDB or another NoSQL database on the backend. I recently read a blog post which explains how to connect GraphQL to MongoDB. Basically, you need to build a graph model that matches the data you already have in your DB, then write resolve (and reject) functions to tell GraphQL how to behave when it receives a query.
See https://www.compose.io/articles/using-graphql-with-mongodb/
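The resolver pattern is the same as in the Postgres examples above; here is a sketch using the native Node.js MongoDB driver (the connected db handle and the userType are assumptions):

// Sketch: a Query type whose resolver reads from a MongoDB collection instead of Postgres.
const { ObjectId } = require('mongodb');

const queryType = new GraphQLObjectType({
  name: 'Query',
  fields: () => ({
    user: {
      type: userType,
      args: {
        id: { type: new GraphQLNonNull(GraphQLID) },
      },
      resolve: (_, args) => db.collection('users').findOne({ _id: new ObjectId(args.id) }),
    },
  }),
});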
Have a look at SequelizeJS, which is a promise-based ORM that can work with a number of dialects: PostgreSQL, MySQL, SQLite and MSSQL.
The code below is pulled right from its example:
const Sequelize = require('sequelize');

const sequelize = new Sequelize('database', 'username', 'password', {
  host: 'localhost',
  dialect: 'mysql'|'sqlite'|'postgres'|'mssql',
  pool: {
    max: 5,
    min: 0,
    acquire: 30000,
    idle: 10000
  },
  // SQLite only
  storage: 'path/to/database.sqlite',
  // http://docs.sequelizejs.com/manual/tutorial/querying.html#operators
  operatorsAliases: false
});

const User = sequelize.define('user', {
  username: Sequelize.STRING,
  birthday: Sequelize.DATE
});

sequelize.sync()
  .then(() => User.create({
    username: 'janedoe',
    birthday: new Date(1980, 6, 20)
  }))
  .then(jane => {
    console.log(jane.toJSON());
  });
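To tie this back to the question: once the model exists, your GraphQL resolvers simply call it (a sketch, assuming a userType similar to the one defined earlier in this thread):

// Sketch: an addUser mutation backed by the Sequelize User model above.
const mutationType = new GraphQLObjectType({
  name: 'Mutation',
  fields: () => ({
    addUser: {
      type: userType,
      args: {
        username: { type: new GraphQLNonNull(GraphQLString) },
        birthday: { type: GraphQLString },
      },
      // Sequelize returns a promise, which graphql-js resolves before replying.
      resolve: (_, args) => User.create({ username: args.username, birthday: args.birthday }),
    },
  }),
});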

Sequelize.js - How to create non-trivial associations without raw SQL?

Here is my situation:
I'm using postgres 9.4, Sequelize ORM and have following models:
Service
serviceCode - primary key, string of 6 characters
serviceTitle - string
ServiceGroup
serviceCodePrefixes - array of strings that are prefixes for Service.serviceCode
serviceGroupTitle - string
Task
serviceCode - reference to Service
I need to build Task object populated with Service and ServiceGroup objects. Example:
In database:
Service {
  serviceCode: '123232',
  serviceTitle: 'svc title #1',
}
ServiceGroup {
  serviceCodePrefix: ['12', '13', '92', ...],
  serviceGroupTitle: 'svc grp title #1',
}
Task {
  serviceCode: '123232',
}
Result:
Task {
  service: {
    serviceTitle: 'svc title #1',
  },
  serviceGroup: {
    serviceGroupTitle: 'svc grp title #1',
  },
}
The problem is that serviceCodePrefix does not contain simple IDs (which could be used to create an association with hasOne/belongsTo/etc.), but prefixes of IDs.
So the question is: how can this be done without raw SQL?
It turns out that Sequelize currently has an experimental feature: an 'on' option for 'include'. This option allows users to customize the joining condition, so my problem can be solved this way:
const Service = sequelize.define('service', {
  serviceTitle: Sequelize.STRING,
  serviceCode: Sequelize.STRING,
});

const ServiceGroup = sequelize.define('service_group', {
  serviceGroupTitle: Sequelize.STRING,
  // Array of prefixes (e.g. ['01%', '023%'])
  serviceCodePrefix: Sequelize.ARRAY(Sequelize.STRING),
});

const Task = sequelize.define('task', {
  taskTitle: Sequelize.STRING,
  serviceCode: Sequelize.STRING,
});

Task.belongsTo(Service, { foreignKey: 'serviceCode' });
// Hack needed to allow 'include' option to work
Task.hasMany(ServiceGroup, { foreignKey: 'serviceCodePrefix', constraints: false });

// And finally
Task.findAll({
  include: [
    { model: Service },
    {
      model: ServiceGroup,
      on: [' "task"."serviceCode" LIKE ANY("serviceGroup"."serviceCodePrefix") '],
    },
  ],
});
Not sure about the performance though.
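If performance is a concern, Sequelize can log the SQL it generates so you can run EXPLAIN on it (a sketch of the connection options; everything else stays the same):

// Sketch: enable SQL logging to inspect the join produced by the custom 'on' condition.
const sequelize = new Sequelize('database', 'username', 'password', {
  dialect: 'postgres',
  logging: (sql) => console.log(sql), // copy the logged query into EXPLAIN ANALYZE
});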

Sails.js controller not inserting into Mongo database

I've been all over SO and Sailsjs.org trying to figure out what's going wrong, and to no avail. Just trying to learn the basics of SailsJS. I have a UserController, whose create() method gets called when a POST request is sent to /user.
create: function (req, res) {
  var params = req.params.all();
  User.create({
    name: params.FirstName + ' ' + params.LastName,
    email: params.Email,
    password: params.Password,
    jobTitle: params.JobTitle
  }).exec(function createCB(err, created) {
    created.save(function(err) {
      // No error . . . still nothing in db
    });
    return res.json({name: created.name, jobTitle: created.jobTitle, email: created.email, password: created.password});
  });
}
No errors here. All the request params are coming in fine and going back to the client without trouble. But nothing is actually being written to the database.
In development.js:
connections: {
  mongo: {
    adapter: 'sails-mongo',
    host: 'localhost',
    port: 27017,
    // user: 'username',
    // password: 'password',
    database: 'sails_test'
  }
},

models: {
  connection: 'mongo'
}
I've tried this with the above both there in development.js, as well as separately in connections.js and models.js, respectively. No difference.
In User.js:
attributes: {
  FirstName : { type: 'string' },
  LastName  : { type: 'string' },
  Email     : { type: 'string' },
  Password  : { type: 'string' },
  JobTitle  : { type: 'string' }
}
My front end request:
$.ajax({
  method: 'post',
  url: '/user',
  data: {
    FirstName: 'Yo',
    LastName: 'Momma',
    Email: 'yourmom#yourdadshouse.com',
    Password: 'YouWish123',
    JobTitle: 'Home Maker Extraordinaire'
  },
  success: function (sailsResponse) {
    $('#result').html(sailsResponse).fadeIn();
  },
  error: function () {
    console.log('error');
  }
});
Again, none of this is producing an explicit error. There is just nothing being inserted into the database. Or if there is, I don't know how to find it. I've confirmed the existence of this db in the mongo shell, thusly:
show dbs
My db, sails_test shows up in the list. And I've confirmed that there isn't anything in it like so:
db.sails_test.find()
I would very much appreciate some guidance here :)
Update:
Turns out the data is being written just fine. I'm just unable to query the database from the command line. I confirmed this by first creating a sample user, and then using Waterline's findOne() method:
User.findOne({FirstName: params.FirstName}).exec(function (err, user) {
  if (err) {
    res.send(400);
  } else if (user) {
    return res.json({firstName: user.FirstName, lastName: user.LastName, jobTitle: user.JobTitle, email: user.Email, password: user.Password});
  } else {
    return res.send('no users match those criteria');
  }
});
The above works as expected. So my problem now is simply that I cannot interact with the database from the command line. db.<collectionName>.find({}) produces nothing.
This was simply a failure to understand the MongoDB docs. I read db.collection.find({}) as DatabaseName.CollectionName.find({}), when you literally need to type db. So if my database is Test and my collection is Users, the commands are use Test, then db.Users.find({}).
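So in this case the shell session looks roughly like this (the collection name is an assumption; Waterline usually lowercases the model name):

// In the mongo shell
use sails_test
show collections      // look for the collection Waterline created, e.g. "user"
db.user.find({})      // lists the inserted documents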
Also of note, 3T Mongo Chef is a pretty rockin' GUI (graphical user interface) for nosql databases, and it's free for non-commercial use.

Custom proxies on Stores and Models seems inconsistent (and does not work on Models)

I am using ExtJS 4, and have created a custom Rest proxy to handle communication with my Zend backend API.
(See post http://techfrere.blogspot.com/2011/08/linking-extjs4-to-zend-using-rest.html)
When using a Store to handle communication, I was using Ext.require to load the proxy, then referenced the proxy by its type field, and all was good: it loaded my data, as per:
Ext.require('App.utils.ZendRest');
...
proxy : {
    type : 'zest', // My custom proxy alias
    url : '/admin/user'
    ...
}
I then decided to try to use the proxy directly on a model... and no luck. The above logic does not work.
Problems
1. When referencing zest, it does not find the previously loaded ZendRest class (aliased to proxy.zest)
2. It tries to load the missing class from App.proxy.zest (which did not exist.)
So I tried moving my class to this location and renaming to what it seemed to want. No luck.
It loads the class, but still does not initialize the app... I get no errors anywhere, so it's very difficult to figure out where the problem is after this...
For now it seems I will have to revert to using my Zend Rest proxy always via the Store.
Question is... has anyone else seen the behavior? Is it a bug, or am I missing something?
Thanks...
Using your proxy definition, I've managed to make it work.
I am not sure why it doesn't work for you. I have only moved ZendRest to Prj.proxy namespace and added requires: ['Prj.proxy.ZendRest'] to the model.
Code:
// controller/Primary.js
Ext.define('Prj.controller.Primary', {
    extend: 'Ext.app.Controller',
    stores: ['Articles'],
    models: ['Article'],
    views: ['article.Grid']
});

// model/Article.js
Ext.define('Prj.model.Article', {
    extend: 'Ext.data.Model',
    fields: [
        'title', 'author', {
            name: 'pubDate',
            type: 'date'
        }, 'link', 'description', 'content'
    ],
    requires: ['Prj.proxy.ZendRest'],
    proxy: {
        type: 'zest',
        url: 'feed-proxy.php'
    }
});

// store/Articles.js
Ext.define('Prj.store.Articles', {
    extend: 'Ext.data.Store',
    autoLoad: true,
    model: 'Prj.model.Article'
});

// proxy/ZendRest.js
Ext.define('Prj.proxy.ZendRest', {
    extend: 'Ext.data.proxy.Ajax',
    alias : 'proxy.zest',
    appendId: true,
    batchActions: false,
    buildUrl: function(request) {
        var me = this,
            operation = request.operation,
            records = operation.records || [],
            record = records[0],
            format = me.format,
            reqParams = request.params,
            url = me.getUrl(request),
            id = record ? record.getId() : operation.id;

        if (me.appendId && id) {
            if (!url.match(/\/$/)) {
                url += '/';
            }
            url += 'id/' + id;
        }

        if (format) {
            reqParams['format'] = format;
        }

        /* <for example purpose> */
        //request.url = url;
        /* </for example purpose> */

        return me.callParent(arguments);
    }
}, function() {
    Ext.apply(this.prototype, {
        actionMethods: {
            create : 'POST',
            read   : 'GET',
            update : 'PUT',
            destroy: 'DELETE'
        },
        /* <for example purpose> */
        reader: {
            type: 'xml',
            record: 'item'
        }
        /* </for example purpose> */
    });
});
Here is a working sample, and here is the zipped code.
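For completeness, a minimal app.js wiring these classes together might look like this (a sketch; the viewport contents and the grid's xtype are assumptions on top of the Prj namespace above):

// app.js (sketch)
Ext.application({
    name: 'Prj',
    appFolder: 'app',
    controllers: ['Primary'],
    launch: function() {
        Ext.create('Ext.container.Viewport', {
            layout: 'fit',
            // assumes Prj.view.article.Grid declares alias: 'widget.articlegrid'
            items: [{ xtype: 'articlegrid' }]
        });
    }
});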