Dynamically setting the schema in pg-promise still warns about duplicate database objects

I have a database that has several different schemas, like a multi-tenant architecture, in Postgres 9.5.7.
I've followed the example in this answer to dynamically set the schema for particular queries. All of the context is encapsulated in a class and, as far as I can tell, I'm calling pgp in the correct way, but I still get this warning: WARNING: Creating a duplicate database object for the same connection.
I have a configuration initializer like so:
// initializer.js
const initOptions = {
  schema(name) {
    return name ? name : 'public'
  }
}

const _config = {
  host: process.env.DBSERVER || nconf.get('database:server') || 'localhost',
  port: process.env.DBPORT || nconf.get('database:port') || '5432',
  user: process.env.DBUSER || nconf.get('database:user') || 'dev',
  password: process.env.DBPASS || nconf.get('database:password') || 'pass1234',
  database: process.env.DBNAME || nconf.get('database:user') || process.env.DBUSER || 'dev'
}

const pgp = require('pg-promise')(initOptions)

// default database
const pgdb = pgp(_config)

module.exports = {
  pgp
  , pgdb
  , _config
}
And the class using it like so:
const { pgp, _config } = require('../db/initializer')

class Tenant {
  constructor(name) {
    this.name = name
    this.db = pgp(_config, name)
  }

  async getMetadata() {
    try {
      const data = await this.db.many(`SELECT * FROM versions`)
      return data
    } catch (err) {
      log.error(`Error getting versions for ${this.name}:`, err)
      return null
    }
  }
}
The stack trace from this warning points to this line in the _config:
host: process.env.DBSERVER || nconf.get('database:server') || 'localhost',
I'm creating a bunch of these classes all at once and on demand.
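For context, pg-promise emits this warning whenever a database object is created a second time for the same connection details (and database context), which is what happens here as soon as two Tenant instances are built for the same schema name. A common remedy is to cache database objects per schema name and reuse them. A minimal sketch (the dbCache map and getDb helper are illustrative names, not from the post):

const { pgp, _config } = require('../db/initializer')

// Cache one database object per schema name so repeated Tenant
// construction reuses it instead of creating a duplicate.
const dbCache = new Map()

function getDb(name) {
  if (!dbCache.has(name)) {
    // `name` is passed as the Database Context (dc), which the
    // schema() callback in initOptions receives.
    dbCache.set(name, pgp(_config, name))
  }
  return dbCache.get(name)
}

class Tenant {
  constructor(name) {
    this.name = name
    this.db = getDb(name)
  }
}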

Related

How do I seed an mssql db using Prisma and createMany

I am using Prisma - client version 2.23.0 - with an mssql database. I am trying to use createMany to seed the db. I followed the steps from the Prisma docs exactly. I have run the prisma generate and prisma db push commands successfully. My database and tables are created and Prisma connects to it just fine. Here is the seed function I am using.
// Prisma create query to seed models in database
export async function seed() {
  const count = await prisma.awDemographics.createMany({
    data: [
      {
        appointed_officials: 'some officials',
        awk_state: 'FL',
        meeting_schedule: 'MWF',
        pending_litigation: 'none',
        possible_competition: 'none',
        possible_contacts: 'none',
        voting_requirements: 'none',
      },
      {
        appointed_officials: 'some officials2',
        awk_state: 'FL2',
        meeting_schedule: 'MWF2',
        pending_litigation: 'none2',
        possible_competition: 'none2',
        possible_contacts: 'none2',
        voting_requirements: 'none2',
      },
    ],
  });
}
Here is the result
Result:
PrismaClientUnknownRequestError2 [PrismaClientUnknownRequestError]:
Invalid `prisma.awDemographics.createMany()` invocation:
DEFAULT or NULL are not allowed as explicit identity values.
This is a non-recoverable error which probably happens when the Prisma Query Engine has a panic.
Error: Command failed with exit code 1: ts-node --eval "
// @ts-ignore
declare const require: any
console.info('Result:')
const __seed = require('./src/prisma/seed.ts')
const __keys = Object.keys(__seed)
async function runSeed() {
// Execute "seed" named export or default export
if (__keys && __keys.length) {
if (__keys.indexOf('seed') !== -1) {
return __seed.seed()
} else if (__keys.indexOf('default') !== -1) {
return __seed.default()
}
}
}
runSeed()
.then(function (result) {
if (result) {
console.log(result)
}
})
.catch(function (e) {
console.error('Error from seed:')
throw e
})
I can seed using the create function just fine.
Here is my schema:
provider = "prisma-client-js"
previewFeatures = ["microsoftSqlServer"]
}
datasource db {
provider = "sqlserver"
url = env("DATABASE_URL")
}
model AwDemographics {
  // @@map(name: "aw_demographics")
  id                   Int     @id @default(autoincrement())
  appointed_officials  String?
  awk_state            String?
  meeting_schedule     String?
  pending_litigation   String?
  possible_competition String?
  possible_contacts    String?
  voting_requirements  String?
}
As suggested by Ryan in the comments, updating prisma and prisma client to version 2.24.0 fixed the issue.
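For anyone hitting the same panic, the fix amounts to bumping both packages and regenerating the client (commands assume npm):

npm install @prisma/client@2.24.0
npm install --save-dev prisma@2.24.0
npx prisma generate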

Cache MongoDb connection with Next.js 10 TypeScript Project - API Route

I'm trying to convert next.js/examples/with-mongodb/util/mongodb.js to TS so I can cache and reuse my connections to Mongo within a TypeScript Next.js project. I'm getting a TS error on cached.promise that says:
Type 'Promise<MongoClient | { client: MongoClient; db: Db; }>' is not assignable to type 'Promise<MongoClient>'
How should I properly declare the mongo property on global to appease the TS gods?
import { MongoClient, Db } from "mongodb";

const { DATABASE_URL, DATABASE_NAME } = process.env;

declare global {
  namespace NodeJS {
    interface Global {
      mongo: {
        conn: MongoClient | null;
        promise: Promise<MongoClient> | null;
      };
    }
  }
}

let cached = global.mongo;

if (!cached) {
  cached = global.mongo = { conn: null, promise: null };
}

async function connect() {
  if (cached.conn) {
    return cached.conn;
  }
  if (!cached.promise) {
    const opts = {
      useNewUrlParser: true,
      useUnifiedTopology: true,
    };
    cached.promise = MongoClient.connect(DATABASE_URL, opts).then((client) => {
      return {
        client,
        db: client.db(DATABASE_NAME),
      };
    });
  }
  cached.conn = await cached.promise;
  return cached.conn;
}

export { connect };
You don't need to cache your connection yourself; check the latest Next.js with-mongodb example. The official MongoDB forum experts pointed me to this example project.
Try to use native solutions.
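The gist of that example, paraphrased from memory (so treat this as a sketch rather than the exact file): export a single module-scoped clientPromise, and only stash it on a global in development so it survives hot-module reloads.

import { MongoClient } from "mongodb";

const uri = process.env.MONGODB_URI;
let clientPromise;

if (process.env.NODE_ENV === "development") {
  // In development the module can be re-evaluated on every HMR reload,
  // so keep the promise on a global to avoid piling up connections.
  if (!global._mongoClientPromise) {
    global._mongoClientPromise = new MongoClient(uri).connect();
  }
  clientPromise = global._mongoClientPromise;
} else {
  // In production the module is evaluated once, so no global is needed.
  clientPromise = new MongoClient(uri).connect();
}

// Export a module-scoped promise; await it wherever you need the client.
export default clientPromise;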
The 'conn' property you are storing contains both MongoClient and Db.
In your global declaration for mongo, you have only included MongoClient. I have the exact same code in my project and the way I handle this is to simply create a basic type called MongoConnection which contains both. Code below.
type MongoConnection = {
  client: MongoClient;
  db: Db;
};

declare global {
  namespace NodeJS {
    interface Global {
      mongo: {
        conn: MongoConnection | null;
        promise: Promise<MongoConnection> | null;
      };
    }
  }
}
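With that type in place, the connect function from the question type-checks, since the cached promise now resolves to the shape it actually produces; only the return annotation changes. A sketch, using the same DATABASE_URL/DATABASE_NAME as above:

async function connect(): Promise<MongoConnection> {
  if (cached.conn) {
    return cached.conn;
  }
  if (!cached.promise) {
    cached.promise = MongoClient.connect(DATABASE_URL, {
      useNewUrlParser: true,
      useUnifiedTopology: true,
    }).then((client) => ({ client, db: client.db(DATABASE_NAME) }));
  }
  cached.conn = await cached.promise;
  return cached.conn;
}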
Seems like the answer is to just make the mongo property an any, like this:
declare global {
  namespace NodeJS {
    interface Global {
      mongo: any;
    }
  }
}

Issues mapping an HTTP response to JSON and then mapping the JSON to a model

I am trying to map an HTTP response to JSON and then map part of that JSON to a Ticket interface, since the response has many values I don't need in my ticket interface. My app compiles without any issues, but when I test the REST function I get the runtime error below. I know the issue isn't with the backend, as I'm able to successfully console.log the response. Any idea what I'm doing wrong here?
My error:
vendor.bundle.js:8137 Angular is running in the development mode. Call enableProdMode() to enable the production mode.
main.bundle.js:553 TypeError: res.json(...).map is not a function
at MapSubscriber.project (main.bundle.js:1330)
at MapSubscriber._next (vendor.bundle.js:42853)
at MapSubscriber.Subscriber.next (vendor.bundle.js:4709)
at XMLHttpRequest.onLoad (vendor.bundle.js:47289)
at ZoneDelegate.webpackJsonp.749.ZoneDelegate.invokeTask (polyfills.bundle.js:2478)
at Object.onInvokeTask (vendor.bundle.js:9228)
at ZoneDelegate.webpackJsonp.749.ZoneDelegate.invokeTask (polyfills.bundle.js:2477)
at Zone.webpackJsonp.749.Zone.runTask (polyfills.bundle.js:2245)
at XMLHttpRequest.ZoneTask.invoke (polyfills.bundle.js:2540)
My code:
retrieveTicket(barcode: string): Observable<any> {
  return this.http.get(`${this.API_URL}POS/RetrieveTicket/${barcode}`, this.options)
    .map((res: Response) => res.json().map(ticket => {
      Object.assign({
        ResponseCode: ticket.ResponseCode,
        CustomError: ticket.CustomError,
        ticketDate: ticket.POSTicket.Date,
        ticketTime: ticket.POSTicket.EndTime,
        cashierName: ticket.POSBarCode.POSCashier_Name,
        tranNo: ticket.POSTicket.TranNo,
        tranValue: ticket.POSTicket.ScanValue,
        securityChecked: ticket.POSBarCode.SecurityChecked
      }) as ITicket
    }))
    .catch((error: any) => Observable.throw(error || 'server error'));
}
my interface:
import { BaseRequestInterface } from './base-request.interface';

export interface ITicket extends IBaseRequest {
  ticketDate: string;
  ticketTime: string;
  cashierName: string;
  tranNo: string;
  tranValue: string;
  timeSincePurchase: string;
  securityChecked: boolean;

  export function setTicket(obj?: any) {
    super();
    this.ResponseCode = obj && obj.ResponseCode || null;
    this.CustomError = obj && obj.CustomError || null;
    this.ticketDate = obj && obj.ticketDate || null;
    this.ticketTime = obj && obj.ticketTime || null;
    this.cashierName = obj && obj.cashierName || null;
    this.tranNo = obj && obj.tranNo || null;
    this.tranValue = obj && obj.tranValue || null;
    this.timeSincePurchase = obj && obj.timeSincePurchase || null;
    this.securityChecked = obj && obj.securityChecked || null;
  }
}
my BaseRequest Interface:
// Base request class that returns from BRMService API
export interface IBaseRequest {
  // Public properties available
  BaseURI?: string;
  CustomError?: string;
  ProviderName?: string;
  RequestFormData?: string;
  RequestURI?: string;
  ResponseCode?: number;
}
EDIT:
We learned that we are dealing with just one object, rather than an array of objects, so mapping the object to type TicketModel would be done like the following (shortened):
retrieveTicket(barcode: string) {
  return this.http.get(...)
    .map(res => res.json())
    .map(res => ({ ResponseCode: res.ResponseCode, CustomError: res.CustomError }) as TicketModel)
}
The interface:
export interface TicketModel {
  ResponseCode: number;
  CustomError: string;
}
ORIGINAL POST:
Unclear if you are using a class or an interface for your TicketModel; either way, I suggest you use an interface ;) Then you can simply map your incoming data like this (shortened version):
.map((res: Response) => res.json().map(x =>
  Object.assign({
    ResponseCode: x.ResponseCode,
    CustomError: x.CustomError
  }) as TicketModel))
If your response is an object with the array nested inside an items property, just map over items instead:
.... res.json().items.map( ...
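Spelled out, that variant would look something like this (items here stands for whatever property your payload nests the array under):

.map((res: Response) => res.json().items.map(x =>
  Object.assign({
    ResponseCode: x.ResponseCode,
    CustomError: x.CustomError
  }) as TicketModel))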

apollostack/graphql-server - how to get the fields requested in a query from resolver

I am trying to figure out a clean way to work with queries and MongoDB projections so I don't have to retrieve excessive information from the database.
So assuming I have:
// the query
type Query {
  getUserByEmail(email: String!): User
}
And I have a User with an email and a username, to keep things simple. If I send a query and I only want to retrieve the email, I can do the following:
query { getUserByEmail(email: "test@test.com") { email } }
But in the resolver, my DB query still retrieves both username and email, even though only one of those is passed back by Apollo Server as the query result.
I only want the DB to retrieve what the query asks for:
// the resolver
getUserByEmail(root, args, context, info) {
  // check what fields the query requested
  // create a projection to only request those fields
  return db.collection('users').findOne({ email: args.email }, { /* projection */ });
}
Of course the problem is, getting information on what the client is requesting isn't so straightforward.
Assuming I pass the request in as context, I considered using context.payload (hapi.js), which has the query string, and searching it with various .split()s, but that feels kind of dirty. As far as I can tell, info.fieldASTs[0].selectionSet.selections has the list of fields, and I could check for a field's existence in there. I'm not sure how reliable this is, especially once I start using more complex queries.
Is there a simpler way?
In case you don't use MongoDB: a projection is an additional argument you pass in that tells the database explicitly what to retrieve:
// telling mongoDB to not retrieve _id
db.collection('users').findOne({ email: 'test@test.com' }, { _id: 0 })
As always, thanks to the amazing community.
2020-Jan answer
The current answer to getting the fields requested in a GraphQL query is to use the graphql-parse-resolve-info library for parsing the info parameter.
The library is "a pretty complete solution and is actually used under the hood by postgraphile", and is recommended going forward by the author of the other top library for parsing the info field, graphql-fields.
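Applied to the original getUserByEmail resolver, that looks roughly like this (a sketch: the parse/simplify aliases are the ones used in the helper code further down, and passing { projection } as an option assumes a reasonably recent MongoDB driver):

const { parse, simplify } = require("graphql-parse-resolve-info");

getUserByEmail(root, args, context, info) {
  // Flatten the requested fields for this resolver's return type...
  const { fields } = simplify(parse(info), info.returnType);
  // ...and turn them into an inclusion projection: { email: 1, username: 1 }
  const projection = {};
  for (const name of Object.keys(fields)) {
    projection[name] = 1;
  }
  return db.collection('users').findOne({ email: args.email }, { projection });
}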
Use graphql-fields
Apollo server example
const graphqlFields = require('graphql-fields');

const rootSchema = [`
  type Person {
    id: String!
    name: String!
    email: String!
    picture: String!
    type: Int!
    status: Int!
    createdAt: Float
    updatedAt: Float
  }

  type Query {
    users: [Person]
  }

  schema {
    query: Query
    mutation: Mutation
  }
`];

const rootResolvers = {
  Query: {
    users(root, args, context, info) {
      const topLevelFields = Object.keys(graphqlFields(info));
      return fetch(`/api/user?fields=${topLevelFields.join(',')}`);
    }
  }
};

const schema = [...rootSchema];
const resolvers = Object.assign({}, rootResolvers);

// Create schema
const executableSchema = makeExecutableSchema({
  typeDefs: schema,
  resolvers,
});
Sure you can. This is actually the same functionality that is implemented in the join-monster package for SQL-based DBs. There's a talk by its creator: https://www.youtube.com/watch?v=Y7AdMIuXOgs
Take a look at their info-analysing code to get started: https://github.com/stems/join-monster/blob/master/src/queryASTToSqlAST.js#L6-L30
Would love to see a projection-monster package for us mongo users :)
UPDATE:
There is a package that creates a projection object from info on npm: https://www.npmjs.com/package/graphql-mongodb-projection
You can generate a MongoDB projection from the info argument. Here is sample code you can follow:
/**
 * @description - Gets a MongoDB projection from the graphql query
 *
 * @return { object }
 * @param { object } info
 * @param { model } model - MongoDB model for referencing
 */
function getDBProjection(info, model) {
  const {
    schema: { obj }
  } = model;
  const keys = Object.keys(obj);
  const projection = {};
  const { selections } = info.fieldNodes[0].selectionSet;
  for (let i = 0; i < keys.length; i++) {
    const key = keys[i];
    const isSelected = selections.some(
      selection => selection.name.value === key
    );
    // MongoDB rejects projections that mix inclusion and exclusion,
    // so only add the fields that were actually selected.
    if (isSelected) projection[key] = 1;
  }
  return projection;
}

module.exports = getDBProjection;
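Usage in a resolver would look something like this (a sketch: UserModel is a hypothetical Mongoose model, which is what the schema.obj destructuring above implies):

const getDBProjection = require("./getDBProjection");

const resolvers = {
  Query: {
    getUserByEmail(root, args, context, info) {
      // Build an inclusion projection from the requested fields
      const projection = getDBProjection(info, UserModel);
      return UserModel.findOne({ email: args.email }, projection);
    },
  },
};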
With a few helper functions you can use it like this (typescript version):
import { parceGqlInfo, query } from "@backend";
import { GraphQLResolveInfo } from "graphql";
export const user = async (parent: unknown, args: unknown, ctx: unknown, info: GraphQLResolveInfo): Promise<User | null> => {
  const { dbQueryStr } = parceGqlInfo(info, userFields, "id");
  const [user] = await query(`SELECT ${dbQueryStr} FROM users WHERE id=$1;`, [1]);
  return user;
};
Helper functions.
A few points:
gql_uid is used as the ID! string type derived from the primary key, so the DB types don't have to change
the required option is there for dataloaders (in case a field was not requested by the user)
allowedFields is used to filter out extra fields from info, like '__typename'
queryPrefix is used if you need to prefix the selected fields, as in select u.id from users u
const userFields = [
  "gql_uid",
  "id",
  "email"
]

// merge arrays and delete duplicates
export const mergeDedupe = <T>(arr: any[][]): T => {
  // @ts-ignore
  return ([...new Set([].concat(...arr))] as unknown) as T;
};
import { parse, simplify, ResolveTree } from "graphql-parse-resolve-info";
import { GraphQLResolveInfo } from "graphql";

export const getQueryFieldsFromInfo = <Required = string>(info: GraphQLResolveInfo, options: { required?: Required[] } = {}): string[] => {
  const { fields } = simplify(parse(info) as ResolveTree, info.returnType) as { fields: { [key: string]: { name: string } } };
  let astFields = Object.entries(fields).map(([, v]) => v.name);

  if (options.required) {
    astFields = mergeDedupe([astFields, options.required]);
  }

  return astFields;
};

export const onlyAllowedFields = <T extends string | number>(raw: T[] | readonly T[], allowed: T[] | readonly T[]): T[] => {
  return allowed.filter((f) => raw.includes(f));
};

export const parceGqlInfo = (
  info: GraphQLResolveInfo,
  allowedFields: string[] | readonly string[],
  gqlUidDbAlliasField: string,
  options: { required?: string[]; queryPrefix?: string } = {}
): { pureDbFields: string[]; gqlUidRequested: boolean; dbQueryStr: string } => {
  const fieldsWithGqlUid = onlyAllowedFields(getQueryFieldsFromInfo(info, options), allowedFields);

  return {
    pureDbFields: fieldsWithGqlUid.filter((i) => i !== "gql_uid"),
    gqlUidRequested: fieldsWithGqlUid.includes("gql_uid"),
    dbQueryStr: fieldsWithGqlUid
      .map((f) => {
        const dbQueryStrField = f === "gql_uid" ? `${gqlUidDbAlliasField}::Text AS gql_uid` : f;
        return options.queryPrefix ? `${options.queryPrefix}.${dbQueryStrField}` : dbQueryStrField;
      })
      .join(),
  };
};

How can I wrap sails-mongo db methods for profiling?

I'm trying to set up a Sails hook with miniprofiler to help profile mongo usage. I'm struggling with how to wrap the db methods in a function that will execute the profile. I'm trying to do this via a user hook:
setupMiniprofilerMongo(req, res, next) {
  const adapter = sails.hooks.orm.datastores.default.adapter;
  const adapterPrototype = Object.getPrototypeOf(adapter);
  const originalMethod = adapter.adapter.find;

  adapterPrototype.find = function profiledMongoCommand(connectionName, collectionName, options, cb) {
    sails.log.info(`${collectionName}.find`);
    return originalMethod.call(adapter, connectionName, collectionName, options, cb);
  };
}
That causes the following error to be thrown:
TypeError: Cannot read property 'collections' of undefined
at Object.module.exports.adapter.find (/Users/jgeurts/dev/platform/node_modules/sails-mongo/lib/adapter.js:349:40)
at Object.profiledMongoCommand [as find] (/Users/jgeurts/dev/platform/config/http.js:234:37)
Any help would be appreciated. I tried to wrap the methods of the mongodb package directly, but that doesn't seem to work either. :/
I got this working by wrapping waterline query methods. There is room for improvement, though.
setupMiniprofilerWaterline(req, res, next) {
  const dbOperations = [
    'count',
    'create',
    'createEach',
    'define',
    'describe',
    'destroy',
    'drop',
    'find',
    'join',
    // 'native',
    // 'registerConnection',
    'update',
  ];

  const waterlineMethodByModels = {};

  const miniprofilerWaterline = () => {
    return {
      name: 'mongodb',
      handler(req, res, next) {
        if (!req.miniprofiler || !req.miniprofiler.enabled) {
          return next();
        }

        const profiler = req.miniprofiler;

        for (const modelName of _.keys(sails.models)) {
          for (const dbOperation of dbOperations) {
            const model = sails.models[modelName];
            if (!model[dbOperation]) {
              continue;
            }

            if (!waterlineMethodByModels[modelName]) {
              waterlineMethodByModels[modelName] = {};
            }

            // Prevent wrapping a method more than once
            if (waterlineMethodByModels[modelName][dbOperation]) {
              continue;
            }

            waterlineMethodByModels[modelName][dbOperation] = true;

            const originalMethod = model[dbOperation];
            model[dbOperation] = function profiledMongoCommand(...args) {
              const query = args && args.length ? args[0] : '';
              const lastArg = args && args.length ? args[args.length - 1] : null;
              const modelAndMethod = `${modelName}.${dbOperation}`;

              if (lastArg && typeof lastArg === 'function') {
                sails.log.debug(`mongo::${modelAndMethod} - ${JSON.stringify(query)}`);
                const callback = args.pop();
                const timing = profiler.startTimeQuery('mongodb', query ? JSON.stringify(query || '') : '');

                // In general, the callstack is kind of useless to us for these profiles
                // The model/db method is more useful in the miniprofiler UI
                timing.callStack = `\n\nMethod: ${modelAndMethod}`;

                return originalMethod.call(this, ...args, function profiledResult(...results) {
                  profiler.stopTimeQuery(timing);
                  callback(...results);
                });
              }

              const methodResult = originalMethod.call(this, ...args);
              const methodResultPrototype = Object.getPrototypeOf(methodResult);
              const isDeferred = !!methodResultPrototype.exec;

              // If methodResult is a Deferred object type, then the query method will be profiled above when the deferred is executed (with a callback)
              // So we only care to log this if the methodResult is not a deferred object
              if (!isDeferred) {
                sails.log.warn(`Was not able to profile mongo::${modelAndMethod}. Maybe it's a promise? query: ${JSON.stringify(query)}`);
              }

              return methodResult;
            };
          }
        }

        next();
      },
    };
  };

  miniprofiler.express.for(miniprofilerWaterline())(req, res, next);
},
The code is available as miniprofiler-waterline if you want to contribute or use it in your own projects.