Different Read/Write types for FirestoreDataConverter - google-cloud-firestore

Is there a way to use different types for reading and writing data with a FirestoreDataConverter?
The typing of FirestoreDataConverter<T> suggests there should only be a single type T, which is both what you get back when querying and what you must provide when writing.
But in the scenario outlined below I have two types: InsertComment, which is what I should provide when creating a new comment, and Comment, an enriched object that has the user's current name and the Firestore path of the document added to it.
There is no way to express that I have these two types. Am I missing something?
import { getFirestore, collection, getDocs, addDoc, FirestoreDataConverter } from 'firebase/firestore'

type Comment = { userId: string, userName: string, comment: string, _firebasePath: string }
type InsertComment = { userId: string, comment: string }

function lookupName(_id: string) { return 'Steve' }

const commentConverter: FirestoreDataConverter<Comment> = {
  fromFirestore(snapshot, options) {
    const { userId, comment } = snapshot.data(options)
    return {
      userId,
      comment,
      userName: lookupName(userId),
      _firebasePath: snapshot.ref.path,
    }
  },
  // Here I wish I could write the below, but it gives me a type error
  // toFirestore(modelObject: InsertComment) {
  toFirestore(modelObject) {
    return modelObject
  },
}

const commentCollection = collection(getFirestore(), 'Comments').withConverter(commentConverter)

// This works great and is typesafe
getDocs(commentCollection).then(snaps => {
  snaps.docs.forEach(snap => {
    const { comment, userName, _firebasePath } = snap.data()
    console.info(`${userName} said "${comment}" (path: ${_firebasePath})`)
  })
})

// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
// This gives me a type error: the fields "userName" and "_firebasePath" are missing
addDoc(commentCollection, { comment: 'Hello World', userId: '123' })

I found a workaround, but I don't think this is how it ought to be done; it feels hacky.
Basically, I create two data converters, one for reading and one for writing.
The read converter is the default on the collection, and when I need to write I override it with the write converter.
function createReadFirestoreConverter<T>(validator: Validator<T>): FirestoreDataConverter<T> {
  return {
    fromFirestore(snapshot, options) {
      return validator({ ...snapshot.data(options), _id: snapshot.id, _path: snapshot.ref.path })
    },
    toFirestore() {
      throw new Error('Firestore converter not configured for writing')
    },
  }
}

function createWriteFirestoreConverter<T>(validator: Validator<T>) {
  return {
    fromFirestore() {
      throw new Error('Firestore converter not configured for reading')
    },
    toFirestore(modelObject: any) {
      return validator(modelObject)
    },
  } as FirestoreDataConverter<any>
}

const installedComponentConverterRead = createReadFirestoreConverter(installedComponentValidator)
const installedComponentConverterWrite = createWriteFirestoreConverter(newInstalledComponentValidator)

const readCollection = collection(getFirestore(), `MachineCards/${machineCard._id}/Components`).withConverter(installedComponentConverterRead)

// If I need to write
const docRef = doc(readCollection, 'newDocId').withConverter(installedComponentConverterWrite)
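To keep that workaround a bit more ergonomic, the two converters can be bundled behind a small factory so call sites never juggle them directly. This is only a sketch built on the helpers above (Validator and the converter factories are the ones from the workaround, not a library API):

function createTypedCollection<Read, Write>(
  path: string,
  readValidator: Validator<Read>,
  writeValidator: Validator<Write>,
) {
  const readConverter = createReadFirestoreConverter(readValidator)
  const writeConverter = createWriteFirestoreConverter(writeValidator)
  const col = collection(getFirestore(), path).withConverter(readConverter)
  return {
    collection: col, // queries stay typed as Read
    // writes opt in to the write converter per document or per addDoc call
    docForWrite: (id: string) => doc(col, id).withConverter(writeConverter),
    add: (data: Write) => addDoc(col.withConverter(writeConverter), data),
  }
}

Call sites can then read from the returned collection and write through add / docForWrite without ever touching the converters.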

Related

Using the $inc function across the MongoDB documents

I am currently working on a moderation system for my Discord bot and ran into an unexpected issue. I've been using the $inc operator to increase values within a single document, but I haven't managed to use $inc across multiple documents; that is, I would like to increase ($inc) the value in the new document based on the number stored in the previous document.
Example: Cases
Current code:
async run(client, message, args, Discord) {
  const targetMention = message.mentions.users.first()
  const userid = args[0]
  const targetId = client.users.cache.find(user => user.id === userid)
  const username = targetMention.tag

  if (targetMention) {
    args.shift()
    const userId = targetMention.id
    const WarnedBy = message.author.tag
    const reason = args.join(' ')
    if (!reason) {
      message.delete()
      message.reply('You must state the reason behind the warning you are attempting to apply.').then(message => {
        message.delete({ timeout: 6000 })
      });
      return;
    }
    const warningApplied = new Discord.MessageEmbed()
      .setColor('#ffd200')
      .setDescription(`A warning has been applied to ${targetMention.tag} :shield:`)
    let reply = await message.reply(warningApplied)
    let replyID = reply.id
    message.reply(replyID)
    const warning = {
      UserId: userId,
      WarnedBy: WarnedBy,
      Timestamp: new Date().getTime(),
      Reason: reason,
    }
    await database().then(async database => {
      try {
        await warnsSchema.findOneAndUpdate({
          Username: username,
          MessageID: replyID
        }, {
          $inc: {
            Case: 1
          },
          WarnedBy: WarnedBy,
          $push: {
            warning: warning
          }
        }, {
          upsert: true
        })
      } finally {
        database.connection.close()
      }
    })
  }

  if (targetId) {
    args.shift()
    const userId = message.member.id
    const WarnedBy = message.author.tag
    const reason = args.join(' ')
    if (!reason) {
      message.delete()
      message.reply('You must state the reason behind the warning you are attempting to apply.').then(message => {
        message.delete({ timeout: 6000 })
      });
      return;
    }
    const warning = {
      userId: userId,
      WarnedBy: WarnedBy,
      timestamp: new Date().getTime(),
      reason: reason
    }
    await database().then(async database => {
      try {
        await warnsSchema.findOneAndUpdate({
          userId,
        }, {
          $inc: {
            Case: 1
          },
          WarnedBy: WarnedBy,
          $push: {
            warning: warning
          }
        }, {
          upsert: true
        })
      } finally {
        database.connection.close()
      }
      const warningApplied = new Discord.MessageEmbed()
        .setColor('#ffd200')
        .setDescription(`A warning has been applied to ${targetId.tag} :shield:`)
      message.reply(warningApplied)
      message.delete();
    })
  }
}
Schema attached to the Code:
const warnsSchema = database.Schema({
  Username: {
    type: String,
    required: true
  },
  MessageID: {
    type: String,
    required: true
  },
  Case: {
    type: Number,
    required: true
  },
  warning: {
    type: [Object],
    required: true
  }
})

module.exports = database.model('punishments', warnsSchema)
Answering my own question: for anyone attempting to do exactly the same as me, there is an easier way to get this working properly. The $inc (increment) operator will not work as the main property of a document. An easier approach is to create a .json file within your Discord bot's files containing something like the following:
{
  "Number": 0
}
After that, use Node's built-in fs module to read and write the file at runtime.
You can then add a function that increases or decreases the value of "Number".
Make sure to import the current value into the file you are working in:
const { Number } = require('./config.json')
(config.json can be named anything; it just serves as an example.)
Now you can console.log(Number) to make sure the number is what you expected, increment it, and write the new value back to the file.
Hope this was helpful.
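A minimal sketch of that counter-file approach (the file location, function name and Case wiring are assumptions, not the original code):

import { readFileSync, writeFileSync } from 'fs'
import { join } from 'path'

// hypothetical counter file sitting next to the bot's entry point
const configPath = join(__dirname, 'config.json')

// read the current counter, bump it, and persist it back to disk
function nextCaseNumber(): number {
  const config = JSON.parse(readFileSync(configPath, 'utf8'))
  config.Number += 1
  writeFileSync(configPath, JSON.stringify(config, null, 2))
  return config.Number
}

// e.g. store it on the new warning document: { Case: nextCaseNumber(), ... }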

How to avoid unexpected side effect in computed properties - VueJS

I am trying to prefill a form with data from a Vuex store. The code below gives me the result I need, but I know this is not the way to do it. I am fairly new to Vue/Vuex. The inputs use v-model, which is why I can't use :value="formInformation.parentGroup" to prefill them.
data() {
  return {
    groupName: { text: '', state: null },
    parentName: { text: '', state: null },
  };
},
computed: {
  formInformation() {
    const groups = this.$store.getters.groups;
    const activeForm = this.$store.getters.activeForm;
    if (activeForm.groupIndex) {
      const formInfo = groups[0][activeForm.groupIndex][activeForm.formIndex]
      this.groupName.text = formInfo.name // Is there a way to not use this unexpected side effect?
      return formInfo;
    } else {
      return 'No Form Selected';
    }
  },
},
I searched for an answer for so long that I just needed to ask. Maybe I am just googling for the wrong thing, but maybe someone here can help me.
You are doing it all right; just a little refactoring and separation is needed. Move all the logic into computed properties (you can also use mapGetters):
mounted() {
  if (this.formInformation) {
    this.$set(this.groupName, 'text', this.formInformation.name);
  }
},
computed: {
  groups() {
    return this.$store.getters.groups;
  },
  activeForm() {
    return this.$store.getters.activeForm;
  },
  formInformation() {
    if (this.activeForm.groupIndex) {
      return this.groups[0][this.activeForm.groupIndex][
        this.activeForm.formIndex
      ];
    }
  }
}
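If you prefer the mapGetters helper mentioned above, the two store-backed computed properties can be written like this (a sketch assuming the getters are registered as 'groups' and 'activeForm'):

import { mapGetters } from 'vuex';

export default {
  computed: {
    ...mapGetters(['groups', 'activeForm']),
    formInformation() {
      if (this.activeForm.groupIndex) {
        return this.groups[0][this.activeForm.groupIndex][this.activeForm.formIndex];
      }
      return null;
    },
  },
};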
You could either make groupName a computed property:
computed: {
  groupName() {
    const groupName = { text: '', state: null };
    if (this.formInformation && this.formInformation.name) {
      groupName.text = this.formInformation.name;
    }
    return groupName;
  }
}
Or you could set a watcher on formInformation:
watch: {
  formInformation: function (newFormInformation, oldFormInformation) {
    this.groupName.text = newFormInformation.name;
  }
},
Avoid mutating a data property inside a computed property.
Computed properties are meant to perform some operation (e.g. reduce, filter) on data properties and simply return the result.
Instead, you can try this:
computed: {
  formInformation() {
    const groups = this.$store.getters.groups;
    const activeForm = this.$store.getters.activeForm;
    if (activeForm.groupIndex) {
      const formInfo = groups[0][activeForm.groupIndex][activeForm.formIndex]
      // this.groupName.text = formInfo.name // <-- [1] simply remove this
      return formInfo;
    } else {
      return 'No Form Selected';
    }
  }
},
// [2] add this, so the handler gets called whenever `formInformation` changes
watch: {
  formInformation: {
    handler (new_value, old_value) {
      if (new_value !== 'No Form Selected') { // [3] check that a form is selected
        this.groupName.text = new_value.name // [4] update the data property with the form info
      }
    },
    deep: true, // [5] in case your object is deeply nested
  }
}
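If the prefill should also run on the initial render (not only when formInformation changes later), the same watcher can be declared with immediate: true instead of using a separate mounted hook; a small variation of the code above:

watch: {
  formInformation: {
    immediate: true, // run the handler once right away with the current value
    deep: true,
    handler(newValue) {
      if (newValue && newValue !== 'No Form Selected') {
        this.groupName.text = newValue.name;
      }
    },
  },
},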

How to implement a node query resolver with apollo / graphql

I am working on implementing a node interface for GraphQL -- a pretty standard design pattern -- and am looking for guidance on the best way to implement a node query resolver:
node(id: ID!): Node
The main thing I am struggling with is how to encode/decode the ID together with the typename so that we can find the right table/collection to query.
Currently I am using a PostgreSQL UUID strategy with pgcrypto to generate ids.
Where is the right seam in the application to do this?
- it could be done in the primary key generation at the database
- it could be done at the GraphQL seam (using a visitor pattern, maybe)
And once the best seam is picked: how/where do you encode/decode?
Note my stack is:
- ApolloClient/Server (from graphql-yoga)
- Node
- TypeORM
- PostgreSQL
The id exposed to the client (the global object id) is not persisted on the backend -- the encoding and decoding should be done by the GraphQL server itself. Here's a rough example based on how relay does it:
import Foo from '../../models/Foo'

function encode (id, __typename) {
  return Buffer.from(`${id}:${__typename}`, 'utf8').toString('base64');
}

function decode (objectId) {
  const decoded = Buffer.from(objectId, 'base64').toString('utf8')
  const parts = decoded.split(':')
  return {
    id: parts[0],
    __typename: parts[1],
  }
}

const typeDefs = `
  type Query {
    node(id: ID!): Node
  }

  type Foo implements Node {
    id: ID!
    foo: String
  }

  interface Node {
    id: ID!
  }
`;

// Just in case model name and typename do not always match
const modelsByTypename = {
  Foo,
}

const resolvers = {
  Query: {
    node: async (root, args, context) => {
      const { __typename, id } = decode(args.id)
      const Model = modelsByTypename[__typename]
      const node = await Model.getById(id)
      return {
        ...node,
        __typename,
      };
    },
  },
  Foo: {
    id: (obj) => encode(obj.id, 'Foo')
  }
};
Note: by returning the __typename, we're letting GraphQL's default resolveType behavior figure out which type the interface is returning, so there's no need to provide a resolver for __resolveType.
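For illustration, a quick round trip through the encode/decode helpers above:

const globalId = encode('123', 'Foo')        // base64 of "123:Foo"
const { id, __typename } = decode(globalId)  // { id: '123', __typename: 'Foo' }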
Edit: to apply the id logic to multiple types:
function addIDResolvers (resolvers, types) {
  for (const type of types) {
    if (!resolvers[type]) {
      resolvers[type] = {}
    }
    resolvers[type].id = (obj) => encode(obj.id, type)
  }
}

addIDResolvers(resolvers, ['Foo', 'Bar', 'Qux'])
@Jonathan I can share an implementation that I have and you can see what you think. This one uses graphql-js, MongoDB and Relay on the client.
/**
 * Given a function to map from an ID to an underlying object, and a function
 * to map from an underlying object to the concrete GraphQLObjectType it
 * corresponds to, constructs a `Node` interface that objects can implement,
 * and a field config for a `node` root field.
 *
 * If the typeResolver is omitted, object resolution on the interface will be
 * handled with the `isTypeOf` method on object types, as with any GraphQL
 * interface without a provided `resolveType` method.
 */
export function nodeDefinitions<TContext>(
  idFetcher: (id: string, context: TContext, info: GraphQLResolveInfo) => any,
  typeResolver?: ?GraphQLTypeResolver<*, TContext>,
): GraphQLNodeDefinitions<TContext> {
  const nodeInterface = new GraphQLInterfaceType({
    name: 'Node',
    description: 'An object with an ID',
    fields: () => ({
      id: {
        type: new GraphQLNonNull(GraphQLID),
        description: 'The id of the object.',
      },
    }),
    resolveType: typeResolver,
  });

  const nodeField = {
    name: 'node',
    description: 'Fetches an object given its ID',
    type: nodeInterface,
    args: {
      id: {
        type: GraphQLID,
        description: 'The ID of an object',
      },
    },
    resolve: (obj, { id }, context, info) => (id ? idFetcher(id, context, info) : null),
  };

  const nodesField = {
    name: 'nodes',
    description: 'Fetches objects given their IDs',
    type: new GraphQLNonNull(new GraphQLList(nodeInterface)),
    args: {
      ids: {
        type: new GraphQLNonNull(new GraphQLList(new GraphQLNonNull(GraphQLID))),
        description: 'The IDs of objects',
      },
    },
    resolve: (obj, { ids }, context, info) =>
      Promise.all(ids.map(id => Promise.resolve(idFetcher(id, context, info)))),
  };

  return { nodeInterface, nodeField, nodesField };
}
Then:
import { nodeDefinitions } from './node';
const { nodeField, nodesField, nodeInterface } = nodeDefinitions(
// A method that maps from a global id to an object
async (globalId, context) => {
const { id, type } = fromGlobalId(globalId);
if (type === 'User') {
return UserLoader.load(context, id);
}
....
...
...
// it should not get here
return null;
},
// A method that maps from an object to a type
obj => {
if (obj instanceof User) {
return UserType;
}
....
....
// it should not get here
return null;
},
);
The load method resolves the actual object. This is the part where you work more specifically with your DB, etc.
If it's not clear, you can ask! Hope it helps :)
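For reference, the fromGlobalId used above (and its inverse, toGlobalId) come from the graphql-relay package; a quick round-trip sketch with made-up values:

import { toGlobalId, fromGlobalId } from 'graphql-relay';

const globalId = toGlobalId('User', '42');    // base64-encodes "User:42"
const { type, id } = fromGlobalId(globalId);  // { type: 'User', id: '42' }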

feathers-mongodb Service.find({query: {_id}}) returns null

I have the schemas below:
students.graphql.schema.js
export default [
  `
  type StudentsWithPagination {
    total: Int
    items: [Students]
  }

  type Students {
    _id: String!
    name: String
    address: Addresses
  }
  `,
];
addresses.graphql.schema.js
export default [
  `
  type AddressesWithPagination {
    total: Int
    items: [Addresses]
  }

  type Addresses {
    _id: String!
    title: String
  }
  `,
];
I have created the two services (students.service.js and addresses.services.js) by running feathers generate service.
When I search addresses by title, I get a result. However, when I search by _id, I get null. Something like:
const studentsResolvers = {
  Students: {
    address: student => {
      const query = {
        _id: student.address
      }
      return Addresses.find({ query }).then(result => {
        console.log(result)
      })
    }
  }
}
The code above logs null even though student.address holds the right address._id. I still get null even if I hardcode student.address with the right address._id.
The query only returns a result if I search by address title instead. Something like:
const query = {
  title: 'my-location'
}
_id is of type String, not ObjectID.
What am I doing wrong?
As documented for the feathers-mongodb adapter, since MongoDB itself (unlike Mongoose) does not have a schema, all query parameters have to be converted to the type stored in the database manually in a hook. The documented example (which can also be adapted for $in queries) looks like this:
const ObjectID = require('mongodb').ObjectID;

app.service('users').hooks({
  before: {
    find(context) {
      const { query = {} } = context.params;

      if (query._id) {
        query._id = new ObjectID(query._id);
      }

      if (query.age !== undefined) {
        query.age = parseInt(query.age, 10);
      }

      context.params.query = query;

      return Promise.resolve(context);
    }
  }
});
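With that hook in place, the resolver from the question also needs to return the matched document rather than swallowing it in console.log; a small sketch, assuming Addresses is the generated Feathers service with default pagination:

const studentsResolvers = {
  Students: {
    address: student =>
      Addresses.find({ query: { _id: student.address } })
        // find() resolves to { total, limit, skip, data } when pagination is enabled
        .then(page => (Array.isArray(page) ? page[0] : page.data[0]) || null)
  }
}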

apollostack/graphql-server - how to get the fields requested in a query from resolver

I am trying to figure out a clean way to work with queries and MongoDB projections so I don't have to retrieve excessive information from the database.
So assuming I have:
// the query
type Query {
  getUserByEmail(email: String!): User
}
And I have a User with an email and a username, to keep things simple. If I send a query and I only want to retrieve the email, I can do the following:
query { getUserByEmail(email: "test@test.com") { email } }
But in the resolver, my DB query still retrieves both username and email, but only one of those is passed back by apollo server as the query result.
I only want the DB to retrieve what the query asks for:
// the resolver
getUserByEmail(root, args, context, info) {
  // check what fields the query requested
  // create a projection to only request those fields
  return db.collection('users').findOne({ email: args.email }, { /* projection */ });
}
Of course the problem is, getting information on what the client is requesting isn't so straightforward.
Assuming I pass in the request as context, I considered using context.payload (hapi.js), which has the query string, and searching it with various .split()s, but that feels kind of dirty. As far as I can tell, info.fieldASTs[0].selectionSet.selections has the list of fields, and I could check for a field's existence in there. I'm not sure how reliable this is, especially once I start using more complex queries.
Is there a simpler way?
In case you don't use MongoDB: a projection is an additional argument you pass in, telling the database explicitly what to retrieve:
// telling MongoDB to not retrieve _id
db.collection('users').findOne({ email: 'test@test.com' }, { _id: 0 })
As always, thanks to the amazing community.
2020-Jan answer
The current answer for getting the fields requested in a GraphQL query is to use the graphql-parse-resolve-info library to parse the info parameter.
The library is "a pretty complete solution and is actually used under the hood by postgraphile", and is recommended going forward by the author of the other top library for parsing the info field, graphql-fields.
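For illustration, a minimal sketch of how that could drive the Mongo projection from the question (the findOne call and field handling are assumptions, not part of the library):

import { parse, simplify, ResolveTree } from 'graphql-parse-resolve-info';

// inside getUserByEmail(root, args, context, info)
const parsed = simplify(parse(info) as ResolveTree, info.returnType);
const requested = Object.keys(parsed.fields);                      // e.g. ['email'] for the query above
const projection = Object.fromEntries(requested.map(f => [f, 1]));

// newer MongoDB drivers expect the projection under an options key
return db.collection('users').findOne({ email: args.email }, { projection });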
Use graphql-fields
Apollo server example
const graphqlFields = require('graphql-fields');

const rootSchema = [`
  type Person {
    id: String!
    name: String!
    email: String!
    picture: String!
    type: Int!
    status: Int!
    createdAt: Float
    updatedAt: Float
  }

  schema {
    query: Query
    mutation: Mutation
  }
`];

const rootResolvers = {
  Query: {
    users(root, args, context, info) {
      const topLevelFields = Object.keys(graphqlFields(info));
      return fetch(`/api/user?fields=${topLevelFields.join(',')}`);
    }
  }
};

const schema = [...rootSchema];
const resolvers = Object.assign({}, rootResolvers);

// Create schema
const executableSchema = makeExecutableSchema({
  typeDefs: schema,
  resolvers,
});
Sure you can. This is actually the same functionality that is implemented in the join-monster package for SQL-based DBs. There's a talk by its creator: https://www.youtube.com/watch?v=Y7AdMIuXOgs
Take a look at their info-analysing code to get started - https://github.com/stems/join-monster/blob/master/src/queryASTToSqlAST.js#L6-L30
Would love to see a projection-monster package for us mongo users :)
UPDATE:
There is a package that creates a projection object from info on npm: https://www.npmjs.com/package/graphql-mongodb-projection
You can generate a MongoDB projection from the info argument. Here is sample code you can follow:
/**
 * @description - Gets MongoDB projection from graphql query
 *
 * @return { object }
 * @param { object } info
 * @param { model } model - MongoDB model for referencing
 */
function getDBProjection(info, model) {
  const {
    schema: { obj }
  } = model;
  const keys = Object.keys(obj);
  const projection = {};
  const { selections } = info.fieldNodes[0].selectionSet;

  for (let i = 0; i < keys.length; i++) {
    const key = keys[i];
    const isSelected = selections.some(
      selection => selection.name.value === key
    );
    projection[key] = isSelected;
  }

  console.log(projection);
  return projection;
}

module.exports = getDBProjection;
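A possible way to use the helper above (UserModel and the resolver arguments are illustrative assumptions):

const getDBProjection = require('./getDBProjection');

// inside a resolver: (root, args, context, info)
const projection = getDBProjection(info, UserModel);
return UserModel.findOne({ email: args.email }, projection);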
With a few helper functions you can use it like this (TypeScript version):
import { parceGqlInfo, query } from "@backend";
import { GraphQLResolveInfo } from "graphql";

export const user = async (parent: unknown, args: unknown, ctx: unknown, info: GraphQLResolveInfo): Promise<User | null> => {
  const { dbQueryStr } = parceGqlInfo(info, userFields, "id");
  const [user] = await query(`SELECT ${dbQueryStr} FROM users WHERE id=$1;`, [1]);
  return user;
};

Helper functions below. A few points:
- gql_uid is used as the ID! string type derived from the primary key, so the DB types don't have to change
- the required option is there for dataloaders (in case a field was not requested by the user)
- allowedFields filters out extra fields from info, such as '__typename'
- queryPrefix is used if you need to prefix the selected fields, e.g. select u.id from users u
const userFields = [
  "gql_uid",
  "id",
  "email"
]

// merge arrays and delete duplicates
export const mergeDedupe = <T>(arr: any[][]): T => {
  // @ts-ignore
  return ([...new Set([].concat(...arr))] as unknown) as T;
};

import { parse, simplify, ResolveTree } from "graphql-parse-resolve-info";
import { GraphQLResolveInfo } from "graphql";

export const getQueryFieldsFromInfo = <Required = string>(info: GraphQLResolveInfo, options: { required?: Required[] } = {}): string[] => {
  const { fields } = simplify(parse(info) as ResolveTree, info.returnType) as { fields: { [key: string]: { name: string } } };

  let astFields = Object.entries(fields).map(([, v]) => v.name);

  if (options.required) {
    astFields = mergeDedupe([astFields, options.required]);
  }

  return astFields;
};

export const onlyAllowedFields = <T extends string | number>(raw: T[] | readonly T[], allowed: T[] | readonly T[]): T[] => {
  return allowed.filter((f) => raw.includes(f));
};
export const parceGqlInfo = (
  info: GraphQLResolveInfo,
  allowedFields: string[] | readonly string[],
  gqlUidDbAlliasField: string,
  options: { required?: string[]; queryPrefix?: string } = {}
): { pureDbFields: string[]; gqlUidRequested: boolean; dbQueryStr: string } => {
  const fieldsWithGqlUid = onlyAllowedFields(getQueryFieldsFromInfo(info, options), allowedFields);

  return {
    pureDbFields: fieldsWithGqlUid.filter((i) => i !== "gql_uid"),
    gqlUidRequested: fieldsWithGqlUid.includes("gql_uid"),
    dbQueryStr: fieldsWithGqlUid
      .map((f) => {
        const dbQueryStrField = f === "gql_uid" ? `${gqlUidDbAlliasField}::Text AS gql_uid` : f;
        return options.queryPrefix ? `${options.queryPrefix}.${dbQueryStrField}` : dbQueryStrField;
      })
      .join(),
  };
};