Error while executing unit test: argument type error - flutter

I am writing a unit test for a converter that converts a group from a local model to a database model and vice versa. When I run the test, an argument type error occurs.
I split the test into two groups: in one I convert from the database model to the local one, and in the other I convert the opposite way. The first test passes, but as soon as I add the second test, this error appears. How can it be solved?
My Test:
// Imports assumed by the test; the model/converter imports are project-specific and omitted.
import 'package:collection/collection.dart';
import 'package:objectbox/objectbox.dart';
import 'package:test/test.dart'; // or package:flutter_test/flutter_test.dart

void main() {
  group('Group converter (db model to a local model)', () {
    test('The result should be a database model converted to a local model',
        () {
      final groupConverter = GroupDbConverter();
      GroupEntity dbModel = GroupEntity(
        id: 1,
        groupName: 'Test group',
        students: ToMany(),
      );
      Group expected = Group(
        id: 1,
        groupName: 'Test group',
        students: [],
      );
      final actual = groupConverter.inToOut(dbModel);
      // Function for comparing the students lists.
      Function deepEq = const DeepCollectionEquality().equals;
      bool isEqual = deepEq(expected.students, actual.students);
      expect(actual.id == expected.id, true);
      expect(actual.groupName == expected.groupName, true);
      expect(isEqual, true);
    });
  });

  group('Group converter (local model to a db model)', () {
    test('The result should be a local model converted to a database model',
        () {
      final groupConverter = GroupDbConverter();
      Group grModel = Group(
        id: 3,
        groupName: 'Test',
        students: [],
      );
      GroupEntity expected = GroupEntity(
        id: 3,
        groupName: 'Test',
        students: ToMany(),
      );
      final actual = groupConverter.outToIn(grModel);
      // Function for comparing the students lists.
      Function deepEq = const DeepCollectionEquality().equals;
      bool isEqual = deepEq(expected.students, actual.students);
      expect(actual.id == expected.id, true);
      expect(actual.groupName == expected.groupName, true);
      expect(isEqual, true);
    });
  });
}

Related

Use Promise.all() inside mongodb transaction is not working propertly in Nestjs

Hi, I am working on a NestJS project using MongoDB and Mongoose.
I have to create transactions with a lot of promises inside, so I thought it would be a good idea to use Promise.all() inside my transaction for performance reasons.
Unfortunately, when I started working with my transactions I hit a first issue. I was using session.startTransaction(); and my code was throwing the following error:
Given transaction number 2 does not match any in-progress transactions. The active transaction number is 1
The error was thrown sometimes, not always, but it was a problem.
So I read the question Mongoose `Promise.all()` Transaction Error and started using withTransaction(). This solved the problem, but now my code does not work properly.
The code basically takes an array of bookings and creates them; it also needs to create combos of the bookings. What I need is that if the creation of a booking or a combo fails, nothing should be inserted. For performance I use Promise.all().
But when I execute the function it sometimes creates more bookings than expected: if bookingsArray has size 2, it sometimes creates 3 bookings and I just don't know why. This occurs very rarely, but it is a big issue.
If I remove the Promise.all() from the transaction it works perfectly, but without Promise.all() the query is slow. So I would like to know whether there is an error in my code, or whether you simply cannot use Promise.all() inside a MongoDB transaction in NestJS.
Main function with the transaction and Promise.all(); this one sometimes creates the wrong number of bookings:
async createMultipleBookings(
  userId: string,
  bookingsArray: CreateBookingDto[],
): Promise<void> {
  const session = await this.connection.startSession();
  await session.withTransaction(async () => {
    const promiseArray = [];
    for (let i = 0; i < bookingsArray.length; i++) {
      promiseArray.push(
        this.bookingRepository.createSingleBooking(
          userId,
          bookingsArray[i],
          session,
        ),
      );
    }
    promiseArray.push(
      this.bookingRepository.createCombosBookings(bookingsArray, session),
    );
    await Promise.all(promiseArray);
  });
  session.endSession();
}
Main function with the transaction and without Promise.all(); works fine, but slow:
async createMultipleBookings(
  userId: string,
  bookingsArray: CreateBookingDto[],
): Promise<void> {
  const session = await this.connection.startSession();
  await session.withTransaction(async () => {
    for (let i = 0; i < bookingsArray.length; i++) {
      await this.bookingRepository.createSingleBooking(
        userId,
        bookingsArray[i],
        session,
      );
    }
    await this.bookingRepository.createCombosBookings(bookingsArray, session);
  });
  session.endSession();
}
Functions called inside the main function
async createSingleBooking(
  userId: string,
  createBookingDto: CreateBookingDto,
  session: mongoose.ClientSession | null = null,
) {
  const product = await this.productsService.getProductById(
    createBookingDto.productId,
    session,
  );
  const user = await this.authService.getUserByIdcustomAttributes(
    userId,
    ['profile', 'name'],
    session,
  );
  const laboratory = await this.laboratoryService.getLaboratoryById(
    product.laboratoryId,
    session,
  );
  if (product.state !== State.published)
    throw new BadRequestException(
      `product ${createBookingDto.productId} is not published`,
    );
  const bookingTracking = this.createBookingTraking();
  const value = product.prices.find(
    (price) => price.user === user.profile.role,
  );
  const bookingPrice: Price = !value
    ? {
        user: user.profile.role,
        measure: Measure.valorACotizar,
        price: null,
      }
    : value;
  await new this.model({
    ...createBookingDto,
    userId,
    canceled: false,
    productType: product.productType,
    bookingTracking,
    bookingPrice,
    laboratoryId: product.laboratoryId,
    userName: user.name,
    productName: product.name,
    laboratoryName: laboratory.name,
    facultyName: laboratory.faculty,
    createdAt: new Date(),
  }).save({ session });
  await this.productsService.updateProductOutstanding(
    createBookingDto.productId,
    session,
  );
}
async createCombosBookings(
  bookingsArray: CreateBookingDto[],
  session: mongoose.ClientSession,
): Promise<void> {
  const promiseArray = [];
  for (let i = 1; i < bookingsArray.length; i++) {
    promiseArray.push(
      this.combosService.createCombo(
        {
          productId1: bookingsArray[0].productId,
          productId2: bookingsArray[i].productId,
        },
        session,
      ),
    );
  }
  await Promise.all(promiseArray);
}
Also, this is how I create the connection element:
export class BookingService {
  constructor(
    @InjectModel(Booking.name) private readonly model: Model<BookingDocument>,
    private readonly authService: AuthService,
    private readonly bookingRepository: BookingRepository,
    @InjectConnection()
    private readonly connection: mongoose.Connection,
  ) {}
  // ...
}

race problem with mocha unit testing Firestore onSnapshot, test ends before onSnapshot returns data

I have a function I'm testing. It's called from the constructor on an object.
constructor(authorID: string) {
  this.loadRecipes(authorID);
}

private loadRecipes = (author_id: string) => {
  const first = query(collection(db, "cookbook"), where("author_id", "==", author_id));
  this._unsubscribe = onSnapshot(first, (querySnapshot) => {
    this._recipes = [];
    querySnapshot.forEach((doc) => {
      this._recipes.push(new Recipe(doc.id, doc.data().author_id, "", "", -1, "draft", [], [], [], 0, ""));
    });
  });
};
I'm calling it from a mocha test:
it("Creates a recipe list by author ID", () => {
authorRecipes = new RecipeList(author_id);
assert(authorRecipes.list.length>0);
});
The code works fine, but the test fails. The problem is that the test finishes long before the onSnapshot callback ever fires and populates the list. Is there a way to force the query to populate, sort of like async/await? Or a way to force mocha to wait? Setting a breakpoint and debugging, the onSnapshot callback is eventually called, so the code works. I just cannot do any follow-on tests because the data isn't there yet.
I ended up adding a "loading" flag to the class:
private loadRecipes = (author_id: string, _max?: number, _start?: number) => {
  this._loading = true;
  const first = query(collection(db, "cookbook"), where("author_id", "==", author_id));
  this._unsubscribe = onSnapshot(first, (querySnapshot) => {
    this._recipes = [];
    querySnapshot.forEach((doc) => {
      this._recipes.push(new Recipe(doc.id, doc.data().author_id, "", "", -1, "draft", [], [], [], 0, ""));
    });
    this._loading = false;
  });
};
And then watched the flag with a delay in the test:
it("Creates a recipe list by author ID", async () => {
authorRecipes = new RecipeList(author_id);
while (authorRecipes.loading) {
await timeout(1000);
}
assert(!authorRecipes.loading);
});
Not super elegant, but gets the job done.
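An alternative to polling a flag is to have the class expose a promise that resolves on the first snapshot, so the test can await it directly. This is only a sketch under assumptions: `_loaded` and `whenLoaded()` are hypothetical additions to the RecipeList class shown above, using the same Firestore v9 modular API.

// Hypothetical additions inside the RecipeList class:
private _loaded!: Promise<void>;

private loadRecipes = (author_id: string) => {
  const first = query(collection(db, "cookbook"), where("author_id", "==", author_id));
  this._loaded = new Promise<void>((resolve) => {
    this._unsubscribe = onSnapshot(first, (querySnapshot) => {
      this._recipes = [];
      querySnapshot.forEach((doc) => {
        this._recipes.push(new Recipe(doc.id, doc.data().author_id, "", "", -1, "draft", [], [], [], 0, ""));
      });
      resolve(); // first snapshot => list is populated
    });
  });
};

public whenLoaded(): Promise<void> {
  return this._loaded;
}

// The test then awaits the promise instead of looping on a flag:
it("Creates a recipe list by author ID", async () => {
  authorRecipes = new RecipeList(author_id);
  await authorRecipes.whenLoaded();
  assert(authorRecipes.list.length > 0);
});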

How to implement a node query resolver with apollo / graphql

I am working on implementing a node interface for GraphQL, a pretty standard design pattern.
I'm looking for guidance on the best way to implement a node query resolver for GraphQL:
node(id: ID!): Node
The main thing I am struggling with is how to encode/decode the typename into the ID so that we can find the right table/collection to query.
Currently I am using PostgreSQL's uuid strategy with pgcrypto to generate ids.
Where is the right seam in the application to do this?
- It could be done in the primary key generation at the database.
- It could be done at the GraphQL seam (using a visitor pattern, maybe).
And once the best seam is picked: how/where do you encode/decode?
Note my stack is:
ApolloClient/Server (from graphql-yoga)
node
TypeORM
PostgreSQL
The id exposed to the client (the global object id) is not persisted on the backend; the encoding and decoding should be done by the GraphQL server itself. Here's a rough example based on how Relay does it:
import Foo from '../../models/Foo'

function encode (id, __typename) {
  return Buffer.from(`${id}:${__typename}`, 'utf8').toString('base64');
}

function decode (objectId) {
  const decoded = Buffer.from(objectId, 'base64').toString('utf8')
  const parts = decoded.split(':')
  return {
    id: parts[0],
    __typename: parts[1],
  }
}

const typeDefs = `
  type Query {
    node(id: ID!): Node
  }

  type Foo implements Node {
    id: ID!
    foo: String
  }

  interface Node {
    id: ID!
  }
`;

// Just in case model name and typename do not always match
const modelsByTypename = {
  Foo,
}

const resolvers = {
  Query: {
    node: async (root, args, context) => {
      const { __typename, id } = decode(args.id)
      const Model = modelsByTypename[__typename]
      const node = await Model.getById(id)
      return {
        ...node,
        __typename,
      };
    },
  },
  Foo: {
    id: (obj) => encode(obj.id, 'Foo')
  }
};
Note: by returning the __typename, we're letting GraphQL's default resolveType behavior figure out which type the interface is returning, so there's no need to provide a resolver for __resolveType.
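To make the opaque ID concrete, here is roughly what the round trip through the two helpers above looks like (the values are illustrative):

// Encode a row id plus its typename into one opaque global id, then reverse it.
const globalId = encode('42', 'Foo');          // -> "NDI6Rm9v" (base64 of "42:Foo")
const { id, __typename } = decode(globalId);   // -> { id: '42', __typename: 'Foo' }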
Edit: to apply the id logic to multiple types:
function addIDResolvers (resolvers, types) {
  for (const type of types) {
    if (!resolvers[type]) {
      resolvers[type] = {}
    }
    // The id resolver needs to be a function of the parent object.
    resolvers[type].id = (obj) => encode(obj.id, type)
  }
}

addIDResolvers(resolvers, ['Foo', 'Bar', 'Qux'])
@Jonathan I can share an implementation that I have, and you can see what you think. This is using graphql-js, MongoDB, and Relay on the client.
/**
 * Given a function to map from an ID to an underlying object, and a function
 * to map from an underlying object to the concrete GraphQLObjectType it
 * corresponds to, constructs a `Node` interface that objects can implement,
 * and a field config for a `node` root field.
 *
 * If the typeResolver is omitted, object resolution on the interface will be
 * handled with the `isTypeOf` method on object types, as with any GraphQL
 * interface without a provided `resolveType` method.
 */
export function nodeDefinitions<TContext>(
  idFetcher: (id: string, context: TContext, info: GraphQLResolveInfo) => any,
  typeResolver?: ?GraphQLTypeResolver<*, TContext>,
): GraphQLNodeDefinitions<TContext> {
  const nodeInterface = new GraphQLInterfaceType({
    name: 'Node',
    description: 'An object with an ID',
    fields: () => ({
      id: {
        type: new GraphQLNonNull(GraphQLID),
        description: 'The id of the object.',
      },
    }),
    resolveType: typeResolver,
  });

  const nodeField = {
    name: 'node',
    description: 'Fetches an object given its ID',
    type: nodeInterface,
    args: {
      id: {
        type: GraphQLID,
        description: 'The ID of an object',
      },
    },
    resolve: (obj, { id }, context, info) => (id ? idFetcher(id, context, info) : null),
  };

  const nodesField = {
    name: 'nodes',
    description: 'Fetches objects given their IDs',
    type: new GraphQLNonNull(new GraphQLList(nodeInterface)),
    args: {
      ids: {
        type: new GraphQLNonNull(new GraphQLList(new GraphQLNonNull(GraphQLID))),
        description: 'The IDs of objects',
      },
    },
    resolve: (obj, { ids }, context, info) =>
      Promise.all(ids.map(id => Promise.resolve(idFetcher(id, context, info)))),
  };

  return { nodeInterface, nodeField, nodesField };
}
Then:
import { nodeDefinitions } from './node';

const { nodeField, nodesField, nodeInterface } = nodeDefinitions(
  // A method that maps from a global id to an object
  async (globalId, context) => {
    const { id, type } = fromGlobalId(globalId);
    if (type === 'User') {
      return UserLoader.load(context, id);
    }
    ....
    ...
    ...
    // it should not get here
    return null;
  },
  // A method that maps from an object to a type
  obj => {
    if (obj instanceof User) {
      return UserType;
    }
    ....
    ....
    // it should not get here
    return null;
  },
);
The load method resolves the actual object. This part you would have to adapt to work with your specific DB, etc.
If it's not clear, you can ask! Hope it helps :)
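For completeness, the returned nodeField and nodesField are then exposed on the root query type. This is only a sketch in graphql-js; the remaining root fields are placeholders:

import { GraphQLObjectType, GraphQLSchema } from 'graphql';

// `nodeField` and `nodesField` come from the nodeDefinitions(...) call above.
const QueryType = new GraphQLObjectType({
  name: 'Query',
  fields: () => ({
    node: nodeField,
    nodes: nodesField,
    // ...the rest of your root fields
  }),
});

export const schema = new GraphQLSchema({ query: QueryType });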

How to make GraphQL automatically insert current UTC upon mutation?

My mutation code looks like this:
Mutation: {
  addPost: async (parent, args) => {
    // Add new post to dbPosts
    const task = fawn.Task();
    task.save(
      dbPost,
      {
        _id: new mongoose.Types.ObjectId(),
        title: args.title,
        content: args.content,
        created: args.created,
        author: {
          id: args.author_id,
          first_name: args.author_first_name,
          last_name: args.author_last_name,
        }
      }
    );
  }
}
The schema I'm working with is defined as:
scalar DateTime

type Query {
  posts: [Post],
  post(id: ID!): Post,
}

type Mutation {
  addPost(
    title: String!,
    content: String!,
    created: DateTime!,
    author_id: String!,
    author_first_name: String!
    author_last_name: String!): Post,
}

type Post {
  id: ID!
  title: String!,
  content: String!,
  author: Author!,
  created: DateTime,
}
As is apparent, I'm also using a custom scalar to handle date/time values. This custom scalar, DateTime, resolves as:
const { GraphQLScalarType } = require('graphql/type');

const tmUTC = () => {
  const tmLoc = new Date();
  return tmLoc.getTime() + tmLoc.getTimezoneOffset() * 60000;
};

DateTime = new GraphQLScalarType({
  name: 'DateTime',
  description: 'Date/Time custom scalar type',
  parseValue: () => { // runs on mutation
    return tmUTC();
  },
  serialize: (value) => { // runs on query
    return new Date(value.getTime());
  },
  parseLiteral: () => {
    return tmUTC();
  },
});

module.exports = DateTime;
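For reference, the scalar only takes effect once it is wired into the resolver map alongside Query and Mutation. A minimal sketch, assuming an Apollo-style / graphql-tools setup where a resolvers object is passed to the server (the require path is illustrative):

const DateTime = require('./scalars/DateTime'); // the module exported above

const resolvers = {
  DateTime,              // maps the `scalar DateTime` declaration to the custom type
  Query: { /* ... */ },
  Mutation: { /* ... */ },
};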
Now this works fine and I'm able to insert and retrieve entries with the timestamp as expected. However, I still have to pass a dummy argument for the created field in order for the DateTime resolver to kick in:
mutation {
  addPost(
    title: "Ghostbusters",
    content: "Lots and lots of ghosts here...",
    created: "",
    author_id: "5ba0c2491c9d440000ac8fc3",
    author_first_name: "Bill",
    author_last_name: "Murray"
  ){
    title
    content
    id
    created
  }
}
I can even leave that field blank and the time will still get recorded. But I cannot just leave it out in my mutation call. Is there any way to achieve this? The objective here is to have GraphQL automatically execute the DateTime resolver without the user having to explicitly enter a created field in the mutation call.
In your mutation, remove the requirement for created to be required:
type Mutation {
  addPost(
    title: String!,
    content: String!,
    # created: DateTime!, changed in next line
    created: DateTime, # no ! means not required
    author_id: String!,
    author_first_name: String!
    author_last_name: String!): Post,
}
Then, in your task, merge in the created arg if it is not provided:
addPost: async (parent, args) => {
  // if args does not have created, make it here if it is required by task
  const task = fawn.Task();
  task.save(
    dbPost,
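The answer is cut off above; here is a minimal sketch of the idea it describes, assuming the tmUTC() helper from the scalar module and the same fawn task shape as in the question:

addPost: async (parent, args) => {
  // Default `created` on the server when the client omits it.
  const created = args.created ?? tmUTC();

  const task = fawn.Task();
  task.save(dbPost, {
    _id: new mongoose.Types.ObjectId(),
    title: args.title,
    content: args.content,
    created,
    author: {
      id: args.author_id,
      first_name: args.author_first_name,
      last_name: args.author_last_name,
    },
  });
}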

BookshelfJs failing to bring in nested relationship on create

Let's say we have a join table vehicle_inspections and another join table inspection_actions, as well as basic tables for actions, vehicles, and inspections.
Let's say I want the following DB entries:
vehicles
----------------------------
id    make
----------------------------
1     Toyota

actions
-------------------------------
id    description
-------------------------------
2     Check Tire Pressure

inspections
-------------------------------
id    location     date
-------------------------------
3     New York     tomorrow

vehicle_inspections
--------------------------------
vehicle_id    inspection_id
--------------------------------
1             3

inspection_actions
--------------------------------
inspection_id    action_id
--------------------------------
3                2
and the following Bookshelf classes:
inspection_actions.js
(function () {
  'use strict';
  var Repository = require('../repository');

  module.exports = Repository.Model.extend({
    tableName: 'inspection_actions',
  });
})();
vehicle_inspections.js
(function () {
  'use strict';
  var Repository = require('../repository');

  module.exports = Repository.Model.extend({
    tableName: 'vehicle_inspections',
    inspection: function () {
      return this.belongsTo(require('inspection'));
    },
    fetchOrCreate: function (vehicleId, inspectionId, options) {
      var self = this;
      return self.query(function (qb) {
        qb.where({
          vehicle_id: vehicleId,
          inspection_id: inspectionId
        });
      }).fetch(options || {}).then(function (model) {
        if (!model) {
          // create the join entry when it does not exist yet
          return self.save({
            vehicle_id: vehicleId,
            inspection_id: inspectionId
          });
        }
        return model;
      });
    }
  });
})();
inspection.js
...
module.exports = Repository.Model.extend(_.extend({
  tableName: 'inspections',
  actions: function () {
    return this.hasMany(require('./inspection-action'));
  }
}));
And a route:
new VehicleInspection().fetchOrCreate(req.params.vehicle_id, req.params.inspection_id, {withRelated: ['inspection.actions']})
  .then(function (vehicleInspection) {
    var inspection = vehicleInspection.related('inspection');
    console.log(inspection);
    console.log(inspection.related('actions'));
  })
The inspection console log prints out the correct inspection; however, regardless of what is in the database, the second console.log prints out an empty result:
{ length: 0,
models: [],
_byId: {},
...
targetIdAttribute: 'id',
foreignKey: undefined,
parentId: undefined,
parentTableName: 'tasks',
parentIdAttribute: 'id',
parentFk: undefined } }
This "bad" behaviour only occurs the first time a projectTasks entry is being created. What appears to be happening is that the inspection_action table is not being populated through the nested withRelated. How could I get this working nested create working?
I'm not completely clear on what you are trying to achieve, but here is how I would generally set things up. First I'd create a base model (assuming it's saved as base.js). I think you are going to have some problems with circular dependencies, so using the Bookshelf registry plugin would be good:
var config = {
  client: // whatever client you are using,
  connection: // url to your database
};

var db = require('knex')(config);
var Bookshelf = require('bookshelf')(db);

var Base = Bookshelf.Model.extend({
  // Put anything here that will be helpful for your use case
});

Bookshelf.plugin('registry');
Base.model = Bookshelf.model.bind(Bookshelf);

module.exports = Base;
Next create your Vehicle model:
require('inspection');
require('action');
var Base = require('base');

var Vehicle = Base.extend({
  tableName: 'vehicles',
  inspections: function () {
    return this.belongsToMany('Inspection',
      'inspections_vehicles', 'vehicle_id', 'inspection_id');
  },
  actions: function () {
    return this.belongsToMany('Action',
      'actions_vehicles', 'vehicle_id', 'action_id');
  }
});

module.exports = Base.model('Vehicle', Vehicle);
Then an inspection model:
require('vehicle');
var Base = require('base');

var Inspection = Base.extend({
  tableName: 'inspection',
  vehicles: function () {
    return this.belongsToMany('Vehicle',
      'inspections_vehicles', 'inspection_id', 'vehicle_id');
  }
});

module.exports = Base.model('Inspection', Inspection);
Finally an action model:
var Base = require('base');

var Action = Base.extend({
  tableName: 'actions',
});

module.exports = Base.model('Action', Action);
Now assuming that the database isn't already filled in with the data you supplied, we can populate it:
var Inspection = require('inspection');
var Vehicle = require('vehicle');
var Action = require('action');

var toyota;
var newYorkInspection;

Vehicle.forge().save({name: 'Toyota'})
  .then(function (vehicle) {
    toyota = vehicle;
    return Inspection.forge().save({location: 'New York', date: 'Tomorrow'});
  }).then(function (inspection) {
    newYorkInspection = inspection;
    return toyota.inspections().attach(newYorkInspection);
  }).then(function () {
    return Action.forge().save({description: 'Check Tire Pressure'});
  }).then(function (tirePressureAction) {
    return toyota.actions().attach(tirePressureAction);
  });
Now I can fetch the toyota vehicle with the related actions and inspections:
var Vehicle = require('vehicle');

return Vehicle.forge({'name': 'Toyota'}).fetch({
  withRelated: ['inspections', 'actions']
}).then(function (toyota) {
  var toyotaInspections = toyota.related('inspections');
  var toyotaActions = toyota.related('actions');
});
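As a quick sanity check, a sketch of how the loaded relations could be inspected inside that .then callback (this relies on Bookshelf including loaded relations when serializing with toJSON()):

// Inside the .then(...) above:
console.log(toyota.toJSON());                      // includes the `inspections` and `actions` arrays
console.log(toyota.related('inspections').length); // 1 with the data seeded earlier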