Unknown error in postgres and knex migration - postgresql

I tried to run the created migration, but got an unknown error. I'm new to this type of work with databases. Maybe someone has encountered this problem? Script for the migration: npx knex migrate:latest --knexfile ./app/database/knexfile.js. Maybe it has to do with the knex configuration, but the paths specified in the configuration are correct. Seeds are not used yet.
knexfile:
require('dotenv').config();

const db_user = process.env.DB_USER;
const db_password = process.env.DB_PASSWORD;
const db_name = process.env.DB_NAME;

/**
 * @type { Object.<string, import("knex").Knex.Config> }
 */
module.exports = {
  development: {
    client: 'postgresql',
    connection: {
      database: `${db_name}`,
      user: `${db_user}`,
      password: `${db_password}`,
    },
    pool: {
      min: 2,
      max: 10,
    },
    migrations: {
      tableName: 'knex_migrations',
      directory: './migrations',
    },
    seeds: {
      directory: './seeds',
    },
  },
};
Migration file:
// eslint-disable-next-line node/exports-style
exports.up = (knex) => {
  return knex.schema
    .createTable('users', (table) => {
      table.uuid('id').primary();
      table.string('name');
      table.string('email');
      table.sting('password');
    })
    .createTable('watchLists', (table) => {
      table.uuid('id').primary();
      table
        .foreign('userId')
        .references('id')
        .inTable('users')
        .onDelete('CASCADE');
      table.string('name');
    })
    .createTable('films', (table) => {
      table.uuid('id').primary();
      table.string('name');
      table.string('rate');
    })
    .createTable('addedFilms', (table) => {
      table.uuid('id').primary();
      table
        .foreign('watchListId')
        .references('id')
        .inTable('watchList')
        .onDelete('CASCADE');
      table
        .foreign('filmId')
        .references('id')
        .inTable('films')
        .onDelete('CASCADE');
    });
};

// eslint-disable-next-line node/exports-style
exports.down = (knex) => {
  return knex.schema
    .dropTableIfExists('addedFilms')
    .dropTableIfExists('films')
    .dropTableIfExists('users')
    .dropTableIfExists('watchList');
};
Error:
password authentication failed for user "undefined"
error: password authentication failed for user "undefined"
at Parser.parseErrorMessage (D:\All-Pet-Projects\effective-app\evgeny-trubish-node\node_modules\pg-protocol\dist\parser.js:287:98)
at Parser.handlePacket (D:\All-Pet-Projects\effective-app\evgeny-trubish-node\node_modules\pg-protocol\dist\parser.js:126:29)
at Parser.parse (D:\All-Pet-Projects\effective-app\evgeny-trubish-node\node_modules\pg-protocol\dist\parser.js:39:38)
at Socket.<anonymous> (D:\All-Pet-Projects\effective-app\evgeny-trubish-node\node_modules\pg-protocol\dist\index.js:11:42)
at Socket.emit (node:events:390:28)
at addChunk (node:internal/streams/readable:315:12)
at readableAddChunk (node:internal/streams/readable:289:9)
at Socket.Readable.push (node:internal/streams/readable:228:10)
at TCP.onStreamRead (node:internal/stream_base_commons:199:23)

Related

node-postgres error: there is no parameter $4

This is my first post, please be kind.
I get "error: there is no parameter $4" when I try to run this code using node-postgres :
const updateUser = async (req, res) => {
  const id = parseInt(req.params.id);
  const { username, email, isadmin } = req.body;
  console.log(id);
  try {
    pool.query(`UPDATE users SET username = $1, email = $2, isadmin = $3 WHERE id = $4`), [username, email, isadmin, id], (err, results) => {
      if (err) {
        throw err;
      }
      res.status(200).send(`User updated with ID: ${id}`)
    }
  } catch (err) {
    throw err;
  }
}
Error message:
Server running on port 4001
7
C:\Users\Elisabeth\node_modules\pg-protocol\dist\parser.js:287
const message = name === 'notice' ? new messages_1.NoticeMessage(length, messageValue) : new messages_1.DatabaseError(messageValue, length, name);
^
error: there is no parameter $4
at Parser.parseErrorMessage (C:\Users\Elisabeth\node_modules\pg-protocol\dist\parser.js:287:98)
at Parser.handlePacket (C:\Users\Elisabeth\node_modules\pg-protocol\dist\parser.js:126:29)
at Parser.parse (C:\Users\Elisabeth\node_modules\pg-protocol\dist\parser.js:39:38)
at Socket.<anonymous> (C:\Users\Elisabeth\node_modules\pg-protocol\dist\index.js:11:42)
at Socket.emit (node:events:537:28)
at addChunk (node:internal/streams/readable:324:12)
at readableAddChunk (node:internal/streams/readable:297:9)
at Readable.push (node:internal/streams/readable:234:10)
at TCP.onStreamRead (node:internal/stream_base_commons:190:23) {
length: 94,
severity: 'ERROR',
code: '42P02',
detail: undefined,
hint: undefined,
position: '69',
internalPosition: undefined,
internalQuery: undefined,
where: undefined,
schema: undefined,
table: undefined,
column: undefined,
dataType: undefined,
constraint: undefined,
file: 'parse_expr.c',
line: '845',
routine: 'transformParamRef'
}
I have checked, and the id exists in the database.
The param is logged in the console, so it works.

Why is there Failed Auth error when connecting fastify server to second mongoDB cluster?

I'm trying to add a database connection to a Fastify server. For various reasons, the scenario is that there are two separate MongoDB instances that must be connected to.
index.js:
'use strict'
const dotenv = require('dotenv');
dotenv.config();
const path = require('path');
const tunnel = require('tunnel-ssh');
const Autoload = require('fastify-autoload');
const fastifyMongoose = require('fastify-mongoose-driver');
const fastify = require('fastify');
const { runTasks } = require('./taskRunner.js')
const { connectPerformanceDB } = require('./performanceDB.js')
const fastifyServer = fastify({ logger: process.env.DEVELOPMENT });
const User = require('./schemas/User');
const Promo = require('./schemas/Promo');
const Result = require('./schemas/PageSpeedResult')
fastifyServer.register(Autoload, { dir: path.join(__dirname, 'routes') });
const tunnelConfig = {
username: '*****',
host: '*****.com',
privateKey: require('fs').readFileSync('../../.ssh/id_rsa'),
srcPort: 27017,
dstPort: 27017
};
tunnel(tunnelConfig, error => {
if (error) console.log('SSH connection error: ', + error);
fastifyServer.register(fastifyMongoose, {
uri: process.env.MONGO_CONNECTION_STRING,
settings: {
user: '*************',
pass: '*************',
useNewUrlParser: true,
useCreateIndex: true,
useUnifiedTopology: true,
config: {
autoIndex: true
}
},
models: [User, Promo],
useNameAndAlias: true
});
fastifyServer.register(fastifyMongoose, {
uri: process.env.PERFORMANCE_DB_CONNECTION_STRING,
settings: {
user: '******',
pass: '**********',
useNewUrlParser: true,
useCreateIndex: true,
useUnifiedTopology: true,
config: {
autoIndex: true
}
},
models: [Result],
useNameAndAlias: true
})
fastifyServer.listen(process.env.PORT, (err, address) => {
if (err) throw err;
fastifyServer.log.info(`server listening on ${address}`);
});
});
The issue I'm running into is that the current code errors out like so:
index.js:102
if (err) throw err;
^
MongoError: Authentication failed.
The line it errors on is the if statement inside this block:
fastifyServer.listen(process.env.PORT, (err, address) => {
  if (err) throw err;
  fastifyServer.log.info(`server listening on ${address}`);
});
I've confirmed that the user, pass, and database are correct.
The connection URI const looks like mongodb://localhost/<name_of_cluster>; I have also tried the connection string given in Mongo's web application under the Connect tab.
Any suggestions on the failed auth error or on implementing a second DB cluster with Fastify?
UPDATE:
I've tried multiple connection strings: both the one mentioned in the post, as well as mongodb://<username>:<pass>@pagespeedinsights-hjdvp.mongodb.net/<database>?retryWrites=true&w=majority and mongodb+srv://<username>:<pass>@pagespeedinsights-hjdvp.mongodb.net/<database>?retryWrites=true&w=majority
The first errors in the server with:
Error: getaddrinfo ENOTFOUND pagespeedinsights-hjdvp.mongodb.net
The second errors (from the same line as the initial one above) in the server with:
index.js:102
if (err) throw err;
^
TypeError: schema[key].forEach is not a function
The problem comes from the plugin require('fastify-mongoose-driver'), since this line is executed every time you register the plugin:
await mongoose.connect(uri, settings);
Here is the source code.
So that plugin uses a singleton connection for every registration, overriding the previous ones.
This is what happens under the hood:
const mongoose = require('mongoose')

// docker run -d -p 27018:27017 --rm -e MONGO_INITDB_ROOT_USERNAME=adminOne -e MONGO_INITDB_ROOT_PASSWORD=password --name mongoOne mongo:4
// docker run -d -p 27019:27017 --rm -e MONGO_INITDB_ROOT_USERNAME=adminTwo -e MONGO_INITDB_ROOT_PASSWORD=password --name mongoTwo mongo:4

;(async function () {
  await mongoose.connect('mongodb://adminOne:password@localhost:27018')
  console.log('connected 1')
  await mongoose.connect('mongodb://adminTwo:password@localhost:27019')
  console.log('connected 2')

  const kittySchema = new mongoose.Schema({ name: String })
  const Kitten = mongoose.model('Kitten', kittySchema)

  // it will be saved on the second connection
  const silence = new Kitten({ name: 'Silence' })
  silence.save(function (err, fluffy) {
    if (err) return console.error(err)
    console.log(fluffy)
  })
})()
And this recreates the same error you have:
const fastify = require('fastify')()

fastify.register(require('fastify-mongoose-driver').plugin,
  {
    uri: 'mongodb://adminOne:password@localhost:27018',
    settings: {
      useNewUrlParser: true,
      config: { autoIndex: true }
    },
    models: [{ name: 'Kittens', schema: { name: String } }]
  }
)

fastify.register(require('fastify-mongoose-driver').plugin,
  {
    uri: 'mongodb://adminTwo:password@localhost:27019',
    settings: {
      useNewUrlParser: true,
      config: { autoIndex: true }
    },
    models: [{ name: 'Dogs', schema: { name: String } }]
  }
)

fastify.ready(err => {
  console.log(err) // TypeError: schema[key].forEach is not a function
})
You should use mongoose's multiple connections configuration to achieve what you need, and either implement this feature in the plugin yourself or wait for an upgrade of the plugin that supports it.
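For reference, here is a minimal sketch of the multiple-connections approach with mongoose.createConnection; the URIs reuse the docker containers from the example above, and the model names are placeholders:

const mongoose = require('mongoose')

// Each createConnection call returns an independent Connection object,
// so the two databases no longer override each other.
const connOne = mongoose.createConnection('mongodb://adminOne:password@localhost:27018', {
  useNewUrlParser: true,
  useUnifiedTopology: true
})
const connTwo = mongoose.createConnection('mongodb://adminTwo:password@localhost:27019', {
  useNewUrlParser: true,
  useUnifiedTopology: true
})

// Models are registered on a specific connection instead of on the global mongoose object.
const Kitten = connOne.model('Kitten', new mongoose.Schema({ name: String }))
const Dog = connTwo.model('Dog', new mongoose.Schema({ name: String }))

// new Kitten(...).save() goes to the first database, new Dog(...).save() to the second.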

Express Sequelize Error - Cannot read property 'findAll' of undefined

I have successfully connected Sequelize and Express using Sequelize's GitHub example with a few changes. I am now trying to run a simple Sequelize query to test the connection, but I keep receiving an error stating that the model I have queried is not defined.
// ./models/index.js
...
const sequelize = new Sequelize(process.env.DB, process.env.DB_USER, process.env.DB_PASS, {
  host: 'localhost',
  dialect: 'postgres'
});

// Test SEQUELIZE connection
sequelize
  .authenticate()
  .then(() => {
    console.log('Database connection has been established successfully.');
  })
  .catch(err => {
    console.error('Unable to connect to the database:', err);
  });

fs
  .readdirSync(__dirname)
  .filter(file => {
    return (file.indexOf('.') !== 0) && (file !== basename) && (file.slice(-3) === '.js');
  })
  .forEach(file => {
    var model = sequelize['import'](path.join(__dirname, file));
    db[model.name] = model;
  });

Object.keys(db).forEach(modelName => {
  if (db[modelName].associate) {
    db[modelName].associate(db);
  }
});

db.sequelize = sequelize;
db.Sequelize = Sequelize;

module.exports = db;
// ./routes/index.js
const models = require('../models');
const express = require('express');
const router = express.Router();

router.get('/contacts', (req, res) => {
  models.Contact.findAll().then(contacts => {
    console.log("All users:", JSON.stringify(contacts, null, 4));
  });
});

module.exports = router;
// ./models/contact.js
const Sequelize = require('sequelize');
var Model = Sequelize.Model;

module.exports = (sequelize, DataTypes) => {
  class Contact extends Model {}
  Contact.init({
    // attributes
    firstName: {
      type: Sequelize.STRING,
      allowNull: false
    },
    lastName: {
      type: Sequelize.STRING,
      allowNull: false
    }
  }, {
    sequelize,
    modelName: 'contact'
    // options
  });
  return Contact;
};
The error I am getting when using Postman to hit /contacts with a GET request is:
[nodemon] starting `node server.js`
The server is now running on port 3000!
Executing (default): SELECT 1+1 AS result
Database connection has been established successfully.
TypeError: Cannot read property 'findAll' of undefined
at router.get (C:\Users\username\desktop\metropolis\metropolis-backend\routes\index.js:6:20)
You are not requiring the model properly.
In ./routes/index.js, add the following line:
const Contact = require('./models/contact.js');
And then call Contact.findAll().
Second approach:
You can gather all your models by importing them into a loader.js file stored in the models directory. The whole job of this module is to bring the models together in one place and then export them from a single place.
It will look something like this:
// loader.js
const modelA = require('./modelA');
const modelB = require('./modelB');
const modelC = require('./modelC');
...

module.exports = {
  modelA,
  modelB,
  modelC,
  ...
}
And then you can require it in the following way:
In router/index.js:
const Models = require('./models');
const contact = Models.Contact;
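For completeness, here is a minimal usage sketch of the loader approach in an Express route, assuming each file the loader requires exports an already-initialized model (the relative path and the Contact key are placeholders, not from the original answer):

// ./routes/index.js
const express = require('express');
const Models = require('../models/loader.js'); // path assumed; adjust to where loader.js lives
const router = express.Router();

router.get('/contacts', (req, res) => {
  // Contact here is whatever key loader.js exports for the contact model
  Models.Contact.findAll().then(contacts => {
    res.json(contacts);
  });
});

module.exports = router;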

Cross schema joins with typeorm

Steps to reproduce:
Create 2 schemas, a and b.
Create tables user and photo in both schemas, with these respective entities: UserA, PhotoA, UserB, PhotoB.
// PhotoA
import {Entity, Column, PrimaryGeneratedColumn, ManyToOne} from "typeorm";
import { User as UserB } from "./User.b";
import { User as UserA } from "./User.a";

@Entity({schema: "a"})
export class Photo {
  @PrimaryGeneratedColumn()
  id: number;

  @Column({
    length: 100
  })
  name: string;

  @ManyToOne(type => UserA)
  userA: UserA;

  @ManyToOne(type => UserB)
  userB: UserB;
}
// UserB
import {Entity, Column, PrimaryGeneratedColumn, OneToMany} from "typeorm";
import { Photo as PhotoA } from "./Photo.a";
import { Photo as PhotoB } from "./Photo.b";

@Entity({schema: "b"})
export class User {
  @PrimaryGeneratedColumn()
  id: number;

  @Column({
    length: 100
  })
  name: string;

  @OneToMany(type => PhotoA, photo => photo.userB)
  photosA: PhotoA[]

  @OneToMany(type => PhotoB, photo => photo.userB)
  photosB: PhotoB[]
}
Run this code:
import "reflect-metadata";
import * as typeorm from "typeorm";
import { Photo as PhotoA } from "./entities/Photo.a";
import { User as UserB } from "./entities/User.b";
import { PostgresConnectionOptions } from "typeorm/driver/postgres/PostgresConnectionOptions";
import { Photo as PhotoB } from "./entities/Photo.b";
import { User as UserA } from "./entities/User.a";
class Inl {
public async test() {
const connection = await typeorm.createConnection({
type: "postgres",
host: "localhost",
port: 5433,
username: "test",
password: "test",
database: "test",
synchronize: true,
logging: true,
entities: [ PhotoA, PhotoB, UserA, UserB ]
} as PostgresConnectionOptions);
const photoARepo = connection.getRepository(PhotoA);
const userBRepo = connection.getRepository(UserB);
const userBRow = new UserB();
userBRow.name = "User in schema B";
const userBSavedRow = await userBRepo.save(userBRow);
const photoARow = new PhotoA();
photoARow.name = "Photo in schema A";
photoARow.userB = userBSavedRow;
await photoARepo.save(photoARow);
const photoBRow = new PhotoB();
photoBRow.name = "Photo in schema B";
photoBRow.userB = userBSavedRow;
await photoARepo.save(photoARow);
const result = await userBRepo
.createQueryBuilder("userB")
.select("*")
.leftJoinAndSelect("a.photo", "photosA")
.leftJoinAndSelect("b.photo", "photosB")
.where({id: userBSavedRow.id})
.getOne();
console.log(result);
}
}
new Inl().test();
RESULTS
query: INSERT INTO "a"."photo"("name", "userAId", "userBId") VALUES ($1, DEFAULT, $2) RETURNING "id" -- PARAMETERS: ["Photo in schema A",6]
query: COMMIT
query: SELECT "Photo"."id" AS "Photo_id", "Photo"."name" AS "Photo_name", "Photo"."userAId" AS "Photo_userAId", "Photo"."userBId" AS "Photo_userBId" FROM "a"."photo" "Photo" WHERE "Photo"."id" IN ($1) -- PARAMETERS: [6]
(node:527) UnhandledPromiseRejectionWarning: Error: "a" alias was not found. Maybe you forgot to join it?
at QueryExpressionMap.findAliasByName (/home/lewis/Projects/internationalisation/src/query-builder/QueryExpressionMap.ts:341:19)
at JoinAttribute.getValue (/home/lewis/Projects/internationalisation/src/query-builder/JoinAttribute.ts:146:72)
at JoinAttribute.get [as relation] (/home/lewis/Projects/internationalisation/src/query-builder/JoinAttribute.ts:162:53)
at JoinAttribute.get [as metadata] (/home/lewis/Projects/internationalisation/src/query-builder/JoinAttribute.ts:175:18)
at SelectQueryBuilder.join (/home/lewis/Projects/internationalisation/src/query-builder/SelectQueryBuilder.ts:1299:27)
at SelectQueryBuilder.leftJoin (/home/lewis/Projects/internationalisation/src/query-builder/SelectQueryBuilder.ts:284:14)
at SelectQueryBuilder.leftJoinAndSelect (/home/lewis/Projects/internationalisation/src/query-builder/SelectQueryBuilder.ts:364:14)
at Inl.test (/home/lewis/Projects/internationalisation/index.ts:42:14)
at <anonymous>
at process._tickCallback (internal/process/next_tick.js:160:7)
As you can see from the log, I'm getting the error "a" alias was not found with the above code. Does anybody have hints on this?
The problem here is that you are mixing up schema and alias in leftJoinAndSelect (the schema is resolved by TypeORM; you don't need to specify it in your query if your entity is correctly configured). So this should work:
const result = await userBRepo
  .createQueryBuilder("userB")
  .leftJoinAndSelect("userB.photosA", "photosA")
  .leftJoinAndSelect("userB.photosB", "photosB")
  .where({id: userBSavedRow.id})
  .getOne();

How to connect to MongoDb with Hapi.js?

Does anyone know how to connect to MongoDb while using the Hapi.js framework?
I have only managed to find one example (https://github.com/Marsup/hapi-mongodb), but this requires using a plugin and has no comments in the code!
Does anyone know of a simpler way?
The following (using mongoose) works pretty well for me:
var Hapi = require('hapi');
var mongoose = require("mongoose");

var server = new Hapi.Server();
server.connection({ port: 3000 });

var dbUrl = 'mongodb://localhost:27017/mydb';
var dbOptions = {
  db: { native_parser: true },
  server: { poolSize: 5 }
};

server.register([ /* plugins */ ], function (err) {
  if (err) {
    throw err; // something bad happened loading the plugins
  }

  // ... Register the routes

  server.start(function () {
    mongoose.connect(dbUrl, dbOptions, function(err) {
      if (err) server.log('error', err);
    });
  });
});
I use a hapi plugin that I wrote, which connects to Mongo, logs errors, and adds Bluebird promises.
'use strict';

var bluebird = require('bluebird');
var mongoose = bluebird.promisifyAll(require('mongoose'));

exports.register = function(plugin, options, next) {
  mongoose.connect(options.mongo.uri, options.mongo.options, function (e) {
    if (e) {
      plugin.log(['error', 'database', 'mongodb'], 'Unable to connect to MongoDB: ' + e.message);
      process.exit();
    }
    mongoose.connection.once('open', function () {
      plugin.log(['info', 'database', 'mongodb'], 'Connected to MongoDB @ ' + options.mongo.uri);
    });
    mongoose.connection.on('connected', function () {
      plugin.log(['info', 'database', 'mongodb'], 'Connected to MongoDB @ ' + options.mongo.uri);
    });
    mongoose.connection.on('error', function (e) {
      plugin.log(['error', 'database', 'mongodb'], 'MongoDB ' + e.message);
    });
    mongoose.connection.on('disconnected', function () {
      plugin.log(['warn', 'database', 'mongodb'], 'MongoDB was disconnected');
    });
  });
  return next();
};

exports.register.attributes = {
  name: 'mongoose',
  version: '1.0.0'
};
Blog post on user authentication with passport and Mongoose
Also be aware that Hapi's model is based on plugins, so read and re-read the docs on building your own.
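For illustration, a minimal sketch of registering the plugin above with the options shape it reads; the URI and the ./plugins/mongoose.js path are assumptions, and this follows the legacy hapi register API that the plugin uses:

var Hapi = require('hapi');

var server = new Hapi.Server();
server.connection({ port: 3000 });

server.register({
  register: require('./plugins/mongoose.js'), // path assumed; the plugin file shown above
  options: {
    mongo: {
      uri: 'mongodb://localhost:27017/mydb', // placeholder URI
      options: {}                            // passed straight through to mongoose.connect
    }
  }
}, function (err) {
  if (err) throw err;
  server.start(function () {
    console.log('Server started at ' + server.info.uri);
  });
});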
Visit http://cronj.com/blog/hapi-mongoose.
A complete sample project which may help you: https://github.com/gauravgupta90/Hapi-Mongoose-Angular
Config.js
module.exports = {
  server: {
    host: '0.0.0.0',
    port: 8000
  },
  database: {
    host: '127.0.0.1',
    port: 27017,
    db: 'DatabaseName',
    username: '',
    password: ''
  }
};
Server.js
var Hapi = require('hapi'),
  Routes = require('./routes'),
  config = require('./config'),
  Db = require('./database');

var server = Hapi.createServer(config.server.host, config.server.port, {
  cors: true
});

server.route(Routes.endpoints);

server.start(function() {
  console.log('Server started ', server.info.uri);
});
Database.js
var Mongoose = require('mongoose'),
  config = require('./config');

Mongoose.connect('mongodb://' + config.database.host + '/' + config.database.db);

var db = Mongoose.connection;

db.on('error', console.error.bind(console, 'connection error'));
db.once('open', function callback() {
  console.log("Connection with database succeeded.");
});

exports.Mongoose = Mongoose;
exports.db = db;
The following works for me:
const Hapi = require("hapi");
const Mongoose = require("mongoose");

const server = new Hapi.Server({ "host": "localhost", "port": 3000 });

Mongoose.connect('mongodb://localhost:27017/testdb', { useNewUrlParser: true }, (err) => {
  if (!err) { console.log('MongoDB Connection Succeeded.') }
  else { console.log(`Error in DB connection : ${err}`) }
});
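To round this out, here is a possible way to also start that server once the database connection succeeds, assuming hapi v17+ where server.start() returns a promise (this sketch is not part of the original answer):

const Hapi = require("hapi");
const Mongoose = require("mongoose");

const server = new Hapi.Server({ host: "localhost", port: 3000 });

const init = async () => {
  // Connect first, then start serving requests
  await Mongoose.connect('mongodb://localhost:27017/testdb', { useNewUrlParser: true });
  console.log('MongoDB Connection Succeeded.');

  await server.start();
  console.log(`Server running at ${server.info.uri}`);
};

init().catch((err) => {
  console.log(`Startup error: ${err}`);
  process.exit(1);
});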