I get an error when connecting to my MongoDB Atlas database after migrating the data from the old mLab.
I have definitely set up the username and password correctly as in the documentation (obviously I replaced PASSWORD with my correct mLab password):
var mongoURI = 'mongodb+srv://heroku_3kcdl3j9:PASSWORD@cluster-3kcdl3j9.auof1.mongodb.net/heroku_3kcdl3j9?retryWrites=true&w=majority';
I have migrated my database from mLab to Atlas successfully, set the Network Access whitelist to 0.0.0.0/0, and set up the environment variable in Heroku.
I connect to the Atlas database with this code; do I need some special options? (This code works with the old mLab connection.)
mongoose.connect(mongoURI,
  // { config: { autoIndex: true } },
  // { options : { ssl: true } },
  function (error) {
    if (error) console.error(error);
    else console.log('mongo connected');
    const con = new mongoose.mongo.Admin(mongoose.connection.db);
    con.buildInfo((err, info) => {
      if (err) {
        throw err;
      }
      // see the db version
      // console.log('mongo db.version(): ' + info.version);
    });
  });
However, I still get this error and I don't know what I am doing wrong:
{ MongoError: authentication fail
at /Users/bensmith/Downloads/DocumentsDirNew/Scraper and API/diveapi/node_modules/mongodb-core/lib/topologies/replset.js:1462:15
at /Users/bensmith/Downloads/DocumentsDirNew/Scraper and API/diveapi/node_modules/mongodb-core/lib/connection/pool.js:868:7
at /Users/bensmith/Downloads/DocumentsDirNew/Scraper and API/diveapi/node_modules/mongodb-core/lib/connection/pool.js:844:20
at finish (/Users/bensmith/Downloads/DocumentsDirNew/Scraper and API/diveapi/node_modules/mongodb-core/lib/auth/scram.js:232:16)
at handleEnd (/Users/bensmith/Downloads/DocumentsDirNew/Scraper and API/diveapi/node_modules/mongodb-core/lib/auth/scram.js:242:7)
at /Users/bensmith/Downloads/DocumentsDirNew/Scraper and API/diveapi/node_modules/mongodb-core/lib/auth/scram.js:351:15
at /Users/bensmith/Downloads/DocumentsDirNew/Scraper and API/diveapi/node_modules/mongodb-core/lib/connection/pool.js:531:18
at process._tickCallback (internal/process/next_tick.js:61:11)
name: 'MongoError',
message: 'authentication fail',
errors:
[ { name: 'cluster-3kcdl3j9-shard-00-01.auof1.mongodb.net:27017',
err: [MongoError] },
{ name: 'cluster-3kcdl3j9-shard-00-00.auof1.mongodb.net:27017',
err: [MongoError] },
{ name: 'cluster-3kcdl3j9-shard-00-02.auof1.mongodb.net:27017',
err: [MongoError] } ],
[Symbol(mongoErrorContextSymbol)]: {} }
(node:47015) UnhandledPromiseRejectionWarning: TypeError: Cannot read property 's' of undefined
at Admin.buildInfo (/Users/bensmith/Downloads/DocumentsDirNew/Scraper and API/diveapi/node_modules/mongodb/lib/admin.js:100:37)
at /Users/bensmith/Downloads/DocumentsDirNew/Scraper and API/diveapi/index.js:95:13
at $initialConnection.then.err (/Users/bensmith/Downloads/DocumentsDirNew/Scraper and API/diveapi/node_modules/mongoose/lib/connection.js:556:14)
at process._tickCallback (internal/process/next_tick.js:68:7)
(node:47015) UnhandledPromiseRejectionWarning: Unhandled promise rejection. This error originated either by throwing inside of an async function without a catch block, or by rejecting a promise which was not handled with .catch(). (rejection id: 1)
(node:47015) [DEP0018] DeprecationWarning: Unhandled promise rejections are deprecated. In the future, promise rejections that are not handled will terminate the Node.js process with a non-zero exit code.
(node:47015) UnhandledPromiseRejectionWarning: MongoError: authentication fail
at /Users/bensmith/Downloads/DocumentsDirNew/Scraper and API/diveapi/node_modules/mongodb-core/lib/topologies/replset.js:1462:15
at /Users/bensmith/Downloads/DocumentsDirNew/Scraper and API/diveapi/node_modules/mongodb-core/lib/connection/pool.js:868:7
at /Users/bensmith/Downloads/DocumentsDirNew/Scraper and API/diveapi/node_modules/mongodb-core/lib/connection/pool.js:844:20
at finish (/Users/bensmith/Downloads/DocumentsDirNew/Scraper and API/diveapi/node_modules/mongodb-core/lib/auth/scram.js:232:16)
at handleEnd (/Users/bensmith/Downloads/DocumentsDirNew/Scraper and API/diveapi/node_modules/mongodb-core/lib/auth/scram.js:242:7)
at /Users/bensmith/Downloads/DocumentsDirNew/Scraper and API/diveapi/node_modules/mongodb-core/lib/auth/scram.js:351:15
at /Users/bensmith/Downloads/DocumentsDirNew/Scraper and API/diveapi/node_modules/mongodb-core/lib/connection/pool.js:531:18
at process._tickCallback (internal/process/next_tick.js:61:11)
(node:47015) UnhandledPromiseRejectionWarning: Unhandled promise rejection. This error originated either by throwing inside of an async function without a catch block, or by rejecting a promise which was not handled with .catch(). (rejection id: 2)
In the migration documentation from mLab to Atlas you can read:
Atlas servers always run with requireSSL and only accept TLS/SSL encrypted connections.
I can see that your ssl option is commented out.
This is one problem.
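For illustration, a minimal sketch of how the connection could be made with the options actually applied (this assumes Mongoose 5.x and is not the poster's exact code): pass a single combined options object, with ssl left enabled, since Atlas only accepts TLS/SSL connections.

// A sketch, assuming Mongoose 5.x: one combined options object followed by
// the callback, with ssl enabled as Atlas requires.
mongoose.connect(mongoURI, { useNewUrlParser: true, ssl: true, autoIndex: true },
  function (error) {
    if (error) console.error(error);
    else console.log('mongo connected');
  });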
The problem turned out to be the username and password: I believe there was an illegal character in the password. I created another user with a simpler password and the server authenticated and logged in, allowing it to connect.
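For anyone hitting the same thing, one way to keep special characters in the password from breaking the URI is to percent-encode the credentials when building it. This is a sketch only: the username and host are copied from the question above, and RAW_PASSWORD_HERE is a placeholder.

// Percent-encode the credentials so reserved characters such as '@', ':' or '/'
// in the password cannot break URI parsing or authentication.
const user = encodeURIComponent('heroku_3kcdl3j9');
const password = encodeURIComponent('RAW_PASSWORD_HERE'); // placeholder, not a real password
var mongoURI = 'mongodb+srv://' + user + ':' + password +
  '@cluster-3kcdl3j9.auof1.mongodb.net/heroku_3kcdl3j9?retryWrites=true&w=majority';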
Related
I have created a free cluster on MongoDB Atlas. Under that cluster I have created a database and a collection to test things out. Since I am using Node.js driver 4.0 or above, this is the connection string I got:
mongodb+srv://myName:myPass@cluster0.2sjva.mongodb.net/myDb?retryWrites=true&w=majority
This is my backend code:
const { MongoClient } = require("mongodb");

// Connection URI
const uri =
  "mongodb+srv://myName:myPass@cluster0.2sjva.mongodb.net/myDb?retryWrites=true&w=majority";

// Create a new MongoClient
const client = new MongoClient(uri);

async function run() {
  try {
    // Connect the client to the server
    await client.connect();
    // Establish and verify connection
    await client.db("myDb").command({ ping: 1 });
    console.log("Connected successfully to server");
  } finally {
    // Ensures that the client will close when you finish/error
    await client.close();
  }
}
run().catch(console.dir);
But when I run this code, I get this following error:
Error: querySrv ECONNREFUSED _mongodb._tcp.cluster0.2sjva.mongodb.net
at QueryReqWrap.onresolve [as oncomplete] (dns.js:210:19) {
errno: undefined,
code: 'ECONNREFUSED',
syscall: 'querySrv',
hostname: '_mongodb._tcp.cluster0.2sjva.mongodb.net'
}
Here is some information I can provide:
I have whitelisted my current IP address; after getting this error I also tried 0.0.0.0/0, but the problem persists.
OS: Arch Linux
I have also tried the connection string format for older driver versions (mongodb://...) and that worked; see the check sketch below.
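Since the non-SRV string works, the failure looks like it is in the SRV DNS lookup itself; here is a minimal check sketch (plain Node.js dns module, hostname copied from the error above, not part of my original code):

// If this also fails with ECONNREFUSED, the resolver is refusing SRV queries,
// which is exactly what the driver's querySrv error reports.
const dns = require("dns");

dns.resolveSrv("_mongodb._tcp.cluster0.2sjva.mongodb.net", (err, records) => {
  if (err) return console.error(err); // same querySrv failure as the driver
  console.log(records);               // the shard hosts the +srv scheme would expand to
});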
My question: why doesn't the connection string for Node.js driver version > 4.0 work in my case?
Thank you for reading!
I'm trying to write to the MongoDB Atlas cloud service with the Dart package mongo_dart.
This is how I connect:
static const String uri =
    "mongodb+srv://User:Password@Project.e0nro.mongodb.net/DbName?retryWrites=true&w=majority&tls=true";

Future<Db> connectToDatabase() async {
  var db = await Db.create(uri);
  await db.open(secure: true);
  print('Connected to database');
  return db;
}
And this is how I update my data:
await coll.update(
  {
    "symbol": symbol,
  },
  {
    r"$addToSet": {"pens": pen.toJson()}
  },
);
I get this error almost immediately:
Unhandled exception:
MongoDB ConnectionException: connection closed: The socket connection has been reset by peer.
Possible causes:
- Trying to connect to an ssl/tls encrypted database without specifiyng
either the query parm tls=true or the secure=true parameter in db.open()
- Others
And sometimes this one:
Unhandled exception:
MongoDB ConnectionException: connection closed: SocketException: Read failed (OS Error: Connection reset by peer, errno = 104), address = ????.e0nro.mongodb.net, port = 52508
In the Atlas panel I added the IP address to access the database globally:
0.0.0.0/0 (includes your current IP address) access Active
How can I solve these errors?
The issue: this is what happens when I curl the endpoint http://ip/getuser/:
UnhandledPromiseRejectionWarning: MongoParseError: URI malformed, cannot be parsed
at parseConnectionString (/app/node_modules/mongodb/lib/core/uri_parser.js:542:21)
at connect (/app/node_modules/mongodb/lib/operations/connect.js:272:3)
at cb (/app/node_modules/mongodb/lib/mongo_client.js:221:5)
at maybePromise (/app/node_modules/mongodb/lib/utils.js:714:3)
at MongoClient.connect (/app/node_modules/mongodb/lib/mongo_client.js:217:10)
at Function.MongoClient.connect (/app/node_modules/mongodb/lib/mongo_client.js:427:22)
at exports.IsCardActivated (/app/routes/index.js:47:45)
at Layer.handle [as handle_request] (/app/node_modules/express/lib/router/layer.js:95:5)
at next (/app/node_modules/express/lib/router/route.js:137:13)
at Route.dispatch (/app/node_modules/express/lib/router/route.js:112:3)
(node:1) UnhandledPromiseRejectionWarning: Unhandled promise rejection. This error originated either by throwing inside of an async function without a catch block, or by rejecting a promise which was not handled with .catch(). (rejection id: 2)
(node:1) [DEP0018] DeprecationWarning: Unhandled promise rejections are deprecated. In the future, promise rejections that are not handled will terminate the Node.js process with a non-zero exit code.
router/getuserbydetails
const MongoClient = require('mongodb').MongoClient;
require('dotenv').config();

function connectUrl() {
  return process.env.url;
}

exports.GetUserDetails = async (req, res) => {
  ....
  if (!client) client = await MongoClient.connect(connectUrl(), { useNewUrlParser: true });
  const db = client.db(getDatabaseName());
  ....
};
./index.js
var user= require('./routes/user/index');
app.get('/getuser', user.getuser);
Dockerfile
FROM node:10
ENV NODE_ENV="production"
COPY . /app
WORKDIR /app
RUN npm install
ENV PORT 80
EXPOSE 80
CMD [ "node", "index.js" ]
Google Cloud Platform: GCE instance settings:
Note:
firewall:
allow 443, 80
using an LB in front of the application.
This application works fine locally, so I'm wondering where it might be going wrong.
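One thing worth checking (an assumption on my part, not something established above): in the production container dotenv may not find a .env file, so process.env.url can end up undefined, which the driver then reports as "URI malformed". A minimal guard sketch, reusing the connectUrl() helper from the route above:

// Inside the async route handler, before connecting (sketch only):
const uri = connectUrl();
if (!uri) {
  // Fail fast with a clear message instead of passing undefined to the driver,
  // which surfaces as "MongoParseError: URI malformed, cannot be parsed".
  throw new Error('Missing `url` environment variable inside the container');
}
if (!client) client = await MongoClient.connect(uri, { useNewUrlParser: true });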
My Sails application is not able to connect to a locally running MongoDB instance.
local.js looks like this:
module.exports.connections = {
  sailsMongoDBServer: {
    adapter: 'sails-mongo',
    host: '127.0.0.1',
    port: 27017,
    database: 'dp-manager-db'
  }
}
Error:
2019-03-06T12:32:01.081Z - error:
{ message:
'The hook `orm` is taking too long to load.\nMake sure it is triggering its `initialize()` callback, or else set `sails.config.orm._hookTimeout to a higher value (currently 20000)',
stack:
'Error: The hook `orm` is taking too long to load.\nMake sure it is triggering its `initialize()` callback, or else set `sails.config.orm._hookTimeout to a higher value (currently 20000)\n at Timeout.tooLong [as _onTimeout] (E:\\workspace_DpForm\\Eclipse\\dpm-app\\node_modules\\sails\\lib\\app\\private\\loadHooks.js:85:21)\n at ontimeout (timers.js:436:11)\n at tryOnTimeout (timers.js:300:5)\n at listOnTimeout (timers.js:263:5)\n at Timer.processTimers (timers.js:223:10)',
code: 'E_HOOK_TIMEOUT' }
2019-03-06T12:32:03.521Z - error: A hook (`orm`) failed to load!
E:\workspace_DpForm\Eclipse\dpm-app\node_modules\mongodb\lib\mongo_client.js:224
throw err
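Not from the original post, but one way to narrow this down is to take Sails out of the picture and confirm that a local mongod is actually reachable at 127.0.0.1:27017; a minimal sketch with the plain MongoDB Node driver:

// Diagnostic sketch (plain mongodb driver, not sails-mongo): if this also
// fails or hangs, the `orm` hook timeout is just a symptom of mongod not
// listening on 127.0.0.1:27017.
const { MongoClient } = require('mongodb');

MongoClient.connect('mongodb://127.0.0.1:27017/dp-manager-db', { useNewUrlParser: true },
  (err, client) => {
    if (err) return console.error(err);
    console.log('connected to local mongod');
    client.close();
  });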
I'm currently running openSUSE on an rPi3B+ (aarch64) and have hit a wall running a Node.js connection script.
I went through the standard install of PostgreSQL (v10 is what is offered on this version of openSUSE), then created a new role with
CREATE ROLE new_role LOGIN PASSWORD 'passwd';
and then a db with
CREATE DATABASE new_db OWNER new_role;
Both \l and \du return the expected output, showing that the role and the db have been created successfully with the correct owner.
So I then quickly created a node project directory and copied the test script from the docs: https://node-postgres.com/features/connecting
const { Pool, Client } = require('pg')

const connectionString = 'postgresql://new_role:passwd@localhost:5432/new_db'

const pool = new Pool({
  connectionString: connectionString,
})

pool.query('SELECT NOW()', (err, res) => {
  console.log(err, res)
  pool.end()
})

const client = new Client({
  connectionString: connectionString,
})
client.connect()

client.query('SELECT NOW()', (err, res) => {
  console.log(err, res)
  client.end()
})
This returns a few broken promise errors that I haven't caught (.catch()) correctly yet, and an error code of 28000 that looks like this:
{ error: Ident authentication failed for user "new_role"
at Connection.parseE (/home/eru/postgresDB/node_modules/pg/lib/connection.js:554:11)
at Connection.parseMessage (/home/eru/postgresDB/node_modules/pg/lib/connection.js:379:19)
at Socket.<anonymous> (/home/eru/postgresDB/node_modules/pg/lib/connection.js:119:22)
at Socket.emit (events.js:182:13)
at addChunk (_stream_readable.js:283:12)
at readableAddChunk (_stream_readable.js:264:11)
at Socket.Readable.push (_stream_readable.js:219:10)
at TCP.onStreamRead [as onread] (internal/stream_base_commons.js:94:17)
name: 'error',
length: 99,
severity: 'FATAL',
code: '28000',
detail: undefined,
hint: undefined,
position: undefined,
internalPosition: undefined,
internalQuery: undefined,
where: undefined,
schema: undefined,
table: undefined,
column: undefined,
dataType: undefined,
constraint: undefined,
file: 'auth.c',
line: '328',
routine: 'auth_failed' } undefined
So I'm pretty sure the attempt made it to the intended port, otherwise I wouldn't have received the detailed error in the terminal. Error code 28000 means invalid_authorization_specification.
Is there something I need to do on the server (in the psql interface) that will fulfill the authorization specification?
When I looked into that specific code, I couldn't find search results relevant to my situation.
Fairly new to postgres here so I'm sure this is a pretty noob mistake that I'm missing but any help or input is very appreciated!
Found an answer after a little more digging here: error: Ident authentication failed for user
I ended up editing my pg_hba.conf to change the authentication method from ident to md5.
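Roughly, the change looks like the following in pg_hba.conf (an illustration only; the exact default line varies by distribution), after which PostgreSQL needs a reload or restart:

# before: ident -- compare the OS username against the requested role
# host    all             all             127.0.0.1/32            ident
# after:  md5   -- check the role's md5-hashed password instead
host    all             all             127.0.0.1/32            md5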
This is rather crude because I don't really understand what I changed, aside from telling PostgreSQL to check the md5-encrypted password instead of checking whether my username matched the roles created on the server.
If anyone has a proper explanation for what's changed and why I'm all ears.