/usr/local/lib/node/.npm/mongoose/0.0.5/package/lib/util.js:40
continue;
^^^^^^^^
node.js:68
throw e; // process.nextTick error, or 'error' event on first tick
^
SyntaxError: Illegal continue statement
at Module._compile (node.js:418:29)
at Object..js (node.js:429:14)
at Module.load (node.js:355:35)
at Function._load (node.js:322:14)
at require (node.js:367:23)
at Object.<anonymous> (/usr/local/lib/node/.npm/mongoose/0.0.5/package/lib/model.js:2:13)
at Module._compile (node.js:423:30)
at Object..js (node.js:429:14)
at Module.load (node.js:355:35)
at Function._load (node.js:322:14)
And the code is just:
var mongoose = require('mongoose').Mongoose;

mongoose.model('User', {
  properties: ['user', 'pass', 'widgets' ],
  indexes: [ { 'user' : 1 } , { unique : true } ],
});
.......
/usr/local/lib/node/.npm/mongoose/0.0.5/package/lib/util.js:40
else {
  // Prevent never-ending loop
  if (target === d.value) {
    continue;
  }
Even if I comment out the continue statement, it causes another error:
node.js:68
throw e; // process.nextTick error, or 'error' event on first tick
^
Error: ECONNREFUSED, Connection refused
at Socket._onConnect (net.js:548:18)
at IOWatcher.onWritable [as callback] (net.js:165:12)
Any idea?
/usr/local/lib/node/.npm/mongoose/0.0.5/package/lib/util.js
Edit this file and change line 40 from continue; to return;
Inside an Array.prototype.forEach callback, continue is not supported; return skips to the next iteration instead (you can use the example below to test return vs. continue in Firebug's console)
[1, 2, 3, 4].forEach(function (val) {
  if (val != 1) {
    console.log(val);
    return; // moves on to the next iteration, like `continue` would in a plain for loop
    // continue; // SyntaxError: Illegal continue statement
  }
  console.log('here');
});
ECONNREFUSED means your mongod is not running (or is not reachable at the address you connect to) - where's your connect string?
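For reference, a minimal sketch of what the connect call looks like in the old 0.0.x-style API used in your snippet (the host and database name are placeholders; adjust them to your setup, and the exact API differs between mongoose versions):

var mongoose = require('mongoose').Mongoose;

// Placeholder connection string -- mongod has to be listening here,
// otherwise you will keep getting ECONNREFUSED.
var db = mongoose.connect('mongodb://localhost/my_database');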
When we set idle_in_transaction_session_timeout, the database will terminate connections that sit idle in a transaction for too long.
This works as expected, but I wonder how we should deal with this situation in the application code.
We are using pg-promise 10.3.1.
Details of the test:
we set the connection pool size to 1, so that we only have a single session
we set idle_in_transaction_session_timeout to 2.5 sec:
SET idle_in_transaction_session_timeout TO 2500
now we start a transaction and sleep for 5 seconds
the active transaction will now be in state idle in transaction:
see What can cause "idle in transaction" for "BEGIN" statements
after 2.5 sec the database will terminate the session and send an error to the client:
pgp-error error: terminating connection due to idle-in-transaction timeout
after another 2.5 sec the transactional code tries to send a query (via the already terminated session), and this fails as expected:
dbIdle failed Error: Client has encountered a connection error and is not queryable
then pg-promise will try to roll back the transaction, which also fails (also expected, I guess)
But now we start a new query, and this query also fails with
dbCall failed Client has encountered a connection error and is not queryable
Is this expected? I was hoping that pg-promise could somehow remove the "broken" connection from the pool and that we could get a new one.
Obviously this is not the case, so how should we deal with this situation, i.e. how do we recover so that we can send new queries to the database?
Code example:
import pgPromise, { IMain } from "pg-promise";
import * as dbConfig from "./db-config.json";
import { IConnectionParameters } from "pg-promise/typescript/pg-subset";

const cll = "pg";
console.time(cll);

const pgp: IMain = pgPromise({
  query(e) {
    console.timeLog(cll, `> ${e.query}`);
  },
  error(e, ctx) {
    console.timeLog(cll, "pgp-error", e);
  }
});

const connectParams: IConnectionParameters = {
  ...dbConfig,
  application_name: "pg-test",
  max: 1
};
const db = pgp(connectParams);

/**
 * @param timeoutMs 0 is no timeout
 */
async function setDbIdleInTxTimeout(timeoutMs: number = 0) {
  await db.any("SET idle_in_transaction_session_timeout TO $1;", timeoutMs);
}

async function dbIdle(sleepTimeSec: number) {
  console.timeLog(cll, `starting db idle ${sleepTimeSec}`);
  const result = await db.tx(async t => {
    await new Promise(resolve => setTimeout(resolve, sleepTimeSec * 1000));
    return t.one("Select $1 as sleep_sec", sleepTimeSec);
  });
  console.timeLog(cll, result);
}

async function main() {
  await setDbIdleInTxTimeout(2500);
  try {
    await dbIdle(5);
  } catch (e) {
    console.timeLog(cll, "dbIdle failed", e);
  }
  try {
    await db.one("Select 1+1 as res");
  } catch (e) {
    console.timeLog(cll, "dbCall failed", e);
  }
}

main().finally(() => {
  pgp.end();
});
Console output (some uninformative lines removed):
"C:\Program Files\nodejs\node.exe" D:\dev_no_backup\pg-promise-tx\dist\index.js
pg: 23.959ms > SET idle_in_transaction_session_timeout TO 2500;
pg: 28.696ms starting db idle 5
pg: 29.705ms > begin
pg: 2531.247ms pgp-error error: terminating connection due to idle-in-transaction timeout
at TCP.onStreamRead (internal/stream_base_commons.js:182:23) {
name: 'error',
severity: 'FATAL',
code: '25P03',
}
pg: 2533.569ms pgp-error Error: Connection terminated unexpectedly
pg: 5031.091ms > Select 5 as sleep_sec
pg: 5031.323ms pgp-error Error: Client has encountered a connection error and is not queryable
pg: 5031.489ms > rollback
pg: 5031.570ms pgp-error Error: Client has encountered a connection error and is not queryable
pg: 5031.953ms dbIdle failed Error: Client has encountered a connection error and is not queryable
pg: 5032.094ms > Select 1+1 as res
pg: 5032.164ms pgp-error Error: Client has encountered a connection error and is not queryable
pg: 5032.303ms dbCall failed Error: Client has encountered a connection error and is not queryable
Process finished with exit code 0
This was issue #680, which has been fixed in pg-promise 10.3.5.
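On the recovery question: with 10.3.5+ the failed client should be evicted from the pool, so a follow-up query gets a fresh connection; the query that was in flight when the session was killed still fails, though, so application code that wants to recover transparently has to catch and retry. A rough sketch (not an official pg-promise API, just plain retry logic around the db and cll objects from the question):

async function runWithRetry(run) {
  try {
    return await run();
  } catch (e) {
    console.timeLog(cll, "query failed, retrying once", e);
    // With pg-promise >= 10.3.5 the broken client has been removed from the
    // pool by now, so the second attempt should run on a new connection.
    return await run();
  }
}

// usage: const res = await runWithRetry(() => db.one("Select 1+1 as res"));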
I'm recursively loading a tree stored in Firestore. The tree has ~79 nodes.
Very occasionally I'm getting this error (about one in ten full-tree loads).
Edit: the code: https://github.com/karol-depka/OrYoL
Edit: the example deployed: https://oryol.karoldepka.com/tree (sorry, no plunker for now, just this)
Details below.
Firebase version in package.json: 4.5.0
Questions:
Where can I get more info beyond this basic documentation: https://firebase.google.com/docs/reference/js/firebase.FirebaseError ?
What is the source of the problem, and how do I fix it?
VM724:27 Uncaught Error in onSnapshot: Error: transaction closed
at new FirestoreError (error.js:164)
at JsonProtoSerializer.webpackJsonp.../../../../firebase/firestore/remote/serializer.js.JsonProtoSerializer.fromRpcStatus (serializer.js:126)
at JsonProtoSerializer.webpackJsonp.../../../../firebase/firestore/remote/serializer.js.JsonProtoSerializer.fromWatchChange (serializer.js:517)
at PersistentListenStream.webpackJsonp.../../../../firebase/firestore/remote/persistent_stream.js.PersistentListenStream.onMessage (persistent_stream.js:334)
at persistent_stream.js:270
at persistent_stream.js:247
at async_queue.js:81
at ZoneDelegate.webpackJsonp.../../../../zone.js/dist/zone.js.ZoneDelegate.invoke (zone.js:392)
at Object.onInvoke (core.es5.js:3890)
at ZoneDelegate.webpackJsonp.../../../../zone.js/dist/zone.js.ZoneDelegate.invoke (zone.js:391)
The code:
private processNodeEvents(nestLevel: number, snapshot: any, parents, listener: DbTreeListener) {
  const serviceThis = this
  snapshot.docChanges.forEach(function(change) {
    let data = change.doc.data()
    if (change.type === 'added') {
      const parentsPath = serviceThis.nodesPath(parents)
      console.log('node: ', nestLevel, parentsPath, data);
      serviceThis.pendingListeners ++
      data.node.onSnapshot(targetNodeDoc => {
        serviceThis.pendingListeners --
        listener.onNodeAdded(
          new NodeAddEvent(parentsPath, parentsPath[parentsPath.length - 1], targetNodeDoc, targetNodeDoc.id,
            serviceThis.pendingListeners))
        console.log('target node:', nestLevel, targetNodeDoc)
        console.log('target node title:', nestLevel, targetNodeDoc.data().title)
        const subCollection = targetNodeDoc.ref.collection('subNodes')
        console.log('subColl:', subCollection)
        subCollection.onSnapshot((subSnap: QuerySnapshot) => {
          const newParents = parents.slice(0)
          newParents.push(targetNodeDoc.ref)
          serviceThis.processNodeEvents(nestLevel + 1, subSnap, newParents, listener)
        })
      })
      // console.log('root node ref: ', targetNode);
    }
    if (change.type === 'modified') {
      console.log('Modified city: ', data);
    }
    if (change.type === 'removed') {
      console.log('Removed city: ', data);
    }
  })
}
Edit: another error discovered by running the code multiple times:
VM3343:27 Uncaught Error in onSnapshot: Error: The referenced transaction has expired or is no longer valid.
at new FirestoreError (error.js:164)
at JsonProtoSerializer.webpackJsonp.../../../../firebase/firestore/remote/serializer.js.JsonProtoSerializer.fromRpcStatus (serializer.js:126)
at JsonProtoSerializer.webpackJsonp.../../../../firebase/firestore/remote/serializer.js.JsonProtoSerializer.fromWatchChange (serializer.js:517)
at PersistentListenStream.webpackJsonp.../../../../firebase/firestore/remote/persistent_stream.js.PersistentListenStream.onMessage (persistent_stream.js:334)
at persistent_stream.js:270
at persistent_stream.js:247
at async_queue.js:81
at ZoneDelegate.webpackJsonp.../../../../zone.js/dist/zone.js.ZoneDelegate.invoke (zone.js:392)
at Object.onInvoke (core.es5.js:3890)
at ZoneDelegate.webpackJsonp.../../../../zone.js/dist/zone.js.ZoneDelegate.invoke (zone.js:391)
window.console.error @ VM3343:27
Edit: updated firebase to 4.6.0; the problem happened 3 times:
Uncaught Error in onSnapshot: Error: transaction closed
at new FirestoreError (error.js:149)
at JsonProtoSerializer.webpackJsonp.../../../../@firebase/firestore/dist/esm/src/remote/serializer.js.JsonProtoSerializer.fromRpcStatus (serializer.js:93)
at JsonProtoSerializer.webpackJsonp.../../../../@firebase/firestore/dist/esm/src/remote/serializer.js.JsonProtoSerializer.fromWatchChange (serializer.js:536)
at PersistentListenStream.webpackJsonp.../../../../@firebase/firestore/dist/esm/src/remote/persistent_stream.js.PersistentListenStream.onMessage (persistent_stream.js:309)
at persistent_stream.js:246
at persistent_stream.js:222
at async_queue.js:62
at ZoneDelegate.webpackJsonp.../../../../zone.js/dist/zone.js.ZoneDelegate.invoke (zone.js:392)
at Object.onInvoke (core.es5.js:3890)
at ZoneDelegate.webpackJsonp.../../../../zone.js/dist/zone.js.ZoneDelegate.invoke (zone.js:391)
Edit: even if the error happens, the tree seems to continue loading.
Edit: another version of the error, with code=aborted (after upgrading firebase to 4.6.0):
Error in onSnapshot: FirebaseError: [code=aborted]: The referenced transaction has expired or is no longer valid.
/vendor.bundle.js:18588 errHandler()
/vendor.bundle.js:33367
/polyfills.bundle.js:2970 ZoneDelegate.webpackJsonp.../../../../zone.js/dist/zone.js.ZoneDelegate.invokeTask()
/vendor.bundle.js:107276 Object.onInvokeTask()
/polyfills.bundle.js:2969 ZoneDelegate.webpackJsonp.../../../../zone.js/dist/zone.js.ZoneDelegate.invokeTask()
/polyfills.bundle.js:2737 Zone.webpackJsonp.../../../../zone.js/dist/zone.js.Zone.runTask()
/polyfills.bundle.js:3044 webpackJsonp.../../../../zone.js/dist/zone.js.ZoneTask.invokeTask()
/polyfills.bundle.js:3033 ZoneTask.invoke()
I have more or less the same problem, and it is random when it works and when it doesn't. I don't use onSnapshot, but valueChanges.
ERROR Error: transaction closed
at new FirestoreError (error.js:149)
at JsonProtoSerializer.webpackJsonp.../../../../@firebase/firestore/dist/esm/src/remote/serializer.js.JsonProtoSerializer.fromRpcStatus (serializer.js:93)
at JsonProtoSerializer.webpackJsonp.../../../../@firebase/firestore/dist/esm/src/remote/serializer.js.JsonProtoSerializer.fromWatchChange (serializer.js:536)
at PersistentListenStream.webpackJsonp.../../../../@firebase/firestore/dist/esm/src/remote/persistent_stream.js.PersistentListenStream.onMessage (persistent_stream.js:309)
at persistent_stream.js:246
at persistent_stream.js:222
at async_queue.js:62
at ZoneDelegate.webpackJsonp.../../../../zone.js/dist/zone.js.ZoneDelegate.invoke (zone.js:392)
at Object.onInvoke (core.es5.js:3890)
at ZoneDelegate.webpackJsonp.../../../../zone.js/dist/zone.js.ZoneDelegate.invoke (zone.js:391)
For me it's this piece of code that provokes the error:
return Observable.forkJoin(entries.map(entry => {
  return this.getPick(entry)
}))
Each entry in entries is used to fetch a single pick from Firestore.
private getPick(entryId: number) {
  return this.afs.collection<Pick>('entry/' + entryId + '/event/' + '9/' + 'picks', ref => ref.where('is_captain','==',true))
    .valueChanges()
}
I don't mean to hijack your thread, but I feel this is very relevant. If I change the forkJoin to
Observable.forkJoin(entries.slice(0,20)...
then it works, so I guess it is some kind of query overload.
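If slicing helps, one hedged workaround is to batch the queries instead of firing them all at once, for example by running forkJoin per chunk and concatenating the results sequentially. The sketch below assumes RxJS 5 with Observable.from, concatMap and reduce imported; also note that forkJoin only emits once every inner observable completes, so long-lived valueChanges() streams may need a take(1):

const batchSize = 20; // illustrative; tune to what the backend tolerates
const batches = [];
for (let i = 0; i < entries.length; i += batchSize) {
  batches.push(entries.slice(i, i + batchSize));
}
// Run one batch of picks at a time, then collect all results into one array.
return Observable.from(batches)
  .concatMap(batch => Observable.forkJoin(batch.map(entry => this.getPick(entry))))
  .reduce((allPicks, batchPicks) => allPicks.concat(batchPicks), []);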
I am working on a Sails project. I just updated my version of Node.js to v5.0.0, and now when I run sails lift on my app I get:
/Users/davidgeismar/wefootpostgres/node_modules/bindings/bindings.js:83
throw e
^
Error: Module version mismatch. Expected 47, got 46.
at Error (native)
at Object.Module._extensions..node (module.js:450:18)
at Module.load (module.js:356:32)
at Function.Module._load (module.js:311:12)
at Module.require (module.js:366:17)
at require (module.js:385:17)
at bindings (/Users/davidgeismar/wefootpostgres/node_modules/bindings/bindings.js:76:44)
at Object.<anonymous> (/Users/davidgeismar/wefootpostgres/node_modules/bcrypt/bcrypt.js:3:35)
at Module._compile (module.js:425:26)
at Object.Module._extensions..js (module.js:432:10)
at Module.load (module.js:356:32)
at Function.Module._load (module.js:311:12)
at Module.require (module.js:366:17)
at require (module.js:385:17)
at Object.<anonymous> (/Users/davidgeismar/wefootpostgres/api/services/PaiementService.js:2:14)
at Module._compile (module.js:425:26)
The bindings.js file looks like this:
/**
 * Module dependencies.
 */

var fs = require('fs')
  , path = require('path')
  , join = path.join
  , dirname = path.dirname
  , exists = fs.existsSync || path.existsSync
  , defaults = {
        arrow: process.env.NODE_BINDINGS_ARROW || ' → '
      , compiled: process.env.NODE_BINDINGS_COMPILED_DIR || 'compiled'
      , platform: process.platform
      , arch: process.arch
      , version: process.versions.node
      , bindings: 'bindings.node'
      , try: [
          // node-gyp's linked version in the "build" dir
          [ 'module_root', 'build', 'bindings' ]
          // node-waf and gyp_addon (a.k.a node-gyp)
        , [ 'module_root', 'build', 'Debug', 'bindings' ]
        , [ 'module_root', 'build', 'Release', 'bindings' ]
          // Debug files, for development (legacy behavior, remove for node v0.9)
        , [ 'module_root', 'out', 'Debug', 'bindings' ]
        , [ 'module_root', 'Debug', 'bindings' ]
          // Release files, but manually compiled (legacy behavior, remove for node v0.9)
        , [ 'module_root', 'out', 'Release', 'bindings' ]
        , [ 'module_root', 'Release', 'bindings' ]
          // Legacy from node-waf, node <= 0.4.x
        , [ 'module_root', 'build', 'default', 'bindings' ]
          // Production "Release" buildtype binary (meh...)
        , [ 'module_root', 'compiled', 'version', 'platform', 'arch', 'bindings' ]
      ]
    }

/**
 * The main `bindings()` function loads the compiled bindings for a given module.
 * It uses V8's Error API to determine the parent filename that this function is
 * being invoked from, which is then used to find the root directory.
 */

function bindings (opts) {

  // Argument surgery
  if (typeof opts == 'string') {
    opts = { bindings: opts }
  } else if (!opts) {
    opts = {}
  }
  opts.__proto__ = defaults

  // Get the module root
  if (!opts.module_root) {
    opts.module_root = exports.getRoot(exports.getFileName())
  }

  // Ensure the given bindings name ends with .node
  if (path.extname(opts.bindings) != '.node') {
    opts.bindings += '.node'
  }

  var tries = []
    , i = 0
    , l = opts.try.length
    , n
    , b
    , err

  for (; i<l; i++) {
    n = join.apply(null, opts.try[i].map(function (p) {
      return opts[p] || p
    }))
    tries.push(n)
    try {
      b = opts.path ? require.resolve(n) : require(n)
      if (!opts.path) {
        b.path = n
      }
      return b
    } catch (e) {
      if (!/not find/i.test(e.message)) {
        throw e
      }
    }
  }

  err = new Error('Could not locate the bindings file. Tried:\n'
    + tries.map(function (a) { return opts.arrow + a }).join('\n'))
  err.tries = tries
  throw err
}
module.exports = exports = bindings

/**
 * Gets the filename of the JavaScript file that invokes this function.
 * Used to help find the root directory of a module.
 * Optionally accepts an filename argument to skip when searching for the invoking filename
 */

exports.getFileName = function getFileName (calling_file) {
  var origPST = Error.prepareStackTrace
    , origSTL = Error.stackTraceLimit
    , dummy = {}
    , fileName

  Error.stackTraceLimit = 10

  Error.prepareStackTrace = function (e, st) {
    for (var i=0, l=st.length; i<l; i++) {
      fileName = st[i].getFileName()
      if (fileName !== __filename) {
        if (calling_file) {
          if (fileName !== calling_file) {
            return
          }
        } else {
          return
        }
      }
    }
  }

  // run the 'prepareStackTrace' function above
  Error.captureStackTrace(dummy)
  dummy.stack

  // cleanup
  Error.prepareStackTrace = origPST
  Error.stackTraceLimit = origSTL
  return fileName
}

/**
 * Gets the root directory of a module, given an arbitrary filename
 * somewhere in the module tree. The "root directory" is the directory
 * containing the `package.json` file.
 *
 *   In:  /home/nate/node-native-module/lib/index.js
 *   Out: /home/nate/node-native-module
 */

exports.getRoot = function getRoot (file) {
  var dir = dirname(file)
    , prev
  while (true) {
    if (dir === '.') {
      // Avoids an infinite loop in rare cases, like the REPL
      dir = process.cwd()
    }
    if (exists(join(dir, 'package.json')) || exists(join(dir, 'node_modules'))) {
      // Found the 'package.json' file or 'node_modules' dir; we're done
      return dir
    }
    if (prev === dir) {
      // Got to the top
      throw new Error('Could not find module root given file: "' + file
        + '". Do you have a `package.json` file? ')
    }
    // Try the parent dir next
    prev = dir
    dir = join(dir, '..')
  }
}
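For context, a native module such as bcrypt calls into this file roughly like the sketch below (the addon name here is a placeholder, not bcrypt's actual one). The Module version mismatch error is raised while require()-ing the resolved .node binary inside the try loop above and re-thrown by the catch block, since it doesn't match the "not find" check:

// Hypothetical usage sketch -- 'my_addon' is a placeholder addon name.
var bindings = require('bindings');
var addon = bindings('my_addon'); // resolves e.g. build/Release/my_addon.node and require()s it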
Do you know what is going wrong here?
Let's have an "error_test.js" file that contains:
db = db.getMongo().getDB( "mydb" );
db.mycol.insert( { hello : "world" } );
print("it is shown");
db.runCommand(
  {
    mapReduce: "mycol",
    map: function(){ print(not_exists); },
    reduce: function(key, values){},
    out: { replace: "myrescol" }
  }
);
print("it is shown too (after error in mapreduce!)");
If I run the file (in Windows command line), I get:
mypath>mongo error_test.js
MongoDB shell version: 2.4.0
connecting to: test
it is shown
it is shown too (after error in mapreduce!)
mypath>echo %errorlevel%
0
mypath>
So we can deduce that:
the mapreduce error doesn't stop the execution.
the mapreduce error is not shown to the user.
the mapreduce error is not translated to the exit code (0 = success) (so a caller program can't detect the error).
The only way to know about the error is by looking for the following line in "mongod.log":
Wed Jun 12 10:02:37.393 [conn14] JavaScript execution failed: ReferenceError: not_exists is not defined near '(){ print(not_exists)'
The same happens if we use the "db.doEval(my_js)" method in Java and put the content of the "error_test.js" file into the "my_js" variable: the mapreduce error is not detected (no exception is thrown, no null value is returned, and "ok : 1.0" appears in the response).
So my question is: How can I detect an error in the mapreduce? (both in a js file and in the doEval() method)
Thank you
You need to capture the return document from db.runCommand() into a variable and then check its ok value in your script - you can then throw an error or print output, etc.
print("it is shown");
var res = db.runCommand(
  {
    mapReduce: "mycol",
    map: function(){ print(not_exists); },
    reduce: function(key, values){},
    out: { replace: "myrescol" }
  }
);
printjson(res);
if (res.ok == 0) {
  print("Oopsy!");
  throw("error! " + res.errmsg + " returned from mapreduce");
}
print("it is shown too (after error in mapreduce!)");
I'm trying to send an email using SmtpClient.try_send(). This code used to work before Opa switched to the Node.js backend:
import stdlib.web.mail.smtp.client
import stdlib.web.mail

function start()
{
  Email.email from = {name:some("name"), address:{local:"contact", domain:"hello.com"}}
  Email.email to = {name:some("name"), address:{local:"contact", domain:"hello.com"}}
  Email.content content = {text : "This is Great!"}
  SmtpClient.try_send(from, to, "subject", content, Email.default_options)
  <>Hello</>
}

Server.start(
  {port:8092, netmask:0.0.0.0, encryption: {no_encryption}, name:"test"},
  [
    {page: start, title: "test" }
  ]
)
But now it fails with the following error:
Test serving on http://ks3098156.kimsufi.com:8092
node.js:201
throw e; // process.nextTick error, or 'error' event on first tick
^
Error: write EPIPE
at errnoException (net.js:646:11)
at Object.afterWrite [as oncomplete] (net.js:480:18)
What's wrong?
Thanks.
Can you try with:
SmtpClient.try_send(from, to, "subject", content, { Email.default_options with to:[to] })