Class variable is undefined... despite being defined - coffeescript

I have a class called RouteBinder that looks like this:
class RouteBinder
  constructor: (@server, @pool) ->

  bindRoute: (name, fn, method = "post", useDb = true) ->
    @server[method]("/api/accounts/v1/" + name, (req, res, next) ->
      client = await @pool.connect() if useDb?
      await fn req, res, next, client
      await @pool.release() if useDb?
    )
I declare it and call it like this:
binder = new RouteBinder server, pool
binder.bindRoute "login", controllers.login
(Pool is node-postgres's Pool and is declared and tested earlier like this)
pool = new Pool
[...]
try
  client = await pool.connect()
  await client.query 'SELECT 1=1'
catch e
  console.fatal "Could not connect to database: #{e}"
  return
finally
  try client.release() if client?
  catch e
    console.fatal "Couldn't release client: #{e}"
    return
console.info 'Database is OK!'
When running this, I get this error:
02/14 18:44:34 error (node:11855) UnhandledPromiseRejectionWarning: TypeError: Cannot read property 'connect' of undefined
at _callee2$ (/home/vi/[redacted]_accounts/main.js:136:38)
at tryCatch (/home/vi/[redacted]_accounts/node_modules/regenerator-runtime/runtime.js:45:40)
at Generator.invoke [as _invoke] (/home/vi/[redacted]_accounts/node_modules/regenerator-runtime/runtime.js:271:22)
at Generator.prototype.(anonymous function) [as next] (/home/vi/[redacted]_accounts/node_modules/regenerator-runtime/runtime.js:97:21)
at asyncGeneratorStep (/home/vi/[redacted]_accounts/node_modules/@babel/runtime/helpers/asyncToGenerator.js:3:24)
at _next (/home/vi/[redacted]_accounts/node_modules/@babel/runtime/helpers/asyncToGenerator.js:25:9)
at /home/vi/[redacted]_accounts/node_modules/@babel/runtime/helpers/asyncToGenerator.js:32:7
at new Promise (<anonymous>)
at /home/vi/[redacted]_accounts/node_modules/@babel/runtime/helpers/asyncToGenerator.js:21:12
at /home/vi/[redacted]_accounts/main.js:166:26
02/14 18:44:34 error (node:11855) UnhandledPromiseRejectionWarning: Unhandled promise rejection. This error originated either by throwing inside of an async function without a catch block, or by rejecting a promise which was not handled with .catch(). (rejection id: 1)
02/14 18:44:34 error (node:11855) [DEP0018] DeprecationWarning: Unhandled promise rejections are deprecated. In the future, promise rejections that are not handled will terminate the Node.js process with a non-zero exit code.
I'm using CoffeeScript, compiled and then transpiled with Babel. My .babelrc looks like this:
{
  "presets": ["@babel/env"],
  "plugins": [
    ["@babel/plugin-transform-runtime",
      {
        "regenerator": true
      }
    ]
  ]
}
Sorry if it's a rookie question, I'm still learning and would love all the advice I can get.

I figured out my mistake. Both @pool and @server were defined; however, the inline function (the handler) passed to @server[method] was running with its own this context rather than the RouteBinder instance.
The solution was to bind it to the RouteBinder instance using .bind(@) (or .bind(this), if you prefer). A CoffeeScript fat-arrow handler (=>) would bind it the same way.
bindRoute: (name, fn, method = "post", useDb = true) ->
  @server[method]("/api/accounts/v1/" + name, ((req, res, next) ->
    console.log "pool", @pool
    client = await @pool.connect() if useDb?
    await fn req, res, next, client
    await @pool.release() if useDb?
  ).bind(@))

Flutter Web Error: [firebase_functions/internal] internal

I have an HTTP-callable Cloud Function written in Python that does some calculations and updates some Firestore documents.
The function itself actually works from both the Android emulator and Chrome (Flutter Web).
Still, I get the following error when I trigger it from Chrome (Flutter Web):
Instance of '_Future<HttpsCallableResult<dynamic>>'
Error: [firebase_functions/internal] internal
at Object.throw_ [as throw] (http://localhost:54521/dart_sdk.js:5067:11)
at https_callable_web.HttpsCallableWeb.new.call (http://localhost:54521/packages/cloud_functions_web/https_callable_web.dart.lib.js:45:23)
at call.throw (<anonymous>)
at http://localhost:54521/dart_sdk.js:40576:38
at _RootZone.runBinary (http://localhost:54521/dart_sdk.js:40445:59)
at _FutureListener.thenAwait.handleError (http://localhost:54521/dart_sdk.js:35374:33)
at handleError (http://localhost:54521/dart_sdk.js:35947:51)
at Function._propagateToListeners (http://localhost:54521/dart_sdk.js:35973:17)
at _Future.new.[_completeError] (http://localhost:54521/dart_sdk.js:35823:23)
at async._AsyncCallbackEntry.new.callback (http://localhost:54521/dart_sdk.js:35859:31)
at Object._microtaskLoop (http://localhost:54521/dart_sdk.js:40708:13)
at _startMicrotaskLoop (http://localhost:54521/dart_sdk.js:40714:13)
at http://localhost:54521/dart_sdk.js:36191:9
In the GCP logs, no error is shown.
This is what I return from the Cloud Function: return '{"status":"200", "data": "OK"}'
In the Chrome developer tools, under the Network tab, the status shows a CORS error. I read quite a lot of SO questions and understood that the CORS error is apparently not the real cause of the problem.
Also in the same tab (Network), under Headers -> Request Headers, it says "Provisional headers are shown"; the Payload shows {data: null}, and the Response has nothing to show, which is weird since I am returning "data": "OK".
I am thoroughly confused, since the error thrown, internal, is not leading me anywhere.
I finally fixed it by omitting region() on the cloud function.
My original code:
exports.checkAuth = functions.region("asia-southeast1").https.onCall(async (data, context: functions.https.CallableContext) => {
  return `uid: ${context.auth?.uid ?? "X"} - email: ${context.auth?.token.email ?? "X"}`;
});
I changed it to:
exports.checkAuth = functions.https.onCall(async (data, context: functions.https.CallableContext) => {
  return `uid: ${context.auth?.uid ?? "X"} - email: ${context.auth?.token.email ?? "X"}`;
});
===UPDATE===
The real root cause is that the region specified on the Cloud Function and the region used by Firebase Functions on the client are different. For example, in my original code I used:
functions.region("asia-southeast1").https.onCall()
So when I instantiate Firebase Functions in main.dart, I must do this:
void main() async {
  ...
  final firebaseFunctions = FirebaseFunctions.instanceFor(region: 'asia-southeast1');
  ...
}

How to handle idle_in_transaction_session_timeout?

When we set idle_in_transaction_session_timeout, the database terminates sessions that stay idle inside an open transaction for longer than the timeout.
This works as expected, but I wonder how we should deal with this situation in the application code.
We are using pg-promise 10.3.1.
Details of the test:
we set the connection pool size to 1, so that we only have a single session
we set the idle_in_transaction_session_timeout to 2.5 sec:
SET idle_in_transaction_session_timeout TO 2500
now the active transaction will be in state idle in transaction:
see What can cause “idle in transaction” for “BEGIN” statements
now we start a transaction and sleep for 5 seconds
after 2.5sec the database will terminate the session and send an error to the client:
pgp-error error: terminating connection due to idle-in-transaction timeout
after another 2.5sec the transactional code tries to send a query (via the already terminated session), and this fails as expected:
dbIdle failed Error: Client has encountered a connection error and is not queryable
then pg-promise will try to roll back the transaction, which will also fail (also expected, I guess)
But now we run a new query, and this query also fails with
dbCall failed Client has encountered a connection error and is not queryable
Is this expected? I was hoping that pg-promise could somehow remove the "broken" connection from the pool and that we would get a new one.
Obviously this is not the case, so how should we deal with this situation, i.e. how do we recover so that we can send new queries to the database?
Code example:
import pgPromise, { IMain } from "pg-promise";
import * as dbConfig from "./db-config.json";
import { IConnectionParameters } from "pg-promise/typescript/pg-subset";

const cll = "pg";
console.time(cll);

const pgp: IMain = pgPromise({
  query(e) {
    console.timeLog(cll, `> ${e.query}`);
  },
  error(e, ctx) {
    console.timeLog(cll, "pgp-error", e);
  }
});

const connectParams: IConnectionParameters = {
  ...dbConfig,
  application_name: "pg-test",
  max: 1
};

const db = pgp(connectParams);

/**
 * @param timeoutMs 0 is no timeout
 */
async function setDbIdleInTxTimeout(timeoutMs: number = 0) {
  await db.any("SET idle_in_transaction_session_timeout TO $1;", timeoutMs);
}

async function dbIdle(sleepTimeSec: number) {
  console.timeLog(cll, `starting db idle ${sleepTimeSec}`);
  const result = await db.tx(async t => {
    await new Promise(resolve => setTimeout(resolve, sleepTimeSec * 1000));
    return t.one("Select $1 as sleep_sec", sleepTimeSec);
  });
  console.timeLog(cll, result);
}

async function main() {
  await setDbIdleInTxTimeout(2500);
  try {
    await dbIdle(5);
  } catch (e) {
    console.timeLog(cll, "dbIdle failed", e);
  }
  try {
    await db.one("Select 1+1 as res");
  } catch (e) {
    console.timeLog(cll, "dbCall failed", e);
  }
}

main().finally(() => {
  pgp.end();
});
Console output (removed some useless lines):
"C:\Program Files\nodejs\node.exe" D:\dev_no_backup\pg-promise-tx\dist\index.js
pg: 23.959ms > SET idle_in_transaction_session_timeout TO 2500;
pg: 28.696ms starting db idle 5
pg: 29.705ms > begin
pg: 2531.247ms pgp-error error: terminating connection due to idle-in-transaction timeout
at TCP.onStreamRead (internal/stream_base_commons.js:182:23) {
name: 'error',
severity: 'FATAL',
code: '25P03',
}
pg: 2533.569ms pgp-error Error: Connection terminated unexpectedly
pg: 5031.091ms > Select 5 as sleep_sec
pg: 5031.323ms pgp-error Error: Client has encountered a connection error and is not queryable
pg: 5031.489ms > rollback
pg: 5031.570ms pgp-error Error: Client has encountered a connection error and is not queryable
pg: 5031.953ms dbIdle failed Error: Client has encountered a connection error and is not queryable
pg: 5032.094ms > Select 1+1 as res
pg: 5032.164ms pgp-error Error: Client has encountered a connection error and is not queryable
pg: 5032.303ms dbCall failed Error: Client has encountered a connection error and is not queryable
Process finished with exit code 0
This issue (#680) has been fixed in pg-promise 10.3.5.
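With that fix in place, a query that hits a terminated session still rejects, but the pool no longer keeps handing back the broken client, so application code can catch the error and simply retry. A minimal sketch of such a wrapper (the helper name and retry policy are my own, not part of pg-promise's API; it assumes pg-promise >= 10.3.5 and the db object from the example above):

// Sketch: retry a query once if the previous client was terminated, e.g. by
// idle_in_transaction_session_timeout. Assumes pg-promise >= 10.3.5, where the
// broken client is evicted from the pool and the next attempt gets a fresh one.
async function queryWithRetry<T>(run: () => Promise<T>, retries = 1): Promise<T> {
  try {
    return await run();
  } catch (e) {
    if (retries > 0) {
      console.warn("query failed, retrying:", e instanceof Error ? e.message : e);
      return queryWithRetry(run, retries - 1);
    }
    throw e;
  }
}

// Usage:
// const res = await queryWithRetry(() => db.one("Select 1+1 as res"));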

[Frisby] Can't report correctly if a test fails

I wrote a REST API test with frisby.js.
If the test passes, there is no problem.
But if the test fails, Frisby doesn't report the failure correctly on Linux (it reports correctly on Windows).
Here is the sample code:
const frisby = require('frisby');
const Joi = frisby.Joi;

describe('TEST', () => {
  it('should return a status of 200', (done) => {
    frisby
      .get('https://jsonfeed.org/feed.json')
      .expect('status', 400) // deliberate error
      .done(done);
  });
});
If this spec.js is run on Windows, the result is as follows:
> jasmine-node .
F
Failures:
1) TEST should return a status of 200
Message:
Expected 'AssertionError: HTTP status 400 !== 200
at FrisbySpec.status ([mydirpath]\expects.js:25:12)
(snip)
But if spec.js is run on Linux (Ubuntu), the result is:
(node:28704) UnhandledPromiseRejectionWarning: AssertionError [ERR_ASSERTION]: HTTP status 400 !== 200
at FrisbySpec.status (/home/nsco/jen_work/frisby/node_modules/frisby/src/frisby/expects.js:25:12)
at FrisbySpec._addExpect.response (/home/nsco/jen_work/frisby/node_modules/frisby/src/frisby/spec.js:368:23)
at FrisbySpec._runExpects (/home/nsco/jen_work/frisby/node_modules/frisby/src/frisby/spec.js:260:24)
at _fetch.fetch.then.then.responseBody (/home/nsco/jen_work/frisby/node_modules/frisby/src/frisby/spec.js:139:14)
at <anonymous>
at process._tickCallback (internal/process/next_tick.js:160:7)
(node:28704) UnhandledPromiseRejectionWarning: Unhandled promise rejection. This error originated either by throwing inside of an async function without a catch block, or by rejecting a promise which was not handled with .catch(). (rejection id: 1)
(node:28704) [DEP0018] DeprecationWarning: Unhandled promise rejections are deprecated. In the future, promise rejections that are not handled will terminate the Node.js process with a non-zero exit code.
F
Failures:
1) TEST should return a status of 200
Message:
timeout: timed out after 5000 msec waiting for spec to complete
Stacktrace:
undefined
"Failures:" section is displayed as "Timeout".
(On windows, displayed as "Expected 'AssertionError: HTTP status 400 !== 200".It is correct.)
Environments:
frisby@2.0.11
jasmine-node@1.14.5
node/9.4.0
Ubuntu 16.04

Error in onSnapshot: FirebaseError: [code=invalid-argument]: transaction closed

I'm recursively loading a tree stored in Firestore. The tree has ~79 nodes.
Very occasionally I'm getting this error (about one in ten full-tree loads).
Edit: the code: https://github.com/karol-depka/OrYoL
Edit: the example deployed: https://oryol.karoldepka.com/tree (sorry, no plunker for now, just this)
Details below.
Firebase version in package.json: 4.5.0
Questions:
Where can I get more info than this basic documentation (https://firebase.google.com/docs/reference/js/firebase.FirebaseError)?
What is the source of the problem, and how do I fix it?
3VM724:27 Uncaught Error in onSnapshot: Error: transaction closed
at new FirestoreError (error.js:164)
at JsonProtoSerializer.webpackJsonp.../../../../firebase/firestore/remote/serializer.js.JsonProtoSerializer.fromRpcStatus (serializer.js:126)
at JsonProtoSerializer.webpackJsonp.../../../../firebase/firestore/remote/serializer.js.JsonProtoSerializer.fromWatchChange (serializer.js:517)
at PersistentListenStream.webpackJsonp.../../../../firebase/firestore/remote/persistent_stream.js.PersistentListenStream.onMessage (persistent_stream.js:334)
at persistent_stream.js:270
at persistent_stream.js:247
at async_queue.js:81
at ZoneDelegate.webpackJsonp.../../../../zone.js/dist/zone.js.ZoneDelegate.invoke (zone.js:392)
at Object.onInvoke (core.es5.js:3890)
at ZoneDelegate.webpackJsonp.../../../../zone.js/dist/zone.js.ZoneDelegate.invoke (zone.js:391)
The code:
private processNodeEvents(nestLevel: number, snapshot: any, parents, listener: DbTreeListener) {
  const serviceThis = this
  snapshot.docChanges.forEach(function(change) {
    let data = change.doc.data()
    if (change.type === 'added') {
      const parentsPath = serviceThis.nodesPath(parents)
      console.log('node: ', nestLevel, parentsPath, data);
      serviceThis.pendingListeners ++
      data.node.onSnapshot(targetNodeDoc => {
        serviceThis.pendingListeners --
        listener.onNodeAdded(
          new NodeAddEvent(parentsPath, parentsPath[parentsPath.length - 1], targetNodeDoc, targetNodeDoc.id,
            serviceThis.pendingListeners))
        console.log('target node:', nestLevel, targetNodeDoc)
        console.log('target node title:', nestLevel, targetNodeDoc.data().title)
        const subCollection = targetNodeDoc.ref.collection('subNodes')
        console.log('subColl:', subCollection)
        subCollection.onSnapshot((subSnap: QuerySnapshot) => {
          const newParents = parents.slice(0)
          newParents.push(targetNodeDoc.ref)
          serviceThis.processNodeEvents(nestLevel + 1, subSnap, newParents, listener)
        })
      })
      // console.log('root node ref: ', targetNode);
    }
    if (change.type === 'modified') {
      console.log('Modified city: ', data);
    }
    if (change.type === 'removed') {
      console.log('Removed city: ', data);
    }
  })
}
Edit: another error discovered, by running the code multiple times:
VM3343:27 Uncaught Error in onSnapshot: Error: The referenced transaction has expired or is no longer valid.
at new FirestoreError (error.js:164)
at JsonProtoSerializer.webpackJsonp.../../../../firebase/firestore/remote/serializer.js.JsonProtoSerializer.fromRpcStatus (serializer.js:126)
at JsonProtoSerializer.webpackJsonp.../../../../firebase/firestore/remote/serializer.js.JsonProtoSerializer.fromWatchChange (serializer.js:517)
at PersistentListenStream.webpackJsonp.../../../../firebase/firestore/remote/persistent_stream.js.PersistentListenStream.onMessage (persistent_stream.js:334)
at persistent_stream.js:270
at persistent_stream.js:247
at async_queue.js:81
at ZoneDelegate.webpackJsonp.../../../../zone.js/dist/zone.js.ZoneDelegate.invoke (zone.js:392)
at Object.onInvoke (core.es5.js:3890)
at ZoneDelegate.webpackJsonp.../../../../zone.js/dist/zone.js.ZoneDelegate.invoke (zone.js:391)
window.console.error @ VM3343:27
Edit: after updating firebase to 4.6.0, the problem happened 3 times:
Uncaught Error in onSnapshot: Error: transaction closed
at new FirestoreError (error.js:149)
at JsonProtoSerializer.webpackJsonp.../../../../@firebase/firestore/dist/esm/src/remote/serializer.js.JsonProtoSerializer.fromRpcStatus (serializer.js:93)
at JsonProtoSerializer.webpackJsonp.../../../../@firebase/firestore/dist/esm/src/remote/serializer.js.JsonProtoSerializer.fromWatchChange (serializer.js:536)
at PersistentListenStream.webpackJsonp.../../../../@firebase/firestore/dist/esm/src/remote/persistent_stream.js.PersistentListenStream.onMessage (persistent_stream.js:309)
at persistent_stream.js:246
at persistent_stream.js:222
at async_queue.js:62
at ZoneDelegate.webpackJsonp.../../../../zone.js/dist/zone.js.ZoneDelegate.invoke (zone.js:392)
at Object.onInvoke (core.es5.js:3890)
at ZoneDelegate.webpackJsonp.../../../../zone.js/dist/zone.js.ZoneDelegate.invoke (zone.js:391)
Edit: even if the error happens, the tree seems to continue loading.
Edit: another version of the error, with code=aborted (after upgrading firebase to 4.6.0):
Error in onSnapshot: FirebaseError: [code=aborted]: The referenced transaction has expired or is no longer valid.
/vendor.bundle.js:18588 errHandler()
/vendor.bundle.js:33367
/polyfills.bundle.js:2970 ZoneDelegate.webpackJsonp.../../../../zone.js/dist/zone.js.ZoneDelegate.invokeTask()
/vendor.bundle.js:107276 Object.onInvokeTask()
/polyfills.bundle.js:2969 ZoneDelegate.webpackJsonp.../../../../zone.js/dist/zone.js.ZoneDelegate.invokeTask()
/polyfills.bundle.js:2737 Zone.webpackJsonp.../../../../zone.js/dist/zone.js.Zone.runTask()
/polyfills.bundle.js:3044 webpackJsonp.../../../../zone.js/dist/zone.js.ZoneTask.invokeTask()
/polyfills.bundle.js:3033 ZoneTask.invoke()
I have more or less the same problem, and it is random when it works and when it doesn't. I don't use onSnapshot, but valueChanges.
ERROR Error: transaction closed
at new FirestoreError (error.js:149)
at JsonProtoSerializer.webpackJsonp.../../../../@firebase/firestore/dist/esm/src/remote/serializer.js.JsonProtoSerializer.fromRpcStatus (serializer.js:93)
at JsonProtoSerializer.webpackJsonp.../../../../@firebase/firestore/dist/esm/src/remote/serializer.js.JsonProtoSerializer.fromWatchChange (serializer.js:536)
at PersistentListenStream.webpackJsonp.../../../../@firebase/firestore/dist/esm/src/remote/persistent_stream.js.PersistentListenStream.onMessage (persistent_stream.js:309)
at persistent_stream.js:246
at persistent_stream.js:222
at async_queue.js:62
at ZoneDelegate.webpackJsonp.../../../../zone.js/dist/zone.js.ZoneDelegate.invoke (zone.js:392)
at Object.onInvoke (core.es5.js:3890)
at ZoneDelegate.webpackJsonp.../../../../zone.js/dist/zone.js.ZoneDelegate.invoke (zone.js:391)
For me it's this piece of code that provokes the error:
return Observable.forkJoin(entries.map(entry => {
  return this.getPick(entry)
}))
Each entry in entries is used to return a single pick from Firestore.
private getPick(entryId: number) {
  return this.afs.collection<Pick>('entry/' + entryId + '/event/' + '9/' + 'picks', ref => ref.where('is_captain', '==', true))
    .valueChanges()
}
I don't mean to hijack your thread, but I feel this is very relevant. If I change the forkJoin to
Observable.forkJoin(entries.slice(0,20)...
then it works, so I guess it is some kind of overload of queries.
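If the number of concurrent queries is indeed the problem, one way to stay under a limit is to run them in batches instead of all at once. A minimal sketch (assuming RxJS 5 patch-style imports as in an Angular project of that era, the getPick() helper shown above, and an arbitrary batch size of 20; the chunk and getPicksInBatches helpers are illustrative, not an official API):

import { Observable } from 'rxjs/Observable';
import 'rxjs/add/observable/forkJoin';
import 'rxjs/add/observable/concat';

// Split the entry ids into groups of `size`.
function chunk<T>(items: T[], size: number): T[][] {
  const out: T[][] = [];
  for (let i = 0; i < items.length; i += size) {
    out.push(items.slice(i, i + size));
  }
  return out;
}

// One forkJoin per batch, subscribed to sequentially via concat, so no more
// than 20 Firestore queries are in flight at the same time.
function getPicksInBatches(entries: number[], getPick: (entry: number) => Observable<any>) {
  const batches = chunk(entries, 20).map(batch =>
    Observable.forkJoin(batch.map(entry => getPick(entry)))
  );
  return Observable.concat(...batches);
}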

Creating custom components and running them with NoFlo

I am creating a custom component which interfaces with MongoDB. I wrote a CoffeeScript file which just connects to MongoDB and stored it in the noflo/components folder.
MongoBase.coffee
noflo = require "noflo"
mongodb = require "mongodb"
url = require "url"

class exports.MongoBase extends noflo.Component
  constructor: ->
    super
    @inPorts =
      url: new noflo.Port()
    @inPorts.url.on "data", (data) =>
      try
        @parseConnectionString(data)
        @MongoClient = mongodb.MongoClient;
        @MongoClient.connect @serverUrl, (err, db) ->
          if err
            console.log("Error in connecting to MongoDB")
          else
            console.log("Connected to MongoDB")
      catch error
        console.log(error)

  parseConnectionString: (connectionString) =>
    databaseUrl = try
      url.parse(connectionString)
    catch error
      console.log(error)
    [..., @serverUrl, @databaseName] = databaseUrl.split('/')
    @serverUrl = "mongo://" + @serverUrl
I added the following entry to component.json
"MongoBase": "components/MongoBase.coffee"
In addition to this, I created a mongo.fbp file to check the flow of the component. The FBP file has the following code:
'mongodb://localhost:27017/test' -> url DocReader(MongoBase)
On running noflo mongo.fbp, I get the following error:
/home/saurabh/workspace/noflo/node_modules/fbp/lib/fbp.js:1628
edges.forEach(function (o, i) {
^
TypeError: Object #<Object> has no method 'forEach'
at Object.parser.registerEdges (/home/saurabh/workspace/noflo/node_modules/fbp/lib/fbp.js:1628:15)
at peg$c25 (/home/saurabh/workspace/noflo/node_modules/fbp/lib/fbp.js:60:50)
at peg$parseline (/home/saurabh/workspace/noflo/node_modules/fbp/lib/fbp.js:749:30)
at peg$parsestart (/home/saurabh/workspace/noflo/node_modules/fbp/lib/fbp.js:282:12)
at Object.parse (/home/saurabh/workspace/noflo/node_modules/fbp/lib/fbp.js:1650:18)
at Object.exports.loadFBP (/home/saurabh/workspace/noflo/lib/Graph.js:1065:33)
at /home/saurabh/workspace/noflo/lib/Graph.js:1116:24
at fs.js:268:14
at Object.oncomplete (fs.js:107:15)
Is there something wrong with my code, or with the steps I am using to run it?
You may have figured this out already, as it's been several months since you asked, but I believe you need to add the getComponent() method to your class before you export it.
noflo = require "noflo"
mongodb = require "mongodb"
url = require "url"

class MongoBase extends noflo.Component
  constructor: ->
    super
    @inPorts =
      url: new noflo.Port()
    @inPorts.url.on "data", (data) =>
      try
        @parseConnectionString(data)
        @MongoClient = mongodb.MongoClient;
        @MongoClient.connect @serverUrl, (err, db) ->
          if err
            console.log("Error in connecting to MongoDB")
          else
            console.log("Connected to MongoDB")
      catch error
        console.log(error)

  parseConnectionString: (connectionString) =>
    databaseUrl = try
      url.parse(connectionString)
    catch error
      console.log(error)
    [..., @serverUrl, @databaseName] = databaseUrl.split('/')
    @serverUrl = "mongo://" + @serverUrl

MongoBase.getComponent = -> new MongoBase
exports.MongoBase = MongoBase
Additionally, for the component loader to work, you need to specify in your graph the package your component lives in. If your package.json/component.json has a name entry like "name": "mongo-base", then you'd have to specify this in the FBP graph, like so:
'mongodb://localhost:27017/test' -> url DocReader(mongo-base/MongoBase)
N.B.: The loader clobbers any instance of 'noflo-' in the package name, so this needs to be taken into account. E.g. the name 'noflo-mongo' would get turned into just 'mongo', so when invoking the package's components in the FBP graph you'd write DocReader(mongo/MongoBase), not DocReader(noflo-mongo/MongoBase).