I am running JavaScript tests in Karma, with the functionality compiled by webpack. It looks like the sources are compiled but never processed by Karma: no tests run.
testing.webpack.js
module.exports = {
  devtool: 'inline-source-map',
  resolve: {
    extensions: ['.js']
  },
  module: {
    rules: [
      {
        test: /\.js$/,
        exclude: [/node_modules/],
        use: [{
          loader: 'babel-loader',
        }]
      }
    ]
  }
};
Here is my karma.conf.js:
const webpackConfig = require('./testing.webpack.js');
module.exports = function (config) {
  config.set({
    basePath: './',
    coverageReporter: {
      dir: 'tmp/coverage/',
      reporters: [
        { type: 'html', subdir: 'report-html' },
        { type: 'lcov', subdir: 'report-lcov' }
      ],
      instrumenterOptions: {
        istanbul: { noCompact: true }
      }
    },
    files: [
      'spec/**/*.spec.js'
    ],
    frameworks: ['should', 'jasmine', 'mocha'],
    reporters: ['mocha', 'coverage'],
    preprocessors: {
      'spec/**/*.spec.js': ['webpack', 'sourcemap']
    },
    plugins: [
      'karma-webpack',
      'karma-jasmine',
      'karma-mocha',
      'karma-should',
      'karma-coverage',
      'karma-chrome-launcher',
      'karma-phantomjs-launcher',
      'karma-mocha-reporter',
      'karma-sourcemap-loader'
    ],
    webpack: webpackConfig,
    webpackMiddleware: {
      stats: 'errors-only'
    }
  });
  return config;
};
I receive the following output:
npx karma start karma.conf.js --single-run --browsers Chrome --debug
14 12 2020 15:54:55.608:DEBUG [config]: Loading config /home/victor/github/victor-shelepen/instance-locator/karma.conf.js
14 12 2020 15:54:55.612:DEBUG [config]: autoWatch set to false, because of singleRun
14 12 2020 15:54:55.613:DEBUG [karma-server]: Final config Config {
LOG_DISABLE: 'OFF',
LOG_ERROR: 'ERROR',
LOG_WARN: 'WARN',
LOG_INFO: 'INFO',
LOG_DEBUG: 'DEBUG',
frameworks: [ 'should', 'jasmine', 'mocha' ],
protocol: 'http:',
port: 9876,
listenAddress: '0.0.0.0',
hostname: 'localhost',
httpsServerConfig: {},
basePath: '/home/victor/github/victor-shelepen/instance-locator',
files: [
Pattern {
pattern: '/home/victor/github/victor-shelepen/instance-locator/spec/**/*.spec.js',
served: true,
included: true,
watched: false,
nocache: false,
weight: [ 1, 1, 1, 0, 0, 0 ],
type: undefined,
isBinary: undefined
}
],
browserConsoleLogOptions: { level: 'debug', format: '%b %T: %m', terminal: true },
customContextFile: null,
customDebugFile: null,
customClientContextFile: null,
exclude: [
'/home/victor/github/victor-shelepen/instance-locator/karma.conf.js'
],
logLevel: 'DEBUG',
colors: true,
autoWatch: false,
autoWatchBatchDelay: 250,
restartOnFileChange: false,
usePolling: true,
reporters: [ 'mocha', 'coverage' ],
singleRun: true,
browsers: [ 'Chrome' ],
captureTimeout: 60000,
pingTimeout: 5000,
proxies: {},
proxyValidateSSL: true,
preprocessors: [Object: null prototype] {
'/home/victor/github/victor-shelepen/instance-locator/spec/**/*.spec.js': [ 'webpack', 'sourcemap' ]
},
preprocessor_priority: {},
urlRoot: '/',
upstreamProxy: undefined,
reportSlowerThan: 0,
loggers: [
{
type: 'console',
layout: { type: 'pattern', pattern: '%[%d{DATE}:%p [%c]: %]%m' }
}
],
transports: [ 'polling', 'websocket' ],
forceJSONP: false,
plugins: [
'karma-webpack',
'karma-jasmine',
'karma-mocha',
'karma-should',
'karma-coverage',
'karma-chrome-launcher',
'karma-phantomjs-launcher',
'karma-mocha-reporter',
'karma-sourcemap-loader'
],
client: {
args: [],
useIframe: true,
runInParent: false,
captureConsole: true,
clearContext: true
},
defaultClient: {
args: [],
useIframe: true,
runInParent: false,
captureConsole: true,
clearContext: true
},
browserDisconnectTimeout: 2000,
browserDisconnectTolerance: 0,
browserNoActivityTimeout: 30000,
processKillTimeout: 2000,
concurrency: Infinity,
failOnEmptyTestSuite: true,
retryLimit: 2,
detached: false,
crossOriginAttribute: true,
browserSocketTimeout: 20000,
cmd: 'start',
debug: true,
configFile: '/home/victor/github/victor-shelepen/instance-locator/karma.conf.js',
coverageReporter: {
dir: 'tmp/coverage/',
reporters: [
{ type: 'html', subdir: 'report-html' },
{ type: 'lcov', subdir: 'report-lcov' }
],
instrumenterOptions: { istanbul: { noCompact: true } }
},
webpack: {
devtool: 'inline-source-map',
resolve: { extensions: [ '.js' ] },
module: {
rules: [
{
test: /\.js$/,
exclude: [ /node_modules/ ],
use: [ { loader: 'babel-loader' } ]
}
]
}
},
webpackMiddleware: { stats: 'errors-only' }
}
14 12 2020 15:54:55.614:DEBUG [plugin]: Loading plugin karma-webpack.
14 12 2020 15:54:55.664:DEBUG [plugin]: Loading plugin karma-jasmine.
14 12 2020 15:54:55.665:DEBUG [plugin]: Loading plugin karma-mocha.
14 12 2020 15:54:55.666:DEBUG [plugin]: Loading plugin karma-should.
14 12 2020 15:54:55.667:DEBUG [plugin]: Loading plugin karma-coverage.
14 12 2020 15:54:55.914:DEBUG [plugin]: Loading plugin karma-chrome-launcher.
14 12 2020 15:54:55.920:DEBUG [plugin]: Loading plugin karma-phantomjs-launcher.
14 12 2020 15:54:55.938:DEBUG [plugin]: Loading plugin karma-mocha-reporter.
14 12 2020 15:54:55.941:DEBUG [plugin]: Loading plugin karma-sourcemap-loader.
14 12 2020 15:54:55.956:DEBUG [web-server]: Instantiating middleware
14 12 2020 15:54:55.957:DEBUG [reporter]: Trying to load reporter: mocha
14 12 2020 15:54:55.958:DEBUG [reporter]: Trying to load color-version of reporter: mocha (mocha_color)
14 12 2020 15:54:55.959:DEBUG [reporter]: Couldn't load color-version.
14 12 2020 15:54:55.959:DEBUG [reporter]: Trying to load reporter: coverage
14 12 2020 15:54:55.959:DEBUG [reporter]: Trying to load color-version of reporter: coverage (coverage_color)
14 12 2020 15:54:55.959:DEBUG [reporter]: Couldn't load color-version.
START:
Webpack bundling...
asset runtime.js 11.4 KiB [compared for emit] (name: runtime)
asset commons.js 989 bytes [compared for emit] (name: commons) (id hint: commons)
asset another.spec.4218216441.js 175 bytes [compared for emit] (name: another.spec.4218216441)
Entrypoint another.spec.4218216441 12.5 KiB = runtime.js 11.4 KiB commons.js 989 bytes another.spec.4218216441.js 175 bytes
webpack 5.10.1 compiled successfully in 204 ms
14 12 2020 15:54:56.659:INFO [karma-server]: Karma v5.2.3 server started at http://localhost:9876/
14 12 2020 15:54:56.659:INFO [launcher]: Launching browsers Chrome with concurrency unlimited
14 12 2020 15:54:56.662:INFO [launcher]: Starting browser Chrome
14 12 2020 15:54:56.662:DEBUG [launcher]: null -> BEING_CAPTURED
14 12 2020 15:54:56.663:DEBUG [temp-dir]: Creating temp dir at /tmp/karma-27533261
14 12 2020 15:54:56.663:DEBUG [launcher]: google-chrome --user-data-dir=/tmp/karma-27533261 --enable-automation --no-default-browser-check --no-first-run --disable-default-apps --disable-popup-blocking --disable-translate --disable-background-timer-throttling --disable-renderer-backgrounding --disable-device-discovery-notifications http://localhost:9876/?id=27533261
14 12 2020 15:54:57.068:DEBUG [web-server]: serving: /home/victor/github/victor-shelepen/instance-locator/node_modules/karma/static/client.html
14 12 2020 15:54:57.150:DEBUG [web-server]: serving: /home/victor/github/victor-shelepen/instance-locator/node_modules/karma/static/karma.js
14 12 2020 15:54:57.229:DEBUG [karma-server]: A browser has connected on socket RYAt3YKj13i66X8RAAAA
14 12 2020 15:54:57.278:DEBUG [Chrome 87.0.4280.88 (Linux x86_64)]: undefined -> CONNECTED
14 12 2020 15:54:57.279:INFO [Chrome 87.0.4280.88 (Linux x86_64)]: Connected on socket RYAt3YKj13i66X8RAAAA with id 27533261
14 12 2020 15:54:57.280:DEBUG [launcher]: BEING_CAPTURED -> CAPTURED
14 12 2020 15:54:57.280:DEBUG [launcher]: Chrome (id 27533261) captured in 0.621 secs
14 12 2020 15:54:57.280:DEBUG [Chrome 87.0.4280.88 (Linux x86_64)]: CONNECTED -> CONFIGURING
14 12 2020 15:54:57.289:DEBUG [web-server]: serving: /home/victor/github/victor-shelepen/instance-locator/node_modules/karma/static/favicon.ico
14 12 2020 15:54:57.292:DEBUG [web-server]: upgrade /socket.io/?EIO=3&transport=websocket&sid=RYAt3YKj13i66X8RAAAA
14 12 2020 15:54:57.323:DEBUG [middleware:karma]: custom files null null null
14 12 2020 15:54:57.323:DEBUG [middleware:karma]: Serving static request /context.html
14 12 2020 15:54:57.325:DEBUG [web-server]: serving: /home/victor/github/victor-shelepen/instance-locator/node_modules/karma/static/context.html
14 12 2020 15:54:57.346:DEBUG [middleware:source-files]: Requesting /base/node_modules/mocha/mocha.js?143074c949211f445d6c1a8a431990c9849bf6ae
14 12 2020 15:54:57.347:DEBUG [middleware:source-files]: Fetching /home/victor/github/victor-shelepen/instance-locator/node_modules/mocha/mocha.js
14 12 2020 15:54:57.347:DEBUG [web-server]: serving (cached): /home/victor/github/victor-shelepen/instance-locator/node_modules/mocha/mocha.js
14 12 2020 15:54:57.352:DEBUG [middleware:source-files]: Requesting /base/node_modules/karma-mocha/lib/adapter.js?a0f4bbc139407501892ac58d70c2791e7adec343
14 12 2020 15:54:57.352:DEBUG [middleware:source-files]: Fetching /home/victor/github/victor-shelepen/instance-locator/node_modules/karma-mocha/lib/adapter.js
14 12 2020 15:54:57.352:DEBUG [web-server]: serving (cached): /home/victor/github/victor-shelepen/instance-locator/node_modules/karma-mocha/lib/adapter.js
14 12 2020 15:54:57.353:DEBUG [middleware:source-files]: Requesting /base/node_modules/jasmine-core/lib/jasmine-core/jasmine.js?8f66117bbfbdf7b03a8f43bc667e3a4421ce15de
14 12 2020 15:54:57.353:DEBUG [middleware:source-files]: Fetching /home/victor/github/victor-shelepen/instance-locator/node_modules/jasmine-core/lib/jasmine-core/jasmine.js
14 12 2020 15:54:57.354:DEBUG [web-server]: serving (cached): /home/victor/github/victor-shelepen/instance-locator/node_modules/jasmine-core/lib/jasmine-core/jasmine.js
14 12 2020 15:54:57.354:DEBUG [middleware:source-files]: Requesting /base/node_modules/karma-jasmine/lib/boot.js?760d54bbca4f739f1f8b252c1636d76201cc4e88
14 12 2020 15:54:57.355:DEBUG [middleware:source-files]: Fetching /home/victor/github/victor-shelepen/instance-locator/node_modules/karma-jasmine/lib/boot.js
14 12 2020 15:54:57.355:DEBUG [web-server]: serving (cached): /home/victor/github/victor-shelepen/instance-locator/node_modules/karma-jasmine/lib/boot.js
14 12 2020 15:54:57.356:DEBUG [web-server]: serving: /home/victor/github/victor-shelepen/instance-locator/node_modules/karma/static/context.js
14 12 2020 15:54:57.370:DEBUG [middleware:source-files]: Requesting /base/node_modules/karma-jasmine/lib/adapter.js?c22f41e6dc6770beb0be7c86dfade9637bce9290
14 12 2020 15:54:57.370:DEBUG [middleware:source-files]: Fetching /home/victor/github/victor-shelepen/instance-locator/node_modules/karma-jasmine/lib/adapter.js
14 12 2020 15:54:57.370:DEBUG [web-server]: serving (cached): /home/victor/github/victor-shelepen/instance-locator/node_modules/karma-jasmine/lib/adapter.js
14 12 2020 15:54:57.372:DEBUG [middleware:source-files]: Requesting /base/node_modules/should/should.js?1aa5493eba423eb3fbfa86274d47aff5d2defc34
14 12 2020 15:54:57.372:DEBUG [middleware:source-files]: Fetching /home/victor/github/victor-shelepen/instance-locator/node_modules/should/should.js
14 12 2020 15:54:57.373:DEBUG [web-server]: serving (cached): /home/victor/github/victor-shelepen/instance-locator/node_modules/should/should.js
14 12 2020 15:54:57.374:DEBUG [middleware:source-files]: Requesting /absoluteanother.spec.4218216441.js?144f72c8ebc6aafdd231efe77b325a86fb00deba
14 12 2020 15:54:57.374:DEBUG [middleware:source-files]: Fetching another.spec.4218216441.js
14 12 2020 15:54:57.374:DEBUG [web-server]: serving (cached): another.spec.4218216441.js
14 12 2020 15:54:57.444:DEBUG [Chrome 87.0.4280.88 (Linux x86_64)]: CONFIGURING -> EXECUTING
14 12 2020 15:54:57.446:DEBUG [Chrome 87.0.4280.88 (Linux x86_64)]: EXECUTING -> CONNECTED
14 12 2020 15:54:57.447:DEBUG [launcher]: CAPTURED -> BEING_KILLED
14 12 2020 15:54:57.447:DEBUG [launcher]: BEING_KILLED -> BEING_FORCE_KILLED
Finished in 0.002 secs / 0 secs # 15:54:57 GMT+0200 (Eastern European Standard Time)
SUMMARY:
✔ 0 tests completed
14 12 2020 15:54:57.456:DEBUG [karma-server]: Run complete, exiting.
14 12 2020 15:54:57.457:DEBUG [launcher]: Disconnecting all browsers
14 12 2020 15:54:57.457:DEBUG [launcher]: BEING_FORCE_KILLED -> BEING_FORCE_KILLED
14 12 2020 15:54:57.457:DEBUG [proxy]: Destroying proxy agents
14 12 2020 15:54:57.486:DEBUG [coverage]: Writing coverage to /home/victor/github/victor-shelepen/instance-locator/tmp/coverage/report-html
14 12 2020 15:54:57.492:DEBUG [coverage]: Writing coverage to /home/victor/github/victor-shelepen/instance-locator/tmp/coverage/report-lcov
14 12 2020 15:54:57.500:DEBUG [launcher]: Process Chrome exited with code 0 and signal null
14 12 2020 15:54:57.500:DEBUG [temp-dir]: Cleaning temp dir /tmp/karma-27533261
14 12 2020 15:54:57.536:DEBUG [launcher]: Finished all browsers
14 12 2020 15:54:57.537:DEBUG [launcher]: BEING_FORCE_KILLED -> FINISHED
14 12 2020 15:54:57.537:DEBUG [launcher]: FINISHED -> FINISHED
I can see that it has been compiled as another.spec.4218216441.js.
another.spec.js
describe('Testing', () => {
  it('G', () => {
    should(1).be(1);
  });
});
But no tests run.
I would appreciate any tip. Thank you.
Previously, with the alpha versions of karma-webpack 5, if you did not include 'webpack' as a framework in your Karma configuration, everything would build but no tests would run, exactly like this. The stable 5.0.0 release has been published and addresses this issue, fixing it on the fly. If you update to it, this should work fine.
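For reference, a minimal sketch of the relevant part of the configuration above with 'webpack' added to frameworks (the remaining options stay as they are):
// karma.conf.js (only the parts that matter for karma-webpack >= 5)
const webpackConfig = require('./testing.webpack.js');

module.exports = function (config) {
  config.set({
    // 'webpack' must be listed as a framework alongside the test frameworks
    frameworks: ['webpack', 'should', 'jasmine', 'mocha'],
    files: ['spec/**/*.spec.js'],
    preprocessors: {
      'spec/**/*.spec.js': ['webpack', 'sourcemap']
    },
    webpack: webpackConfig,
    webpackMiddleware: { stats: 'errors-only' }
  });
};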
Related
It's my first time using monstache.
I've migrated my infrastructure from on-premise to the cloud, and I'm now using MongoDB Atlas and AWS OpenSearch.
I've installed monstache on an AWS EC2 instance and configured it. Everything seems to be working and monstache is connected to Elasticsearch and MongoDB, but it is not indexing the documents that were migrated into MongoDB Atlas into Elasticsearch. It just keeps waiting for events on my collection/index, like this:
[ec2-user@ip-172-31-1-200 ~]$ journalctl -u monstache.service -f
-- Logs begin at Wed 2022-11-09 10:22:04 UTC. --
Jan 26 08:54:00 ip-172-31-1-200.eu-west-3.compute.internal systemd[1]: Starting monstache sync service...
Jan 26 08:54:00 ip-172-31-1-200.eu-west-3.compute.internal monstache[27813]: INFO 2023/01/26 08:54:00 Started monstache version 6.1.0
Jan 26 08:54:00 ip-172-31-1-200.eu-west-3.compute.internal monstache[27813]: INFO 2023/01/26 08:54:00 Successfully connected to MongoDB version 4.4.18
Jan 26 08:54:01 ip-172-31-1-200.eu-west-3.compute.internal monstache[27813]: INFO 2023/01/26 08:54:01 Successfully connected to Elasticsearch version 7.10.2
Jan 26 08:54:01 ip-172-31-1-200.eu-west-3.compute.internal systemd[1]: Started monstache sync service.
Jan 26 08:54:01 ip-172-31-1-200.eu-west-3.compute.internal monstache[27813]: INFO 2023/01/26 08:54:01 Joined cluster HA
Jan 26 08:54:01 ip-172-31-1-200.eu-west-3.compute.internal monstache[27813]: INFO 2023/01/26 08:54:01 Starting work for cluster HA
Jan 26 08:54:01 ip-172-31-1-200.eu-west-3.compute.internal monstache[27813]: INFO 2023/01/26 08:54:01 Listening for events
Jan 26 08:54:01 ip-172-31-1-200.eu-west-3.compute.internal monstache[27813]: INFO 2023/01/26 08:54:01 Watching changes on collection wtlive.myuser
Jan 26 08:54:01 ip-172-31-1-200.eu-west-3.compute.internal monstache[27813]: INFO 2023/01/26 08:54:01 Resuming from timestamp {T:1674723241 I:1}
Do I absolutely have to initiate a write on the MongoDB collection for monstache to start syncing? Why doesn't it start syncing the existing data from MongoDB?
My Elasticsearch index still shows a document count of 0, while the collection is full of documents in MongoDB.
[ec2-user@ip-172-31-0-5 ~]$ curl --insecure -u es-appuser https://vpc-wtlive-domain-staging-om2cbdeex4qk6trkdrcb3dg4vm.eu-west-3.es.amazonaws.com/_cat/indices?v
Enter host password for user 'es-appuser':
health status index uuid pri rep docs.count docs.deleted store.size pri.store.size
green open wtlive.myuser YzqLx9_uTZ2qFVjFF2CMag 1 1 0 0 416b 208b
green open .opendistro_security Jb1fLqGjRd2vvluoX-ZgKw 1 1 9 4 129kb 64.4kb
green open .kibana_1 v6WdqQDvSN2L16EZTXxuHQ 1 1 30 2 70.4kb 33.4kb
green open .kibana_252235597_esappuser_1 OY1bbDGvTqK8oEgwopzbhQ 1 1 1 0 10.1kb 5kb
[ec2-user@ip-172-31-0-5 ~]$
Here is my monstache configuration:
[ec2-user@ip-172-31-1-200 ~]$ cat monstache/wtlive_pipeline.toml
enable-http-server = true
http-server-addr = ":8888"
#direct-read-namespaces = ["wtlive.myuser"]
change-stream-namespaces = ["wtlive.myuser"]
namespace-regex = '^wtlive.myuser$'
cluster-name="HA"
resume = true
replay = false
resume-write-unsafe = false
exit-after-direct-reads = false
elasticsearch-user = "es-appuser"
elasticsearch-password = "9V#xxxxxx"
elasticsearch-urls = ["https://vpc-wtlive-domain-staging-om2cbdeek6trkdrcb3dg4vm.eu-west-3.es.amazonaws.com"]
mongo-url = "mongodb://admin:VYn7ZD4CHDh8#wtlive-dedicated-shard-00-00.ynxpn.mongodb.net:27017,wtlive-dedicated-shard-00-01.ynxpn.mongodb.net:27017,wtlive-dedicated-shard-00-02.ynxpn.mongodb.net:27017/?tls=true&replicaSet=atlas-lmkye1-shard-0&authSource=admin&retryWrites=true&w=majority&tlsCAFile=/home/ec2-user/mongodb-ca.pem"
#[logs]
#info = "/home/ec2-user/logs/monstache/info.log"
#error = "/home/ec2-user/logs/monstache/error.log"
#warn = "/home/ec2-user/logs/monstache/warn.log"
#[[mapping]]
#namespace = "wtlive.myuser"
#index = "wtlive.myuser"
[[pipeline]]
namespace = "wtlive.myuser"
script = """
module.exports = function(ns, changeStream) {
if (changeStream) {
return [
{
$project: {
_id: 1,
operationType : 1,
clusterTime : 1,
documentKey : 1,
to : 1,
updateDescription : 1,
txnNumber : 1,
lsid : 1,
"fullDocument._id": 1,
"fullDocument.created": 1,
"fullDocument.lastVisit": 1,
"fullDocument.verified": 1,
"fullDocument.device.locale": "$fullDocument.device.locale",
"fullDocument.device.country": "$fullDocument.device.country",
"fullDocument.device.tz": "$fullDocument.device.tz",
"fullDocument.device.latLonCountry": "$fullDocument.device.latLonCountry",
"fullDocument.details.firstname": "$fullDocument._details.firstname",
"fullDocument.details.gender": "$fullDocument._details.gender",
"fullDocument.details.category": "$fullDocument._details.category",
"fullDocument.details.dob": "$fullDocument._details.dob",
"fullDocument.details.lookingFor": "$fullDocument._details.lookingFor",
"fullDocument.details.height": "$fullDocument._details.height",
"fullDocument.details.weight": "$fullDocument._details.weight",
"fullDocument.details.cigarette": "$fullDocument._details.cigarette",
"fullDocument.details.categorizedBy": "$fullDocument._details.categorizedBy",
"fullDocument.details.origin": "$fullDocument._details.origin",
"fullDocument.details.city": "$fullDocument._details.city",
"fullDocument.details.country": "$fullDocument._details.country",
"fullDocument.lifeSkills.educationLevel": "$fullDocument._lifeSkills.educationLevel",
"fullDocument.lifeSkills.pets": "$fullDocument._lifeSkills.pets",
"fullDocument.lifeSkills.religion": "$fullDocument._lifeSkills.religion",
"fullDocument.loveLife.children": "$fullDocument._loveLife.children",
"fullDocument.loveLife.relationType": "$fullDocument._loveLife.relationType",
"fullDocument.searchCriteria": "$fullDocument._searchCriteria",
"fullDocument.blocked" : 1,
"fullDocument.capping" : 1,
"fullDocument.fillingScore" : 1,
"fullDocument.viewed" : 1,
"fullDocument.likes" : 1,
"fullDocument.matches" : 1,
"fullDocument.blacklisted" : 1,
"fullDocument.uploadsList._id" : 1,
"fullDocument.uploadsList.status" : 1,
"fullDocument.uploadsList.url" : 1,
"fullDocument.uploadsList.position" : 1,
"fullDocument.uploadsList.imageSet" : 1,
"fullDocument.location" : 1,
"fullDocument.searchZone" : 1,
"fullDocument.locationPoint" : "$fullDocument.location.coordinates",
"fullDocument.selfieDateUpload" : 1,
"ns": 1
}
}
]
} else {
return [
{
$project: {
_id: 1,
"_id": 1,
"created": 1,
"lastVisit": 1,
"verified": 1,
"device.locale": "$device.locale",
"device.country": "$device.country",
"device.tz": "$device.tz",
"device.latLonCountry": "$device.latLonCountry",
"details.firstname": "$_details.firstname",
"details.gender": "$_details.gender",
"details.category": "$_details.category",
"details.dob": "$_details.dob",
"details.lookingFor": "$_details.lookingFor",
"details.height": "$_details.height",
"details.weight": "$_details.weight",
"details.cigarette": "$_details.cigarette",
"details.categorizedBy": "$_details.categorizedBy",
"details.origin": "$_details.origin",
"details.city": "$_details.city",
"details.country": "$_details.country",
"lifeSkills.educationLevel": "$_lifeSkills.educationLevel",
"lifeSkills.pets": "$_lifeSkills.pets",
"lifeSkills.religion": "$_lifeSkills.religion",
"loveLife.children": "$_loveLife.children",
"loveLife.relationType": "$_loveLife.relationType",
"searchCriteria": "$_searchCriteria",
"blocked" : 1,
"capping" : 1,
"fillingScore" : 1,
"viewed" : 1,
"likes" : 1,
"matches" : 1,
"blacklisted" : 1,
"uploadsList._id" : 1,
"uploadsList.status" : 1,
"uploadsList.url" : 1,
"uploadsList.position" : 1,
"uploadsList.imageSet" : 1,
"location" : 1,
"searchZone" : 1,
"selfieDateUpload" : 1,
"locationPoint" : "$location.coordinates"
}
}
]
}
}
"""
What could be the issue? And what action should I take from here please?
By uncommenting the #direct-read-namespaces = ["wtlive.myuser"] line, monstache now performs the initial sync, and everything works.
I'll comment it out again and restart the monstache service after the initial sync, to avoid re-syncing from scratch.
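For clarity, here is a sketch of the relevant lines after the change (direct reads copy the documents that already exist in the collection, while the change stream keeps following new events):
# one-off initial sync: read the existing documents directly from MongoDB
direct-read-namespaces = ["wtlive.myuser"]
# ongoing sync: keep tailing the change stream for new events
change-stream-namespaces = ["wtlive.myuser"]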
I have a MongoDB instance running with oplogMinRetentionHours set to 24 hours and the maximum oplog size set to 50 GB. Despite these config settings, oplog entries seem to be retained indefinitely: the oplog has entries older than 24 hours, and its size has reached 1.4 TB (0.34 TB on disk).
db.runCommand( { serverStatus: 1 } ).oplogTruncation.oplogMinRetentionHours
24 hrs
db.getReplicationInfo()
{
"logSizeMB" : 51200,
"usedMB" : 1464142.51,
"timeDiff" : 3601538,
"timeDiffHours" : 1000.43,
"tFirst" : "Fri Mar 19 2021 14:15:49 GMT+0000 (Greenwich Mean Time)",
"tLast" : "Fri Apr 30 2021 06:41:27 GMT+0000 (Greenwich Mean Time)",
"now" : "Fri Apr 30 2021 06:41:28 GMT+0000 (Greenwich Mean Time)"
}
MongoDB server version: 4.4.0
OS: Windows Server 2016 DataCenter 64bit
What I have noticed is that even a superuser with the root role is not able to access replset.oplogTruncateAfterPoint; I'm not sure if this is by design.
mongod.log
{"t":{"$date":"2021-04-30T06:35:51.308+00:00"},"s":"I", "c":"ACCESS",
"id":20436, "ctx":"conn8","msg":"Checking authorization
failed","attr":{"error":{"code":13,"codeName":"Unauthorized","errmsg":"not
authorized on local to execute command { aggregate:
"replset.oplogTruncateAfterPoint", pipeline: [ { $indexStats: {} }
], cursor: { batchSize: 1000.0 }, $clusterTime: { clusterTime:
Timestamp(1619764547, 1), signature: { hash: BinData(0,
180A28389B6BBA22ACEB5D3517029CFF8D31D3D8), keyId: 6935907196995633156
} }, $db: "local" }"}}}
I am not sure why MongoDB does not delete older entries from the oplog.
MongoDB oplog truncation seems to be triggered by inserts, so the oplog only gets truncated as and when inserts happen.
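If that is the case, a throwaway write like the sketch below (the collection name oplog_nudge is made up for illustration) may be enough to nudge the server into reclaiming old oplog entries; treat this as an assumption rather than documented behaviour:
// mongo shell, run against the primary; the write itself is disposable
db.getSiblingDB("test").oplog_nudge.insertOne({ nudgedAt: new Date() });
// afterwards, check whether tFirst has moved forward and usedMB has dropped
db.getReplicationInfo();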
Request from the REST client:
POST http://localhost:7050/chaincode
Request:
{
  "jsonrpc": "2.0",
  "method": "deploy",
  "params": {
    "type": 1,
    "chaincodeID": {
      "name": "raja"
    },
    "ctorMsg": {
      "args": ["init", "a", "100", "b", "200"]
    }
  },
  "id": 5
}
Registering the Java chaincode with the chaincode ID name:
rajasekhar@rajasekhar-VirtualBox:~/mychaincode/src/github.com/hyperledger/fabric/examples/chaincode/java/chaincode_example02/build/distributions/chaincode_example02/bin$ CORE_CHAINCODE_ID_NAME=raja ./chaincode_example02
Jun 13, 2017 1:24:06 PM org.hyperledger.fabric.shim.ChaincodeBase newPeerClientConnection
INFO: Configuring channel connection to peer.
Jun 13, 2017 1:24:09 PM org.hyperledger.fabric.shim.ChaincodeBase chatWithPeer
INFO: Connecting to peer.
Jun 13, 2017 1:24:09 PM io.grpc.internal.TransportSet$1 call
INFO: Created transport io.grpc.netty.NettyClientTransport@599c4539(/127.0.0.1:7051) for /127.0.0.1:7051
Jun 13, 2017 1:24:10 PM io.grpc.internal.TransportSet$TransportListener transportReady
INFO: Transport io.grpc.netty.NettyClientTransport@599c4539(/127.0.0.1:7051) for /127.0.0.1:7051 is ready
Jun 13, 2017 1:24:10 PM org.hyperledger.fabric.shim.ChaincodeBase chatWithPeer
INFO: Registering as 'raja' ... sending REGISTER
java.lang.RuntimeException: [raja]Chaincode handler org.hyperledger.fabric.shim.fsm cannot handle message (INIT) with payload size (23) while in state: established
at org.hyperledger.fabric.shim.impl.Handler.handleMessage(Handler.java:493)
at org.hyperledger.fabric.shim.ChaincodeBase$1.onNext(ChaincodeBase.java:188)
at org.hyperledger.fabric.shim.ChaincodeBase$1.onNext(ChaincodeBase.java:181)
at io.grpc.stub.ClientCalls$StreamObserverToCallListenerAdapter.onMessage(ClientCalls.java:305)
at io.grpc.internal.ClientCallImpl$ClientStreamListenerImpl$2.runInContext(ClientCallImpl.java:423)
at io.grpc.internal.ContextRunnable.run(ContextRunnable.java:54)
at io.grpc.internal.SerializingExecutor$TaskRunner.run(SerializingExecutor.java:154)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:748)
You will need to provide more information about what you have done so far. With just the error message, it's not possible to pinpoint the cause of the failure.
There is excellent documentation available on how to write Java chaincode for blockchain: https://www.ibm.com/developerworks/library/j-chaincode-for-java-developers/index.html
I hope you have seen it. Do go through the steps one by one; the documentation is extensive, covering everything from setting up your environment to writing your first chaincode in Java.
Hope this helps.
A couple of places propose this solution:
window.addEventListener('unhandledrejection', function(err) {
  window.__karma__.error(err); // yeah private API ¯\_(ツ)_/¯
});
But it throws:
Uncaught TypeError: Cannot read property 'error' of undefined
I'm able to get reports of unhandled rejections with the following setup:
karma.conf.js:
module.exports = function(config) {
  config.set({
    basePath: '',
    frameworks: ['mocha'],
    files: [
      'setup.js',
      'test.js',
    ],
    exclude: [],
    preprocessors: {},
    reporters: ['progress'],
    port: 9876,
    colors: true,
    logLevel: config.LOG_INFO,
    autoWatch: true,
    browsers: ['Chrome'],
    singleRun: false,
    concurrency: Infinity
  });
};
setup.js:
window.addEventListener('unhandledrejection', function(ev) {
  window.__karma__.error("unhandled rejection: " + ev.reason.message);
});
test.js:
it("test 1", () => {
Promise.reject(new Error("Q"));
});
it("test 2", (done) => {
setTimeout(done, 1000);
});
Separating setup.js from test.js is not necessary. I just like to have such setup code separate from the tests proper.
When I run karma start --single-run I get:
25 01 2017 07:20:07.521:INFO [karma]: Karma v1.4.0 server started at http://0.0.0.0:9876/
25 01 2017 07:20:07.523:INFO [launcher]: Launching browser Chrome with unlimited concurrency
25 01 2017 07:20:07.528:INFO [launcher]: Starting browser Chrome
25 01 2017 07:20:08.071:INFO [Chrome 55.0.2883 (Linux 0.0.0)]: Connected on socket g-BGwMfQLsQM128IAAAA with id 22107710
Chrome 55.0.2883 (Linux 0.0.0) ERROR
unhandled rejection: Q
Chrome 55.0.2883 (Linux 0.0.0): Executed 1 of 2 ERROR (0.006 secs / 0.001 secs)
Caveat
Reports of unhandled rejections are asynchronous. This has a few consequences.
The example I gave has a second test that takes 1 second to complete. This gives the browser time to report the unhandled rejection from the first test. Without this delay, Karma terminates without detecting the unhandled rejection.
Another issue is that an unhandled rejection caused by test X may be discovered while test X+1 is running. The runner's report may make it look like X+1 is the test that caused the issue.
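One workaround for the attribution problem (my own assumption, not something Karma or Mocha documents) is to yield to the event loop after every test in setup.js, so that any pending unhandledrejection event fires before the next test starts:
// setup.js (sketch): give the browser a turn after each test so that a rejection
// produced by test X is reported before test X+1 begins
afterEach((done) => {
  setTimeout(done, 0);
});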
I followed the example to use RBD in Kubernetes, but without success. Can anyone help me? The error:
Nov 09 17:58:03 core-1-97 kubelet[1254]: E1109 17:58:03.289702 1254 volumes.go:114] Could not create volume builder for pod 5df3610e-86c8-11e5-bc34-002590fdf95c: can't use volume plugins for (volume.Spec){Name:(string)rbdpd VolumeSource:(api.VolumeSource){HostPath:(*api.HostPathVolumeSource)<nil> EmptyDir:(*api.EmptyDirVolumeSource)<nil> GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)<nil> AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)<nil> GitRepo:(*api.GitRepoVolumeSource)<nil> Secret:(*api.SecretVolumeSource)<nil> NFS:(*api.NFSVolumeSource)<nil> ISCSI:(*api.ISCSIVolumeSource)<nil> Glusterfs:(*api.GlusterfsVolumeSource)<nil> PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)<nil> RBD:(*api.RBDVolumeSource){CephMonitors:([]string)[10.14.1.33:6789 10.14.1.35:6789 10.14.1.36:6789] RBDImage:(string)foo FSType:(string)ext4 RBDPool:(string)rbd RadosUser:(string)admin Keyring:(string) SecretRef:(*api.LocalObjectReference){Name:(string)ceph-secret} ReadOnly:(bool)true}} PersistentVolumeSource:(api.PersistentVolumeSource){GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)<nil> AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)<nil> HostPath:(*api.HostPathVolumeSource)<nil> Glusterfs:(*api.GlusterfsVolumeSource)<nil> NFS:(*api.NFSVolumeSource)<nil> RBD:(*api.RBDVolumeSource)<nil> ISCSI:(*api.ISCSIVolumeSource)<nil>}}: no volume plugin matched
Nov 09 17:58:03 core-1-97 kubelet[1254]: E1109 17:58:03.289770 1254 kubelet.go:1210] Unable to mount volumes for pod "rbd2_default": can't use volume plugins for (volume.Spec){Name:(string)rbdpd VolumeSource:(api.VolumeSource){HostPath:(*api.HostPathVolumeSource)<nil> EmptyDir:(*api.EmptyDirVolumeSource)<nil> GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)<nil> AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)<nil> GitRepo:(*api.GitRepoVolumeSource)<nil> Secret:(*api.SecretVolumeSource)<nil> NFS:(*api.NFSVolumeSource)<nil> ISCSI:(*api.ISCSIVolumeSource)<nil> Glusterfs:(*api.GlusterfsVolumeSource)<nil> PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)<nil> RBD:(*api.RBDVolumeSource){CephMonitors:([]string)[10.14.1.33:6789 10.14.1.35:6789 10.14.1.36:6789] RBDImage:(string)foo FSType:(string)ext4 RBDPool:(string)rbd RadosUser:(string)admin Keyring:(string) SecretRef:(*api.LocalObjectReference){Name:(string)ceph-secret} ReadOnly:(bool)true}} PersistentVolumeSource:(api.PersistentVolumeSource){GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)<nil> AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)<nil> HostPath:(*api.HostPathVolumeSource)<nil> Glusterfs:(*api.GlusterfsVolumeSource)<nil> NFS:(*api.NFSVolumeSource)<nil> RBD:(*api.RBDVolumeSource)<nil> ISCSI:(*api.ISCSIVolumeSource)<nil>}}: no volume plugin matched; skipping pod
Nov 09 17:58:03 core-1-97 kubelet[1254]: E1109 17:58:03.299458 1254 pod_workers.go:111] Error syncing pod 5df3610e-86c8-11e5-bc34-002590fdf95c, skipping: can't use volume plugins for (volume.Spec){Name:(string)rbdpd VolumeSource:(api.VolumeSource){HostPath:(*api.HostPathVolumeSource)<nil> EmptyDir:(*api.EmptyDirVolumeSource)<nil> GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)<nil> AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)<nil> GitRepo:(*api.GitRepoVolumeSource)<nil> Secret:(*api.SecretVolumeSource)<nil> NFS:(*api.NFSVolumeSource)<nil> ISCSI:(*api.ISCSIVolumeSource)<nil> Glusterfs:(*api.GlusterfsVolumeSource)<nil> PersistentVolumeClaim:(*api.PersistentVolumeClaimVolumeSource)<nil> RBD:(*api.RBDVolumeSource){CephMonitors:([]string)[10.14.1.33:6789 10.14.1.35:6789 10.14.1.36:6789] RBDImage:(string)foo FSType:(string)ext4 RBDPool:(string)rbd RadosUser:(string)admin Keyring:(string) SecretRef:(*api.LocalObjectReference){Name:(string)ceph-secret} ReadOnly:(bool)true}} PersistentVolumeSource:(api.PersistentVolumeSource){GCEPersistentDisk:(*api.GCEPersistentDiskVolumeSource)<nil> AWSElasticBlockStore:(*api.AWSElasticBlockStoreVolumeSource)<nil> HostPath:(*api.HostPathVolumeSource)<nil> Glusterfs:(*api.GlusterfsVolumeSource)<nil> NFS:(*api.NFSVolumeSource)<nil> RBD:(*api.RBDVolumeSource)<nil> ISCSI:(*api.ISCSIVolumeSource)<nil>}}: no volume plugin matched
And the template file I used, rbd-with-secret.json:
core@core-1-94 ~/kubernetes/examples/rbd $ cat rbd-with-secret.json
{
  "apiVersion": "v1",
  "id": "rbdpd2",
  "kind": "Pod",
  "metadata": {
    "name": "rbd2"
  },
  "spec": {
    "nodeSelector": {"kubernetes.io/hostname": "10.12.1.97"},
    "containers": [
      {
        "name": "rbd-rw",
        "image": "kubernetes/pause",
        "volumeMounts": [
          {
            "mountPath": "/mnt/rbd",
            "name": "rbdpd"
          }
        ]
      }
    ],
    "volumes": [
      {
        "name": "rbdpd",
        "rbd": {
          "monitors": [
            "10.14.1.33:6789",
            "10.14.1.35:6789",
            "10.14.1.36:6789"
          ],
          "pool": "rbd",
          "image": "foo",
          "user": "admin",
          "secretRef": {"name": "ceph-secret"},
          "fsType": "ext4",
          "readOnly": true
        }
      }
    ]
  }
}
The secret:
apiVersion: v1
kind: Secret
metadata:
  name: ceph-secret
data:
  key: QVFBemV6bFdZTXdXQWhBQThxeG1IT2NKa0QrYnE0K3RZUmtsVncK
The Ceph config is in /etc/ceph/:
core@core-1-94 ~/kubernetes/examples/rbd $ ls -alh /etc/ceph
total 20K
drwxr-xr-x 2 root root 4.0K Nov 6 18:38 .
drwxr-xr-x 26 root root 4.0K Nov 9 17:07 ..
-rw------- 1 root root 63 Nov 4 11:27 ceph.client.admin.keyring
-rw-r--r-- 1 root root 264 Nov 6 18:38 ceph.conf
-rw-r--r-- 1 root root 384 Nov 6 14:35 ceph.conf.orig
-rw------- 1 root root 0 Nov 4 11:27 tmpkqDKwf
and the key is:
core@core-1-94 ~/kubernetes/examples/rbd $ sudo cat /etc/ceph/ceph.client.admin.keyring
[client.admin]
key = AQAzezlWYMwWAhAA8qxmHOcJkD+bq4+tYRklVw==
You'll get "no volume plugins matched" if the rbd command isn't installed and in the path.
As the example specifies, you need to ensure that ceph is installed on your Kubernetes nodes. For instance, in Fedora:
$ sudo yum -y install ceph-common
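To double-check that the kubelet can actually find the binary, a quick sketch of what to run on each node (rbd --version should print the installed Ceph version if everything is on the PATH):
$ which rbd
$ rbd --version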
I'll file an issue to clarify the error messages.