flink sql client can't parse really simple sql? - flink-sql

Environment:
Flink version: 1.15.1
Kafka version: kafka_2.12-2.2.0
CREATE TABLE kafka_test (
    `event_time` TIMESTAMP(3) METADATA FROM 'timestamp',
    `partition` BIGINT METADATA VIRTUAL,
    `offset` BIGINT METADATA VIRTUAL,
    `value` STRING
) WITH (
    'connector' = 'kafka',
    'topic' = 'first',
    'properties.bootstrap.servers' = 'hxyh2:9092',
    'properties.group.id' = 'testGroup',
    'scan.startup.mode' = 'earliest-offset',
    'format' = 'raw'
);
Result:
When I execute 'select * from kafka_test', it works well and shows results in real time.
When I execute "select * from kafka_test where value like 'star'", it throws an exception I don't understand. I would like to know what could be causing it.
2022-09-09 16:49:39,205 WARN org.apache.flink.table.client.cli.CliClient [] - Could not execute SQL statement.
org.apache.flink.table.client.gateway.SqlExecutionException: Failed to parse statement: select * from kafka_test where value like 'start';
at org.apache.flink.table.client.gateway.local.LocalExecutor.parseStatement(LocalExecutor.java:174) ~[flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.cli.SqlCommandParserImpl.parseCommand(SqlCommandParserImpl.java:45) ~[flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.cli.SqlMultiLineParser.parse(SqlMultiLineParser.java:71) ~[flink-sql-client-1.15.1.jar:1.15.1]
at org.jline.reader.impl.LineReaderImpl.acceptLine(LineReaderImpl.java:2731) ~[flink-sql-client-1.15.1.jar:1.15.1]
at org.jline.reader.impl.LineReaderImpl.readLine(LineReaderImpl.java:585) ~[flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.cli.CliClient.getAndExecuteStatements(CliClient.java:296) [flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.cli.CliClient.executeInteractive(CliClient.java:281) [flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.cli.CliClient.executeInInteractiveMode(CliClient.java:229) [flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.SqlClient.openCli(SqlClient.java:151) [flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.SqlClient.start(SqlClient.java:95) [flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.SqlClient.startClient(SqlClient.java:187) [flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.SqlClient.main(SqlClient.java:161) [flink-sql-client-1.15.1.jar:1.15.1]
Caused by: org.apache.flink.table.api.SqlParserException: SQL parse failed. Encountered "value" at line 1, column 32.
Was expecting one of:
"CURSOR" ...
"EXISTS" ...
"NOT" ...
"ROW" ...
"(" ...
"+" ...
"-" ...
"INTERVAL" ...
<UNSIGNED_INTEGER_LITERAL> ...
<DECIMAL_NUMERIC_LITERAL> ...
<APPROX_NUMERIC_LITERAL> ...
<BINARY_STRING_LITERAL> ...
<PREFIXED_STRING_LITERAL> ...
<QUOTED_STRING> ...
<UNICODE_STRING_LITERAL> ...
<BIG_QUERY_DOUBLE_QUOTED_STRING> ...
<BIG_QUERY_QUOTED_STRING> ...
"TRUE" ...
"FALSE" ...
"UNKNOWN" ...
"NULL" ...
<LBRACE_D> ...
<LBRACE_T> ...
<LBRACE_TS> ...
"DATE" ...
"TIME" ...
"TIMESTAMP" ...
"?" ...
"CAST" ...
"EXTRACT" ...
"POSITION" ...
"CONVERT" ...
"TRANSLATE" ...
"OVERLAY" ...
"FLOOR" ...
"CEIL" ...
"CEILING" ...
"SUBSTRING" ...
"TRIM" ...
"CLASSIFIER" ...
"MATCH_NUMBER" ...
"RUNNING" ...
"PREV" ...
"NEXT" ...
"JSON_EXISTS" ...
"JSON_VALUE" ...
"JSON_QUERY" ...
"JSON_OBJECT" ...
"JSON_OBJECTAGG" ...
"JSON_ARRAY" ...
"JSON_ARRAYAGG" ...
<LBRACE_FN> ...
"MULTISET" ...
"ARRAY" ...
"PERIOD" ...
"SPECIFIC" ...
<IDENTIFIER> ...
<HYPHENATED_IDENTIFIER> ...
<QUOTED_IDENTIFIER> ...
<BACK_QUOTED_IDENTIFIER> ...
<BRACKET_QUOTED_IDENTIFIER> ...
<UNICODE_QUOTED_IDENTIFIER> ...
"ABS" ...
"AVG" ...
"CARDINALITY" ...
"CHAR_LENGTH" ...
"CHARACTER_LENGTH" ...
"COALESCE" ...
"COLLECT" ...
"COVAR_POP" ...
"COVAR_SAMP" ...
"CUME_DIST" ...
"COUNT" ...
"CURRENT_DATE" ...
"CURRENT_TIME" ...
"CURRENT_TIMESTAMP" ...
"DENSE_RANK" ...
"ELEMENT" ...
"EVERY" ...
"EXP" ...
"FIRST_VALUE" ...
"FUSION" ...
"INTERSECTION" ...
"GROUPING" ...
"HOUR" ...
"LAG" ...
"LEAD" ...
"LEFT" ...
"LAST_VALUE" ...
"LN" ...
"LOCALTIME" ...
"LOCALTIMESTAMP" ...
"LOWER" ...
"MAX" ...
"MIN" ...
"MINUTE" ...
"MOD" ...
"MONTH" ...
"NTH_VALUE" ...
"NTILE" ...
"NULLIF" ...
"OCTET_LENGTH" ...
"PERCENT_RANK" ...
"POWER" ...
"RANK" ...
"REGR_COUNT" ...
"REGR_SXX" ...
"REGR_SYY" ...
"RIGHT" ...
"ROW_NUMBER" ...
"SECOND" ...
"SOME" ...
"SQRT" ...
"STDDEV_POP" ...
"STDDEV_SAMP" ...
"SUM" ...
"UPPER" ...
"TRUNCATE" ...
"USER" ...
"VAR_POP" ...
"VAR_SAMP" ...
"YEAR" ...
"CURRENT_CATALOG" ...
"CURRENT_DEFAULT_TRANSFORM_GROUP" ...
"CURRENT_PATH" ...
"CURRENT_ROLE" ...
"CURRENT_SCHEMA" ...
"CURRENT_USER" ...
"SESSION_USER" ...
"SYSTEM_USER" ...
"NEW" ...
"CASE" ...
"CURRENT" ...
at org.apache.flink.table.planner.parse.CalciteParser.parseSqlList(CalciteParser.java:82) ~[?:?]
at org.apache.flink.table.planner.delegation.ParserImpl.parse(ParserImpl.java:101) ~[?:?]
at org.apache.flink.table.client.gateway.local.LocalExecutor.lambda$parseStatement$1(LocalExecutor.java:172) ~[flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.gateway.context.ExecutionContext.wrapClassLoader(ExecutionContext.java:88) ~[flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.gateway.local.LocalExecutor.parseStatement(LocalExecutor.java:172) ~[flink-sql-client-1.15.1.jar:1.15.1]
... 11 more
Caused by: org.apache.calcite.sql.parser.SqlParseException: Encountered "value" at line 1, column 32.
Was expecting one of:
"CURSOR" ...
"EXISTS" ...
"NOT" ...
"ROW" ...
"(" ...
"+" ...
"-" ...
"INTERVAL" ...
<UNSIGNED_INTEGER_LITERAL> ...
<DECIMAL_NUMERIC_LITERAL> ...
<APPROX_NUMERIC_LITERAL> ...
<BINARY_STRING_LITERAL> ...
<PREFIXED_STRING_LITERAL> ...
<QUOTED_STRING> ...
<UNICODE_STRING_LITERAL> ...
<BIG_QUERY_DOUBLE_QUOTED_STRING> ...
<BIG_QUERY_QUOTED_STRING> ...
"TRUE" ...
"FALSE" ...
"UNKNOWN" ...
"NULL" ...
<LBRACE_D> ...
<LBRACE_T> ...
<LBRACE_TS> ...
"DATE" ...
"TIME" ...
"TIMESTAMP" ...
"?" ...
"CAST" ...
"EXTRACT" ...
"POSITION" ...
"CONVERT" ...
"TRANSLATE" ...
"OVERLAY" ...
"FLOOR" ...
"CEIL" ...
"CEILING" ...
"SUBSTRING" ...
"TRIM" ...
"CLASSIFIER" ...
"MATCH_NUMBER" ...
"RUNNING" ...
"PREV" ...
"NEXT" ...
"JSON_EXISTS" ...
"JSON_VALUE" ...
"JSON_QUERY" ...
"JSON_OBJECT" ...
"JSON_OBJECTAGG" ...
"JSON_ARRAY" ...
"JSON_ARRAYAGG" ...
<LBRACE_FN> ...
"MULTISET" ...
"ARRAY" ...
"PERIOD" ...
"SPECIFIC" ...
<IDENTIFIER> ...
<HYPHENATED_IDENTIFIER> ...
<QUOTED_IDENTIFIER> ...
<BACK_QUOTED_IDENTIFIER> ...
<BRACKET_QUOTED_IDENTIFIER> ...
<UNICODE_QUOTED_IDENTIFIER> ...
"ABS" ...
"AVG" ...
"CARDINALITY" ...
"CHAR_LENGTH" ...
"CHARACTER_LENGTH" ...
"COALESCE" ...
"COLLECT" ...
"COVAR_POP" ...
"COVAR_SAMP" ...
"CUME_DIST" ...
"COUNT" ...
"CURRENT_DATE" ...
"CURRENT_TIME" ...
"CURRENT_TIMESTAMP" ...
"DENSE_RANK" ...
"ELEMENT" ...
"EVERY" ...
"EXP" ...
"FIRST_VALUE" ...
"FUSION" ...
"INTERSECTION" ...
"GROUPING" ...
"HOUR" ...
"LAG" ...
"LEAD" ...
"LEFT" ...
"LAST_VALUE" ...
"LN" ...
"LOCALTIME" ...
"LOCALTIMESTAMP" ...
"LOWER" ...
"MAX" ...
"MIN" ...
"MINUTE" ...
"MOD" ...
"MONTH" ...
"NTH_VALUE" ...
"NTILE" ...
"NULLIF" ...
"OCTET_LENGTH" ...
"PERCENT_RANK" ...
"POWER" ...
"RANK" ...
"REGR_COUNT" ...
"REGR_SXX" ...
"REGR_SYY" ...
"RIGHT" ...
"ROW_NUMBER" ...
"SECOND" ...
"SOME" ...
"SQRT" ...
"STDDEV_POP" ...
"STDDEV_SAMP" ...
"SUM" ...
"UPPER" ...
"TRUNCATE" ...
"USER" ...
"VAR_POP" ...
"VAR_SAMP" ...
"YEAR" ...
"CURRENT_CATALOG" ...
"CURRENT_DEFAULT_TRANSFORM_GROUP" ...
"CURRENT_PATH" ...
"CURRENT_ROLE" ...
"CURRENT_SCHEMA" ...
"CURRENT_USER" ...
"SESSION_USER" ...
"SYSTEM_USER" ...
"NEW" ...
"CASE" ...
"CURRENT" ...
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.convertException(FlinkSqlParserImpl.java:472) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.normalizeException(FlinkSqlParserImpl.java:235) ~[?:?]
at org.apache.calcite.sql.parser.SqlParser.handleException(SqlParser.java:140) ~[?:?]
at org.apache.calcite.sql.parser.SqlParser.parseStmtList(SqlParser.java:195) ~[?:?]
at org.apache.flink.table.planner.parse.CalciteParser.parseSqlList(CalciteParser.java:77) ~[?:?]
at org.apache.flink.table.planner.delegation.ParserImpl.parse(ParserImpl.java:101) ~[?:?]
at org.apache.flink.table.client.gateway.local.LocalExecutor.lambda$parseStatement$1(LocalExecutor.java:172) ~[flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.gateway.context.ExecutionContext.wrapClassLoader(ExecutionContext.java:88) ~[flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.gateway.local.LocalExecutor.parseStatement(LocalExecutor.java:172) ~[flink-sql-client-1.15.1.jar:1.15.1]
... 11 more
Caused by: org.apache.flink.sql.parser.impl.ParseException: Encountered "value" at line 1, column 32.
Was expecting one of:
"CURSOR" ...
"EXISTS" ...
"NOT" ...
"ROW" ...
"(" ...
"+" ...
"-" ...
"INTERVAL" ...
<UNSIGNED_INTEGER_LITERAL> ...
<DECIMAL_NUMERIC_LITERAL> ...
<APPROX_NUMERIC_LITERAL> ...
<BINARY_STRING_LITERAL> ...
<PREFIXED_STRING_LITERAL> ...
<QUOTED_STRING> ...
<UNICODE_STRING_LITERAL> ...
<BIG_QUERY_DOUBLE_QUOTED_STRING> ...
<BIG_QUERY_QUOTED_STRING> ...
"TRUE" ...
"FALSE" ...
"UNKNOWN" ...
"NULL" ...
<LBRACE_D> ...
<LBRACE_T> ...
<LBRACE_TS> ...
"DATE" ...
"TIME" ...
"TIMESTAMP" ...
"?" ...
"CAST" ...
"EXTRACT" ...
"POSITION" ...
"CONVERT" ...
"TRANSLATE" ...
"OVERLAY" ...
"FLOOR" ...
"CEIL" ...
"CEILING" ...
"SUBSTRING" ...
"TRIM" ...
"CLASSIFIER" ...
"MATCH_NUMBER" ...
"RUNNING" ...
"PREV" ...
"NEXT" ...
"JSON_EXISTS" ...
"JSON_VALUE" ...
"JSON_QUERY" ...
"JSON_OBJECT" ...
"JSON_OBJECTAGG" ...
"JSON_ARRAY" ...
"JSON_ARRAYAGG" ...
<LBRACE_FN> ...
"MULTISET" ...
"ARRAY" ...
"PERIOD" ...
"SPECIFIC" ...
<IDENTIFIER> ...
<HYPHENATED_IDENTIFIER> ...
<QUOTED_IDENTIFIER> ...
<BACK_QUOTED_IDENTIFIER> ...
<BRACKET_QUOTED_IDENTIFIER> ...
<UNICODE_QUOTED_IDENTIFIER> ...
"ABS" ...
"AVG" ...
"CARDINALITY" ...
"CHAR_LENGTH" ...
"CHARACTER_LENGTH" ...
"COALESCE" ...
"COLLECT" ...
"COVAR_POP" ...
"COVAR_SAMP" ...
"CUME_DIST" ...
"COUNT" ...
"CURRENT_DATE" ...
"CURRENT_TIME" ...
"CURRENT_TIMESTAMP" ...
"DENSE_RANK" ...
"ELEMENT" ...
"EVERY" ...
"EXP" ...
"FIRST_VALUE" ...
"FUSION" ...
"INTERSECTION" ...
"GROUPING" ...
"HOUR" ...
"LAG" ...
"LEAD" ...
"LEFT" ...
"LAST_VALUE" ...
"LN" ...
"LOCALTIME" ...
"LOCALTIMESTAMP" ...
"LOWER" ...
"MAX" ...
"MIN" ...
"MINUTE" ...
"MOD" ...
"MONTH" ...
"NTH_VALUE" ...
"NTILE" ...
"NULLIF" ...
"OCTET_LENGTH" ...
"PERCENT_RANK" ...
"POWER" ...
"RANK" ...
"REGR_COUNT" ...
"REGR_SXX" ...
"REGR_SYY" ...
"RIGHT" ...
"ROW_NUMBER" ...
"SECOND" ...
"SOME" ...
"SQRT" ...
"STDDEV_POP" ...
"STDDEV_SAMP" ...
"SUM" ...
"UPPER" ...
"TRUNCATE" ...
"USER" ...
"VAR_POP" ...
"VAR_SAMP" ...
"YEAR" ...
"CURRENT_CATALOG" ...
"CURRENT_DEFAULT_TRANSFORM_GROUP" ...
"CURRENT_PATH" ...
"CURRENT_ROLE" ...
"CURRENT_SCHEMA" ...
"CURRENT_USER" ...
"SESSION_USER" ...
"SYSTEM_USER" ...
"NEW" ...
"CASE" ...
"CURRENT" ...
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.generateParseException(FlinkSqlParserImpl.java:42459) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.jj_consume_token(FlinkSqlParserImpl.java:42270) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.Expression3(FlinkSqlParserImpl.java:21231) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.Expression2b(FlinkSqlParserImpl.java:20828) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.Expression2(FlinkSqlParserImpl.java:20869) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.Expression(FlinkSqlParserImpl.java:20800) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.WhereOpt(FlinkSqlParserImpl.java:15587) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.SqlSelect(FlinkSqlParserImpl.java:9013) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.LeafQuery(FlinkSqlParserImpl.java:714) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.LeafQueryOrExpr(FlinkSqlParserImpl.java:20783) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.QueryOrExpr(FlinkSqlParserImpl.java:20226) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.OrderedQueryOrExpr(FlinkSqlParserImpl.java:588) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.SqlStmt(FlinkSqlParserImpl.java:3980) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.SqlStmtList(FlinkSqlParserImpl.java:2911) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.parseSqlStmtList(FlinkSqlParserImpl.java:287) ~[?:?]
at org.apache.calcite.sql.parser.SqlParser.parseStmtList(SqlParser.java:193) ~[?:?]
at org.apache.flink.table.planner.parse.CalciteParser.parseSqlList(CalciteParser.java:77) ~[?:?]
at org.apache.flink.table.planner.delegation.ParserImpl.parse(ParserImpl.java:101) ~[?:?]
at org.apache.flink.table.client.gateway.local.LocalExecutor.lambda$parseStatement$1(LocalExecutor.java:172) ~[flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.gateway.context.ExecutionContext.wrapClassLoader(ExecutionContext.java:88) ~[flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.gateway.local.LocalExecutor.parseStatement(LocalExecutor.java:172) ~[flink-sql-client-1.15.1.jar:1.15.1]
... 11 more
Thanks.
I tried to find details that would let me solve this and run the query smoothly, but failed.
Many thanks to anyone who can offer a suggestion.

That's because you haven't used backticks to escape the word "value". Since value is on the list of reserved keywords (see https://nightlies.apache.org/flink/flink-docs-release-1.15/docs/dev/table/sql/overview/#reserved-keywords), using it unescaped in an expression produces a grammar error.
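A minimal sketch of the fix, using the table from the question: escape the reserved column name with backticks (or rename the column to something that is not reserved), and the statement parses.

SELECT * FROM kafka_test WHERE `value` LIKE 'star';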

Related

Drools Rule Template error [Error: unclosed #if{} block. expected #end{}] when placeholder is used

A Drools rule template cannot be compiled if a placeholder is used in a subclass collection list.
I have this simple class structure:
public class Parent {
    private List<Child> children;
}

public class Child {
    private String gender;
}
When the placeholder #{gender} is used, ExternalSpreadsheetCompiler throws the error
[Error: unclosed #if{} block. expected #end{}]
Parent($children: children)
Child(gender == "#{gender}") from $children
There is no issue when "MALE" is hardcoded in the template:
Parent($children: children)
Child(gender == "MALE") from $children
Any idea? Thanks for your help.
Full error stack trace:
[Line: 42, Column: 0]
at org.drools.core.runtime.rule.impl.DefaultConsequenceExceptionHandler.handleException(DefaultConsequenceExceptionHandler.java:39)
at org.drools.core.common.DefaultAgenda.handleException(DefaultAgenda.java:1291)
at org.drools.core.phreak.RuleExecutor.innerFireActivation(RuleExecutor.java:442)
at org.drools.core.phreak.RuleExecutor.fireActivation(RuleExecutor.java:380)
at org.drools.core.phreak.RuleExecutor.fire(RuleExecutor.java:136)
at org.drools.core.phreak.RuleExecutor.evaluateNetworkAndFire(RuleExecutor.java:89)
at org.drools.core.concurrent.AbstractRuleEvaluator.internalEvaluateAndFire(AbstractRuleEvaluator.java:33)
at org.drools.core.concurrent.SequentialRuleEvaluator.evaluateAndFire(SequentialRuleEvaluator.java:43)
at org.drools.core.common.DefaultAgenda.fireLoop(DefaultAgenda.java:1101)
at org.drools.core.common.DefaultAgenda.internalFireAllRules(DefaultAgenda.java:1048)
at org.drools.core.common.DefaultAgenda.fireAllRules(DefaultAgenda.java:1040)
at org.drools.core.impl.StatefulKnowledgeSessionImpl.internalFireAllRules(StatefulKnowledgeSessionImpl.java:1341)
at org.drools.core.impl.StatefulKnowledgeSessionImpl.fireAllRules(StatefulKnowledgeSessionImpl.java:1332)
at org.drools.core.impl.StatefulKnowledgeSessionImpl.fireAllRules(StatefulKnowledgeSessionImpl.java:1316)
at org.drools.template.parser.TemplateDataListener.finishSheet(TemplateDataListener.java:158)
at org.drools.decisiontable.parser.xls.ExcelParser.finishSheet(ExcelParser.java:277)
at org.drools.decisiontable.parser.xls.ExcelParser.processSheet(ExcelParser.java:212)
at org.drools.decisiontable.parser.xls.ExcelParser.parseWorkbook(ExcelParser.java:105)
at org.drools.decisiontable.parser.xls.ExcelParser.parseFile(ExcelParser.java:84)
at org.drools.decisiontable.ExternalSpreadsheetCompiler.compile(ExternalSpreadsheetCompiler.java:140)
at org.drools.decisiontable.ExternalSpreadsheetCompiler.compile(ExternalSpreadsheetCompiler.java:168)
at org.drools.decisiontable.ExternalSpreadsheetCompiler.compile(ExternalSpreadsheetCompiler.java:99)
at org.drools.decisiontable.ExternalSpreadsheetCompiler.compile(ExternalSpreadsheetCompiler.java:85)
at com.dhl.gssdemo.config.SpringDroolsConfig.addTemplateContent(SpringDroolsConfig.java:112)
at com.dhl.gssdemo.config.SpringDroolsConfig.classpathDrlKieBase(SpringDroolsConfig.java:102)
at com.dhl.gssdemo.config.SpringDroolsConfig$$EnhancerBySpringCGLIB$$583d8208.CGLIB$classpathDrlKieBase$0(<generated>)
at com.dhl.gssdemo.config.SpringDroolsConfig$$EnhancerBySpringCGLIB$$583d8208$$FastClassBySpringCGLIB$$25a5fef6.invoke(<generated>)
at org.springframework.cglib.proxy.MethodProxy.invokeSuper(MethodProxy.java:244)
at org.springframework.context.annotation.ConfigurationClassEnhancer$BeanMethodInterceptor.intercept(ConfigurationClassEnhancer.java:331)
at com.dhl.gssdemo.config.SpringDroolsConfig$$EnhancerBySpringCGLIB$$583d8208.classpathDrlKieBase(<generated>)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.springframework.beans.factory.support.SimpleInstantiationStrategy.instantiate(SimpleInstantiationStrategy.java:154)
... 52 common frames omitted
Caused by: java.lang.RuntimeException: [Error: unclosed #if{} block. expected #end{}]
I had the same issue. I think it's related to how the MVEL expression is resolved, but I worked around it by using $$ instead of $.
E.g., in your case:
Parent($children: children)
Child(gender == "#{gender}") from $$children

How to read MongoDB DBRef on PrestoDB/PrestoSQL?

MongoDB can reference one document from another (somewhat like a foreign key) with DBRefs. A DBRef is stored like any other property but has two parts: the name of the collection it references ($ref) and the id of the document it refers to ($id).
Because both parts start with $, these DBRef fields are not picked up automatically when querying the collections.
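For context, a DBRef field stored inside a document looks roughly like this (the collection name and ids below are illustrative, not taken from the question):

{
    "_id" : ObjectId("507f191e810c19729de860ea"),
    "otherCollection" : {
        "$ref" : "otherCollection",
        "$id" : ObjectId("507f1f77bcf86cd799439011")
    }
}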
I defined them manually in the _schema collection like this:
{
    "name" : "otherCollection",
    "type" : "row($id ObjectId, $ref varchar)",
    "hidden" : false
}
But it throws an exception at querying time. I'm using PrestoDB v0.232 but planning to move to PrestoSQL.
2020-03-12T20:56:32.164-0600 ERROR remote-task-callback-7 com.facebook.presto.execution.StageExecutionStateMachine Stage execution 20200313_025632_00008_hnu22.2.0 failed
com.google.common.util.concurrent.UncheckedExecutionException: java.lang.IllegalArgumentException: Bad type signature: 'row($id ObjectId)'
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2050)
at com.google.common.cache.LocalCache.get(LocalCache.java:3952)
at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3974)
at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4958)
at com.google.common.cache.LocalCache$LocalLoadingCache.getUnchecked(LocalCache.java:4964)
at com.facebook.presto.mongodb.MongoSession.getTable(MongoSession.java:153)
at com.facebook.presto.mongodb.MongoMetadata.getTableMetadata(MongoMetadata.java:278)
at com.facebook.presto.mongodb.MongoMetadata.listTableColumns(MongoMetadata.java:130)
at com.facebook.presto.metadata.MetadataManager.listTableColumns(MetadataManager.java:590)
at com.facebook.presto.metadata.MetadataListing.listTableColumns(MetadataListing.java:93)
at com.facebook.presto.connector.system.jdbc.ColumnJdbcTable.cursor(ColumnJdbcTable.java:126)
at com.facebook.presto.connector.system.SystemPageSourceProvider$1.cursor(SystemPageSourceProvider.java:124)
at com.facebook.presto.split.MappedRecordSet.cursor(MappedRecordSet.java:53)
at com.facebook.presto.spi.RecordPageSource.<init>(RecordPageSource.java:38)
at com.facebook.presto.connector.system.SystemPageSourceProvider.createPageSource(SystemPageSourceProvider.java:103)
at com.facebook.presto.spi.connector.ConnectorPageSourceProvider.createPageSource(ConnectorPageSourceProvider.java:40)
at com.facebook.presto.split.PageSourceManager.createPageSource(PageSourceManager.java:58)
at com.facebook.presto.operator.ScanFilterAndProjectOperator.getOutput(ScanFilterAndProjectOperator.java:227)
at com.facebook.presto.operator.Driver.processInternal(Driver.java:379)
at com.facebook.presto.operator.Driver.lambda$processFor$8(Driver.java:283)
at com.facebook.presto.operator.Driver.tryWithLock(Driver.java:675)
at com.facebook.presto.operator.Driver.processFor(Driver.java:276)
at com.facebook.presto.execution.SqlTaskExecution$DriverSplitRunner.processFor(SqlTaskExecution.java:1077)
at com.facebook.presto.execution.executor.PrioritizedSplitRunner.process(PrioritizedSplitRunner.java:162)
at com.facebook.presto.execution.executor.TaskExecutor$TaskRunner.run(TaskExecutor.java:545)
at com.facebook.presto.$gen.Presto_0_232_cc1019c____20200313_025536_1.run(Unknown Source)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.IllegalArgumentException: Bad type signature: 'row($id ObjectId)'
at com.facebook.presto.spi.type.TypeSignature.checkArgument(TypeSignature.java:360)
at com.facebook.presto.spi.type.TypeSignature.parseRowTypeSignature(TypeSignature.java:200)
at com.facebook.presto.spi.type.TypeSignature.parseTypeSignature(TypeSignature.java:119)
at com.facebook.presto.spi.type.TypeSignature.parseTypeSignature(TypeSignature.java:106)
at com.facebook.presto.mongodb.MongoSession.buildColumnHandle(MongoSession.java:197)
at com.facebook.presto.mongodb.MongoSession.loadTableSchema(MongoSession.java:183)
at com.google.common.cache.CacheLoader$FunctionToCacheLoader.load(CacheLoader.java:165)
at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3528)
at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2277)
at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2154)
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2044)
... 28 more
Is there a way to read these kind of fields?
UPDATE:
I tried moving to the latest PrestoSQL version (331) and surrounding the field names with ", like this:
{
    "name" : "otherCollection",
    "type" : "row(\"$id\" ObjectId, \"$ref\" varchar)",
    "hidden" : false
}
The column then started showing up in queries, but its contents are always null. I also tried surrounding the names with ', but in that case I got this exception:
java.util.concurrent.ExecutionException: io.prestosql.sql.parser.ParsingException: line 1:5: mismatched input ''$id''. Expecting: <type>
at com.google.common.util.concurrent.AbstractFuture.getDoneValue(AbstractFuture.java:531)
at com.google.common.util.concurrent.AbstractFuture.get(AbstractFuture.java:492)
at com.google.common.util.concurrent.AbstractFuture$TrustedFuture.get(AbstractFuture.java:83)
at com.google.common.util.concurrent.Uninterruptibles.getUninterruptibly(Uninterruptibles.java:196)
at com.google.common.cache.LocalCache$Segment.getAndRecordStats(LocalCache.java:2312)
at com.google.common.cache.LocalCache$Segment$1.run(LocalCache.java:2292)
at com.google.common.util.concurrent.MoreExecutors$DirectExecutor.execute(MoreExecutors.java:398)
at com.google.common.util.concurrent.AbstractFuture.executeListener(AbstractFuture.java:1029)
at com.google.common.util.concurrent.AbstractFuture.addListener(AbstractFuture.java:675)
at com.google.common.util.concurrent.AbstractFuture$TrustedFuture.addListener(AbstractFuture.java:105)
at com.google.common.cache.LocalCache$Segment.loadAsync(LocalCache.java:2287)
at com.google.common.cache.LocalCache$Segment.refresh(LocalCache.java:2359)
at com.google.common.cache.LocalCache$Segment.scheduleRefresh(LocalCache.java:2337)
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2034)
at com.google.common.cache.LocalCache.get(LocalCache.java:3952)
at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3974)
at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4958)
at com.google.common.cache.LocalCache$LocalLoadingCache.getUnchecked(LocalCache.java:4964)
at io.prestosql.plugin.mongodb.MongoSession.getTable(MongoSession.java:164)
at io.prestosql.plugin.mongodb.MongoMetadata.getColumnHandles(MongoMetadata.java:114)
at io.prestosql.plugin.mongodb.MongoMetadata.getTableProperties(MongoMetadata.java:226)
at io.prestosql.metadata.MetadataManager.getTableProperties(MetadataManager.java:415)
at io.prestosql.sql.planner.DistributedExecutionPlanner.getTableInfo(DistributedExecutionPlanner.java:145)
at io.prestosql.sql.planner.DistributedExecutionPlanner.lambda$doPlan$0(DistributedExecutionPlanner.java:133)
at com.google.common.collect.CollectCollectors.lambda$toImmutableMap$1(CollectCollectors.java:61)
at java.util.stream.ReduceOps$3ReducingSink.accept(ReduceOps.java:169)
at java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:193)
at java.util.Collections$2.tryAdvance(Collections.java:4719)
at java.util.Collections$2.forEachRemaining(Collections.java:4727)
at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:482)
at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:472)
at java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:708)
at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
at java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:499)
at io.prestosql.sql.planner.DistributedExecutionPlanner.doPlan(DistributedExecutionPlanner.java:133)
at io.prestosql.sql.planner.DistributedExecutionPlanner.doPlan(DistributedExecutionPlanner.java:124)
at io.prestosql.sql.planner.DistributedExecutionPlanner.plan(DistributedExecutionPlanner.java:96)
at io.prestosql.execution.SqlQueryExecution.planDistribution(SqlQueryExecution.java:433)
at io.prestosql.execution.SqlQueryExecution.start(SqlQueryExecution.java:339)
at io.prestosql.$gen.Presto_331____20200318_020345_2.run(Unknown Source)
at io.prestosql.execution.SqlQueryManager.createQuery(SqlQueryManager.java:240)
at io.prestosql.dispatcher.LocalDispatchQuery.lambda$startExecution$7(LocalDispatchQuery.java:132)
at io.prestosql.$gen.Presto_331____20200318_020345_2.run(Unknown Source)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Caused by: io.prestosql.sql.parser.ParsingException: line 1:5: mismatched input ''$id''. Expecting: <type>
at io.prestosql.sql.parser.ErrorHandler.syntaxError(ErrorHandler.java:108)
at org.antlr.v4.runtime.ProxyErrorListener.syntaxError(ProxyErrorListener.java:41)
at org.antlr.v4.runtime.Parser.notifyErrorListeners(Parser.java:544)
at org.antlr.v4.runtime.DefaultErrorStrategy.reportNoViableAlternative(DefaultErrorStrategy.java:310)
at org.antlr.v4.runtime.DefaultErrorStrategy.reportError(DefaultErrorStrategy.java:136)
at io.prestosql.sql.parser.SqlBaseParser.type(SqlBaseParser.java:10326)
at io.prestosql.sql.parser.SqlBaseParser.standaloneType(SqlBaseParser.java:386)
at io.prestosql.sql.parser.SqlParser.invokeParser(SqlParser.java:146)
at io.prestosql.sql.parser.SqlParser.createType(SqlParser.java:96)
at io.prestosql.metadata.TypeRegistry.fromSqlType(TypeRegistry.java:164)
at io.prestosql.metadata.MetadataManager.fromSqlType(MetadataManager.java:1262)
at io.prestosql.type.InternalTypeManager.fromSqlType(InternalTypeManager.java:51)
at io.prestosql.plugin.mongodb.MongoSession.buildColumnHandle(MongoSession.java:208)
at io.prestosql.plugin.mongodb.MongoSession.loadTableSchema(MongoSession.java:194)
at com.google.common.cache.CacheLoader$FunctionToCacheLoader.load(CacheLoader.java:165)
at com.google.common.cache.CacheLoader.reload(CacheLoader.java:100)
at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3531)
at com.google.common.cache.LocalCache$Segment.loadAsync(LocalCache.java:2286)
... 35 more
Caused by: org.antlr.v4.runtime.NoViableAltException
at org.antlr.v4.runtime.atn.ParserATNSimulator.noViableAlt(ParserATNSimulator.java:2028)
at org.antlr.v4.runtime.atn.ParserATNSimulator.execATN(ParserATNSimulator.java:467)
at org.antlr.v4.runtime.atn.ParserATNSimulator.adaptivePredict(ParserATNSimulator.java:393)
at io.prestosql.sql.parser.SqlBaseParser.type(SqlBaseParser.java:10012)
... 47 more

Ingestion tasks failing with IllegalArgumentException - druid

We have been experiencing this issue but couldn't find the root cause. All of the indexing tasks seem to be failing.
Logs:
2018-11-09T15:45:10,861 ERROR [task-runner-0-priority-0] io.druid.indexing.common.task.MergeTaskBase - Exception merging[test-requests]: {class=io.druid.indexing.common.task.MergeTaskBase, exceptionType=class java.lang.IllegalArgumentException, exceptionMessage=Instantiation of [simple type, class io.druid.segment.loading.LocalLoadSpec] value failed: [/data/druid/deep/test-requests/2018-09-21T11:00:00.000Z_2018-10-31T14:00:00.000Z/2018-10-31T14:21:43.268Z/0/index.zip] does not exist, interval=2018-09-21T11:00:00.000Z/2018-11-09T15:00:00.000Z}
java.lang.IllegalArgumentException: Instantiation of [simple type, class io.druid.segment.loading.LocalLoadSpec] value failed: [/data/druid/deep/test-requests/2018-09-21T11:00:00.000Z_2018-10-31T14:00:00.000Z/2018-10-31T14:21:43.268Z/0/index.zip] does not exist
at com.fasterxml.jackson.databind.ObjectMapper._convert(ObjectMapper.java:2774) ~[jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.ObjectMapper.convertValue(ObjectMapper.java:2700) ~[jackson-databind-2.4.6.jar:2.4.6]
at io.druid.segment.loading.SegmentLoaderLocalCacheManager.loadInLocation(SegmentLoaderLocalCacheManager.java:205) ~[druid-server-0.10.1.jar:0.10.1]
at io.druid.segment.loading.SegmentLoaderLocalCacheManager.loadInLocationWithStartMarker(SegmentLoaderLocalCacheManager.java:195) ~[druid-server-0.10.1.jar:0.10.1]
at io.druid.segment.loading.SegmentLoaderLocalCacheManager.loadSegmentWithRetry(SegmentLoaderLocalCacheManager.java:154) ~[druid-server-0.10.1.jar:0.10.1]
at io.druid.segment.loading.SegmentLoaderLocalCacheManager.getSegmentFiles(SegmentLoaderLocalCacheManager.java:130) ~[druid-server-0.10.1.jar:0.10.1]
at io.druid.indexing.common.TaskToolbox.fetchSegments(TaskToolbox.java:214) ~[druid-indexing-service-0.10.1.jar:0.10.1]
at io.druid.indexing.common.task.MergeTaskBase.run(MergeTaskBase.java:155) [druid-indexing-service-0.10.1.jar:0.10.1]
at io.druid.indexing.overlord.ThreadPoolTaskRunner$ThreadPoolTaskRunnerCallable.call(ThreadPoolTaskRunner.java:436) [druid-indexing-service-0.10.1.jar:0.10.1]
at io.druid.indexing.overlord.ThreadPoolTaskRunner$ThreadPoolTaskRunnerCallable.call(ThreadPoolTaskRunner.java:408) [druid-indexing-service-0.10.1.jar:0.10.1]
at java.util.concurrent.FutureTask.run(FutureTask.java:266) [?:1.8.0_144]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) [?:1.8.0_144]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) [?:1.8.0_144]
at java.lang.Thread.run(Thread.java:748) [?:1.8.0_144]
Caused by: com.fasterxml.jackson.databind.JsonMappingException: Instantiation of [simple type, class io.druid.segment.loading.LocalLoadSpec] value failed: [/data/druid/deep/test-requests/2018-09-21T11:00:00.000Z_2018-10-31T14:00:00.000Z/2018-10-31T14:21:43.268Z/0/index.zip] does not exist
at com.fasterxml.jackson.databind.deser.std.StdValueInstantiator.wrapException(StdValueInstantiator.java:405) ~[jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.deser.std.StdValueInstantiator.createFromObjectWith(StdValueInstantiator.java:234) ~[jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.deser.impl.PropertyBasedCreator.build(PropertyBasedCreator.java:167) ~[jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.deser.BeanDeserializer._deserializeUsingPropertyBased(BeanDeserializer.java:398) ~[jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.deser.BeanDeserializerBase.deserializeFromObjectUsingNonDefault(BeanDeserializerBase.java:1064) ~[jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.deser.BeanDeserializer.deserializeFromObject(BeanDeserializer.java:264) ~[jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.deser.BeanDeserializer._deserializeOther(BeanDeserializer.java:156) ~[jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.deser.BeanDeserializer.deserialize(BeanDeserializer.java:126) ~[jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.jsontype.impl.AsPropertyTypeDeserializer._deserializeTypedForId(AsPropertyTypeDeserializer.java:113) ~[jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.jsontype.impl.AsPropertyTypeDeserializer.deserializeTypedFromObject(AsPropertyTypeDeserializer.java:84) ~[jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.deser.AbstractDeserializer.deserializeWithType(AbstractDeserializer.java:132) ~[jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.deser.impl.TypeWrappedDeserializer.deserialize(TypeWrappedDeserializer.java:41) ~[jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.ObjectMapper._convert(ObjectMapper.java:2769) ~[jackson-databind-2.4.6.jar:2.4.6]
... 13 more
Caused by: java.lang.IllegalArgumentException: [/data/druid/deep/test-requests/2018-09-21T11:00:00.000Z_2018-10-31T14:00:00.000Z/2018-10-31T14:21:43.268Z/0/index.zip] does not exist
at com.google.common.base.Preconditions.checkArgument(Preconditions.java:148) ~[guava-16.0.1.jar:?]
at io.druid.segment.loading.LocalLoadSpec.<init>(LocalLoadSpec.java:51) ~[druid-server-0.10.1.jar:0.10.1]
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:1.8.0_144]
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) ~[?:1.8.0_144]
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:1.8.0_144]
at java.lang.reflect.Constructor.newInstance(Constructor.java:423) ~[?:1.8.0_144]
at com.fasterxml.jackson.databind.introspect.AnnotatedConstructor.call(AnnotatedConstructor.java:125) ~[jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.deser.std.StdValueInstantiator.createFromObjectWith(StdValueInstantiator.java:230) ~[jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.deser.impl.PropertyBasedCreator.build(PropertyBasedCreator.java:167) ~[jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.deser.BeanDeserializer._deserializeUsingPropertyBased(BeanDeserializer.java:398) ~[jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.deser.BeanDeserializerBase.deserializeFromObjectUsingNonDefault(BeanDeserializerBase.java:1064) ~[jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.deser.BeanDeserializer.deserializeFromObject(BeanDeserializer.java:264) ~[jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.deser.BeanDeserializer._deserializeOther(BeanDeserializer.java:156) ~[jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.deser.BeanDeserializer.deserialize(BeanDeserializer.java:126) ~[jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.jsontype.impl.AsPropertyTypeDeserializer._deserializeTypedForId(AsPropertyTypeDeserializer.java:113) ~[jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.jsontype.impl.AsPropertyTypeDeserializer.deserializeTypedFromObject(AsPropertyTypeDeserializer.java:84) ~[jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.deser.AbstractDeserializer.deserializeWithType(AbstractDeserializer.java:132) ~[jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.deser.impl.TypeWrappedDeserializer.deserialize(TypeWrappedDeserializer.java:41) ~[jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.ObjectMapper._convert(ObjectMapper.java:2769) ~[jackson-databind-2.4.6.jar:2.4.6]
... 13 more
2018-11-09T15:45:10,866 INFO [task-runner-0-priority-0] io.druid.indexing.overlord.TaskRunnerUtils - Task [merge_test-requests_bdd572cb7e9d4d83752bb158970d34710efe6685_2018-11-09T15:44:59.482Z] status changed to [FAILED].
2018-11-09T15:45:10,868 INFO [task-runner-0-priority-0] io.druid.indexing.worker.executor.ExecutorLifecycle - Task completed with status: {
    "id" : "merge_test-requests_bdd572cb7e9d4d83752bb158970d34710efe6685_2018-11-09T15:44:59.482Z",
    "status" : "FAILED",
    "duration" : 33
}
I can see that the segment was created under a different path and that deep storage does not contain it.
2018-11-12T14:00:55,147 INFO [main] io.druid.indexing.worker.executor.ExecutorLifecycle - Running with task: {
    "type" : "append",
    "id" : "merge_test-requests_75a016d378f18e97af34fc7f22f2894d3d1142e5_2018-11-12T14:00:44.001Z",
    "dataSource" : "test-requests",
    "segments" : [ {
        "dataSource" : "test-requests",
        "interval" : "2018-09-21T11:00:00.000Z/2018-10-31T14:00:00.000Z",
        "version" : "2018-10-31T14:21:43.268Z",
        "loadSpec" : {
            "type" : "local",
            "path" : "/data/druid/deep/test-requests/2018-09-21T11:00:00.000Z_2018-10-31T14:00:00.000Z/2018-10-31T14:21:43.268Z/0/index.zip"
        },
        "dimensions" : "env,host,site",
        "metrics" : "bytesSum,count,durationSum,unique_ips,unique_users",
        "shardSpec" : {
            "type" : "none"
        },
        "binaryVersion" : 9,
        "size" : 1525630,
        "identifier" : "test-requests_2018-09-21T11:00:00.000Z_2018-10-31T14:00:00.000Z_2018-10-31T14:21:43.268Z"
    }, {
        "dataSource" : "test-requests",
        "interval" : "2018-10-31T15:00:00.000Z/2018-10-31T16:00:00.000Z",
        "version" : "2018-10-31T15:00:00.000Z",
        "loadSpec" : {
            "type" : "local",
            "path" : "/auto/stage-data/druid/data/test-requests/2018-10-31T15:00:00.000Z_2018-10-31T16:00:00.000Z/2018-10-31T15:00:00.000Z/0/index.zip"
The deep storage path set in the properties file is deep_storage_path => '/auto/stage-data/druid/data'.
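For comparison, local deep storage in Druid itself is configured through the druid.storage.* runtime properties, so it is worth confirming they point at the intended directory on every node (the values below mirror the path above and are illustrative):

druid.storage.type=local
druid.storage.storageDirectory=/auto/stage-data/druid/data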

Why does Spark's GaussianMixture return identical clusters?

I'm using spark-1.5.2 to cluster a dataset with GaussianMixture. No errors occur, but the resulting Gaussians and their weights are all identical. It takes only about 2 iterations to reach the specified tolerance, which seems far too low.
What parameters can I adjust so that clusters form with different values?
import org.apache.spark.SparkContext
import org.apache.spark.rdd._
import org.apache.spark.mllib.clustering.GaussianMixture
import org.apache.spark.mllib.linalg.{Vector, Vectors}

def sparkContext: SparkContext = {
  import org.apache.spark.SparkConf
  new SparkContext(new SparkConf().setMaster("local[*]").setAppName("console"))
}

implicit val sc = sparkContext

def observationsRdd(implicit sc: SparkContext): RDD[Vector] = {
  sc.textFile("observations.csv")
    .map { line => Vectors.dense(line.split(",").map { _.toDouble }) }
}

val gmm = new GaussianMixture()
  .setK(6)
  .setMaxIterations(1000)
  .setConvergenceTol(0.001)
  .setSeed(1)
  .run(observationsRdd)

for (i <- 0 until gmm.k) {
  println("weight=%f\nmu=%s\nsigma=\n%s\n" format
    (gmm.weights(i), gmm.gaussians(i).mu, gmm.gaussians(i).sigma))
}
Truncated output:
weight=0.166667
mu=[4730.358845338535,4391.695550847029,4072.3224046605947,4253.183898304653,4454.124682202946,4775.553442796136,4980.3952860164545,4812.717637711368,5120.44449152493,2820.1827330505857,180.10291313557565,4189.185858050445,3690.793644067457]
sigma=
422700.24745093845 382225.3248240414 398121.9356855869 ... (13 total)
382225.3248240414 471186.33178427175 455777.0565262309 ...
398121.9356855869 455777.0565262309 461210.0532084378 ...
469361.3787142044 497432.39963363775 515341.1303306988 ...
474369.6318494179 482754.83801426284 500047.5114985542 ...
453832.62301188655 443147.58931290614 461017.7038258409 ...
458641.51202210854 433511.1974652861 452015.6655154465 ...
387980.29836054996 459673.3283909025 455118.78272128507 ...
461724.87201332086 423688.91832506843 442649.18455604656 ...
291940.48273324646 257309.1054220978 269116.23674394307 ...
16289.3063964479 14790.06803739929 15387.484828872432 ...
334045.5231910066 338403.3492767321 350531.7768916226 ...
280036.0894114749 267624.69326772855 279651.401859903 ...
(The remaining five components have exactly the same weight, mu, and sigma as the first.)
...
Additionally, the code, input data, and output data are available as a gist at https://gist.github.com/aaron-santos/91b4931a446c460e082b2b3055b9950f
Thank you
I ran your data through ELKI (I had to remove the last line, which is incomplete). At first it did not work either, which I assume is due to the scale of the attributes combined with the default initialization. The same problem is probably present in Spark.
After scaling the data, I could get some reasonable clusters with ELKI (visualizing the first three of the 13 dimensions).
But judging from the distribution of the data points, I do not think Gaussian Mixture Modeling is appropriate for this data. The points appear to be grid-sampled from some hypersurface or some trajectories, not drawn from Gaussian (!) distributions.
Here are the ELKI parameters I used:
-dbc.in /tmp/observations.csv
-dbc.filter normalization.columnwise.AttributeWiseVarianceNormalization
-algorithm clustering.em.EM -em.k 6
-em.centers RandomlyChosenInitialMeans -kmeans.seed 0
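A rough Spark-side equivalent of that normalization step, sketched against the observationsRdd from the question (spark.mllib's StandardScaler; standardizing to zero mean and unit variance is one reasonable choice, not the only one):

import org.apache.spark.mllib.feature.StandardScaler

// Standardize each column to zero mean and unit variance before clustering.
val scalerModel = new StandardScaler(withMean = true, withStd = true).fit(observationsRdd)
val scaledObservations = scalerModel.transform(observationsRdd).cache()

// Run GaussianMixture on scaledObservations instead of the raw data.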
It may be worth experimenting with other clustering algorithms such as HDBSCAN, which can identify density-based clusters:
Parameters:
-dbc.in /tmp/observations.csv
-dbc.filter normalization.columnwise.AttributeWiseVarianceNormalization
-algorithm clustering.hierarchical.extraction.HDBSCANHierarchyExtraction
-algorithm SLINKHDBSCANLinearMemory
-hdbscan.minPts 50 -hdbscan.minclsize 100
I would also try OPTICS, since I find that HDBSCAN often captures only the core of a cluster (by design). From the OPTICS plot, I would not say the clusters are very clearly defined.
Apart from trying other clustering algorithms, I think you also need to work a lot on preprocessing and projecting your data, because it has very strong correlations. Put as much prior knowledge about the data into your preprocessing as you can to improve the results.

JHipster MongoDB connection authorization

I have a problem connecting a JHipster-generated application to a secured MongoDB instance. I created a MongoDB user for the database and granted it the readWrite role.
> show users
{
    "_id" : "jhipster.jhipster",
    "user" : "jhipster",
    "db" : "jhipster",
    "roles" : [
        {
            "role" : "readWrite",
            "db" : "jhipster"
        }
    ]
}
In the generated application I added the MongoDB configuration to the application-dev.yml file like so:
server:
    port: 8080

spring:
    profiles:
        active: dev
    data:
        mongodb:
            host: localhost
            port: 27017
            database: jhipster
            authenticationDatabase: jhipster
            username: jhipster
            password: jhipster
    mail:
        baseUrl: http://localhost:8080
    thymeleaf:
        mode: XHTML
        cache: false

metrics:
    jmx.enabled: true
    spark:
        enabled: false
        host: localhost
        port: 9999
    graphite:
        enabled: false
        host: localhost
        port: 2003
        prefix: jhipster
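For reference, the same connection can usually be expressed as a single connection URI instead of separate host/port/credential properties (a sketch assuming the same user, password, and database as above):

spring:
    data:
        mongodb:
            uri: mongodb://jhipster:jhipster@localhost:27017/jhipster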
After launching the application with mvn spring-boot:run, I get the following stack trace:
[INFO] Scanning for projects...
[INFO]
[INFO] ------------------------------------------------------------------------
[INFO] Building jhipster 0.0.1-SNAPSHOT
[INFO] ------------------------------------------------------------------------
[INFO]
[INFO] >>> spring-boot-maven-plugin:1.2.1.RELEASE:run (default-cli) > test-compile @ jhipster >>>
[INFO]
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (enforce-versions) @ jhipster ---
[INFO]
[INFO] --- maven-resources-plugin:2.6:resources (default-resources) @ jhipster ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] Copying 4 resources
[INFO] Copying 8 resources
[INFO]
[INFO] --- maven-compiler-plugin:3.1:compile (default-compile) @ jhipster ---
[INFO] Changes detected - recompiling the module!
[INFO] Compiling 82 source files to /Users/grega/Development/sandbox/jhipster/target/classes
[INFO]
[INFO] --- maven-resources-plugin:2.6:testResources (default-testResources) @ jhipster ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] Copying 2 resources
[INFO]
[INFO] --- maven-compiler-plugin:3.1:testCompile (default-testCompile) @ jhipster ---
[INFO] Nothing to compile - all classes are up to date
[INFO]
[INFO] <<< spring-boot-maven-plugin:1.2.1.RELEASE:run (default-cli) < test-compile @ jhipster <<<
[INFO]
[INFO] --- spring-boot-maven-plugin:1.2.1.RELEASE:run (default-cli) @ jhipster ---
[INFO] Attaching agents: []
Listening for transport dt_socket at address: 5005
[INFO] com.mycompany.myapp.Application - Starting Application on Gregas-MacBook-Pro.local with PID 20096 (/Users/grega/Development/sandbox/jhipster/target/classes started by grega in /Users/grega/Development/sandbox/jhipster)
[DEBUG] com.mycompany.myapp.Application - Running with Spring Boot v1.2.1.RELEASE, Spring v4.1.4.RELEASE
[DEBUG] org.jboss.logging - Logging Provider: org.jboss.logging.Slf4jLoggerProvider
[DEBUG] com.mycompany.myapp.config.AsyncConfiguration - Creating Async Task Executor
[DEBUG] com.mycompany.myapp.config.MetricsConfiguration - Registering JVM gauges
[INFO] com.mycompany.myapp.config.MetricsConfiguration - Initializing Metrics JMX reporting
[DEBUG] com.mycompany.myapp.config.MailConfiguration - Configuring mail server
[INFO] com.mycompany.myapp.config.WebConfigurer - Web application configuration, using profiles: [dev]
[DEBUG] com.mycompany.myapp.config.WebConfigurer - Initializing Metrics registries
[DEBUG] com.mycompany.myapp.config.WebConfigurer - Registering Metrics Filter
[DEBUG] com.mycompany.myapp.config.WebConfigurer - Registering Metrics Servlet
[INFO] com.mycompany.myapp.config.WebConfigurer - Web application fully configured
[INFO] com.mycompany.myapp.Application - Running with Spring profile(s) : [dev]
[INFO] com.mycompany.myapp.config.ThymeleafConfiguration - loading non-reloadable mail messages resources
[DEBUG] com.mycompany.myapp.config.apidoc.SwaggerConfiguration - Starting Swagger
[DEBUG] com.mycompany.myapp.config.apidoc.SwaggerConfiguration - Started Swagger in 34 ms
[DEBUG] com.mycompany.myapp.config.CacheConfiguration - No cache
[DEBUG] com.mycompany.myapp.config.DatabaseConfiguration - Configuring Mongeez
[INFO] org.mongeez.reader.FilesetXMLReader - Num of changefiles 2
[WARN] org.springframework.boot.context.embedded.AnnotationConfigEmbeddedWebApplicationContext - Exception encountered during context initialization - cancelling refresh attempt
org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'mongeez' defined in class path resource [com/mycompany/myapp/config/DatabaseConfiguration.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.mongeez.Mongeez]: Factory method 'mongeez' threw exception; nested exception is com.mongodb.CommandFailureException: { "serverUsed" : "localhost:27017" , "ok" : 0.0 , "errmsg" : "not authorized on jhipster to execute command { $eval: \"db.T_AUTHORITY.insert({\"_id\" : \"ROLE_ADMIN\"});\n db.T_AUTHORITY.insert({\"_id\" : \"ROLE_USER\"});\", args: [] }" , "code" : 13}
at org.springframework.beans.factory.support.ConstructorResolver.instantiateUsingFactoryMethod(ConstructorResolver.java:599) ~[spring-beans-4.1.4.RELEASE.jar:4.1.4.RELEASE]
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.instantiateUsingFactoryMethod(AbstractAutowireCapableBeanFactory.java:1111) ~[spring-beans-4.1.4.RELEASE.jar:4.1.4.RELEASE]
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBeanInstance(AbstractAutowireCapableBeanFactory.java:1006) ~[spring-beans-4.1.4.RELEASE.jar:4.1.4.RELEASE]
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:504) ~[spring-beans-4.1.4.RELEASE.jar:4.1.4.RELEASE]
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:476) ~[spring-beans-4.1.4.RELEASE.jar:4.1.4.RELEASE]
at org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:303) ~[spring-beans-4.1.4.RELEASE.jar:4.1.4.RELEASE]
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:230) ~[spring-beans-4.1.4.RELEASE.jar:4.1.4.RELEASE]
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:299) ~[spring-beans-4.1.4.RELEASE.jar:4.1.4.RELEASE]
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:194) ~[spring-beans-4.1.4.RELEASE.jar:4.1.4.RELEASE]
at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:762) ~[spring-beans-4.1.4.RELEASE.jar:4.1.4.RELEASE]
at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:757) ~[spring-context-4.1.4.RELEASE.jar:4.1.4.RELEASE]
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:480) ~[spring-context-4.1.4.RELEASE.jar:4.1.4.RELEASE]
at org.springframework.boot.context.embedded.EmbeddedWebApplicationContext.refresh(EmbeddedWebApplicationContext.java:118) [spring-boot-1.2.1.RELEASE.jar:1.2.1.RELEASE]
at org.springframework.boot.SpringApplication.refresh(SpringApplication.java:691) [spring-boot-1.2.1.RELEASE.jar:1.2.1.RELEASE]
at org.springframework.boot.SpringApplication.run(SpringApplication.java:321) [spring-boot-1.2.1.RELEASE.jar:1.2.1.RELEASE]
at com.mycompany.myapp.Application.main(Application.java:55) [classes/:na]
Caused by: org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.mongeez.Mongeez]: Factory method 'mongeez' threw exception; nested exception is com.mongodb.CommandFailureException: { "serverUsed" : "localhost:27017" , "ok" : 0.0 , "errmsg" : "not authorized on jhipster to execute command { $eval: \"db.T_AUTHORITY.insert({\"_id\" : \"ROLE_ADMIN\"});\n db.T_AUTHORITY.insert({\"_id\" : \"ROLE_USER\"});\", args: [] }" , "code" : 13}
at org.springframework.beans.factory.support.SimpleInstantiationStrategy.instantiate(SimpleInstantiationStrategy.java:189) ~[spring-beans-4.1.4.RELEASE.jar:4.1.4.RELEASE]
at org.springframework.beans.factory.support.ConstructorResolver.instantiateUsingFactoryMethod(ConstructorResolver.java:588) ~[spring-beans-4.1.4.RELEASE.jar:4.1.4.RELEASE]
... 15 common frames omitted
Caused by: com.mongodb.CommandFailureException: { "serverUsed" : "localhost:27017" , "ok" : 0.0 , "errmsg" : "not authorized on jhipster to execute command { $eval: \"db.T_AUTHORITY.insert({\"_id\" : \"ROLE_ADMIN\"});\n db.T_AUTHORITY.insert({\"_id\" : \"ROLE_USER\"});\", args: [] }" , "code" : 13}
at com.mongodb.CommandResult.getException(CommandResult.java:76) ~[mongo-java-driver-2.12.4.jar:na]
at com.mongodb.CommandResult.throwOnError(CommandResult.java:131) ~[mongo-java-driver-2.12.4.jar:na]
at com.mongodb.DB.eval(DB.java:461) ~[mongo-java-driver-2.12.4.jar:na]
at org.mongeez.dao.MongeezDao.runScript(MongeezDao.java:124) ~[mongeez-0.9.4.jar:na]
at org.mongeez.commands.Script.run(Script.java:32) ~[mongeez-0.9.4.jar:na]
at org.mongeez.ChangeSetExecutor.execute(ChangeSetExecutor.java:53) ~[mongeez-0.9.4.jar:na]
at org.mongeez.ChangeSetExecutor.execute(ChangeSetExecutor.java:42) ~[mongeez-0.9.4.jar:na]
at org.mongeez.Mongeez.process(Mongeez.java:40) ~[mongeez-0.9.4.jar:na]
at com.mycompany.myapp.config.DatabaseConfiguration.mongeez(DatabaseConfiguration.java:65) ~[classes/:na]
at com.mycompany.myapp.config.DatabaseConfiguration$$EnhancerBySpringCGLIB$$5c5942c3.CGLIB$mongeez$4(<generated>) ~[spring-core-4.1.4.RELEASE.jar:na]
at com.mycompany.myapp.config.DatabaseConfiguration$$EnhancerBySpringCGLIB$$5c5942c3$$FastClassBySpringCGLIB$$e5d33dc7.invoke(<generated>) ~[spring-core-4.1.4.RELEASE.jar:na]
at org.springframework.cglib.proxy.MethodProxy.invokeSuper(MethodProxy.java:228) ~[spring-core-4.1.4.RELEASE.jar:4.1.4.RELEASE]
at org.springframework.context.annotation.ConfigurationClassEnhancer$BeanMethodInterceptor.intercept(ConfigurationClassEnhancer.java:309) ~[spring-context-4.1.4.RELEASE.jar:4.1.4.RELEASE]
at com.mycompany.myapp.config.DatabaseConfiguration$$EnhancerBySpringCGLIB$$5c5942c3.mongeez(<generated>) ~[spring-core-4.1.4.RELEASE.jar:na]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.7.0_65]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) ~[na:1.7.0_65]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.7.0_65]
at java.lang.reflect.Method.invoke(Method.java:606) ~[na:1.7.0_65]
at org.springframework.beans.factory.support.SimpleInstantiationStrategy.instantiate(SimpleInstantiationStrategy.java:162) ~[spring-beans-4.1.4.RELEASE.jar:4.1.4.RELEASE]
... 16 common frames omitted
[INFO] com.mycompany.myapp.config.CacheConfiguration - Closing Cache Manager
[ERROR] org.springframework.boot.SpringApplication - Application startup failed
(The same BeanCreationException stack trace, with the same com.mongodb.CommandFailureException "not authorized ... code 13" root cause, is then printed twice more: once by SpringApplication's failure logger and once by the uncaught-exception handler in the main thread. Omitted here for brevity.)
[INFO] ------------------------------------------------------------------------
[INFO] BUILD SUCCESS
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 19.800 s
[INFO] Finished at: 2015-02-13T18:56:39+01:00
[INFO] Final Memory: 28M/228M
[INFO] ------------------------------------------------------------------------
Does anyone know what I am doing wrong?
I struggled with the same problem for a day.
I still don't know the real root cause (I had the same stack trace), but disabling SELinux made it work, even though SELinux never reported any denials.
It's worth checking in that direction.
Good luck!
I am not sure of the root cause either, as I was facing the same issue, but setting the connection URI directly works:
spring.data.mongodb.uri=mongodb://username:password@localhost:27017/database_name
I had the same problem yesterday. The problem is that MongoDB 3.0 changed the default authentication mechanism from MONGODB-CR to SCRAM-SHA-1.
Try this (it worked for me):
1) Update to a newer Spring Data MongoDB release train by adding this to the <properties> section of your pom.xml:
<spring-data-releasetrain.version>Fowler-SR2</spring-data-releasetrain.version>
2) Add a 3.x version of the mongo-java-driver:
<dependency>
    <groupId>org.mongodb</groupId>
    <artifactId>mongo-java-driver</artifactId>
    <version>3.0.4</version>
</dependency>
3) Set MongoClient-specific options for the credentials by overriding the mongo() bean (this assumes host, port, username, database and password fields and a static import of java.util.Collections.singletonList):
@Override
@Bean
public Mongo mongo() throws Exception {
    return new MongoClient(
        singletonList(new ServerAddress(host, port)),
        singletonList(MongoCredential.createCredential(username, database, password.toCharArray())));
}
If you would like more information, I have written a detailed post about it:
http://ignaciosuay.com/how-to-connect-to-mongodb-3-0-using-spring-boot/
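Since the root cause is the SCRAM-SHA-1 default, you can also pin the mechanism explicitly instead of relying on the driver's negotiation. A minimal, self-contained sketch, assuming mongo-java-driver 3.x; the host, port, user and database values are placeholders:
import static java.util.Collections.singletonList;

import com.mongodb.MongoClient;
import com.mongodb.MongoCredential;
import com.mongodb.ServerAddress;

public class ScramConnectTest {
    public static void main(String[] args) {
        // Force SCRAM-SHA-1 instead of the old MONGODB-CR default;
        // all connection values below are placeholders.
        MongoCredential credential = MongoCredential.createScramSha1Credential(
            "username", "database_name", "password".toCharArray());
        MongoClient client = new MongoClient(
            singletonList(new ServerAddress("localhost", 27017)),
            singletonList(credential));
        // listCollectionNames() issues a real command, so it fails fast
        // if authentication is not accepted by the server.
        System.out.println(client.getDatabase("database_name").listCollectionNames().first());
        client.close();
    }
}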
Mongeez uses MongoDB's eval command to bootstrap the database. In some setups (for example, when authentication is enabled and the user lacks the required privileges) that command is not permitted, so Mongeez will not work there.
Instead of Mongeez you can use mongobee to provide the migration logic for your app.
This is my default mongobee migration code for JHipster; it has the same effect as the Mongeez one.
package your.package.name.config.dbmigrations;

import com.github.mongobee.changeset.ChangeLog;
import com.github.mongobee.changeset.ChangeSet;
import com.mongodb.BasicDBObjectBuilder;
import com.mongodb.DB;
import com.mongodb.DBCollection;

import java.util.Date;
import java.util.HashMap;
import java.util.Map;

/**
 * Creates the initial database setup.
 */
@ChangeLog(order = "001")
public class InitialSetupMigration {

    private Map<String, String>[] authoritiesUser = new Map[]{new HashMap<>()};
    private Map<String, String>[] authoritiesAdminAndUser = new Map[]{new HashMap<>(), new HashMap<>()};

    {
        authoritiesUser[0].put("_id", "ROLE_USER");
        authoritiesAdminAndUser[0].put("_id", "ROLE_USER");
        authoritiesAdminAndUser[1].put("_id", "ROLE_ADMIN");
    }

    @ChangeSet(order = "01", author = "initiator", id = "01-addAuthorities")
    public void addAuthorities(DB db) {
        DBCollection authorityCollection = db.getCollection("jhi_authority");
        authorityCollection.insert(
            BasicDBObjectBuilder.start()
                .add("_id", "ROLE_ADMIN")
                .get());
        authorityCollection.insert(
            BasicDBObjectBuilder.start()
                .add("_id", "ROLE_USER")
                .get());
    }

    @ChangeSet(order = "02", author = "initiator", id = "02-addUsers")
    public void addUsers(DB db) {
        DBCollection usersCollection = db.getCollection("jhi_user");
        usersCollection.createIndex("login");
        usersCollection.createIndex("email");
        // Passwords are bcrypt hashes; "activated" must be a boolean, not the string "true"
        usersCollection.insert(BasicDBObjectBuilder.start()
            .add("_id", "user-0")
            .add("login", "system")
            .add("password", "$2a$10$mE.qmcV0mFU5NcKh73TZx.z4ueI/.bDWbj0T1BYyqP481kGGarKLG")
            .add("first_name", "")
            .add("last_name", "System")
            .add("email", "system@localhost")
            .add("activated", true)
            .add("lang_key", "en")
            .add("created_by", "system")
            .add("created_date", new Date())
            .add("authorities", authoritiesAdminAndUser)
            .get());
        usersCollection.insert(BasicDBObjectBuilder.start()
            .add("_id", "user-1")
            .add("login", "anonymousUser")
            .add("password", "$2a$10$j8S5d7Sr7.8VTOYNviDPOeWX8KcYILUVJBsYV83Y5NtECayypx9lO")
            .add("first_name", "Anonymous")
            .add("last_name", "User")
            .add("email", "anonymous@localhost")
            .add("activated", true)
            .add("lang_key", "en")
            .add("created_by", "system")
            .add("created_date", new Date())
            .add("authorities", new Map[]{})
            .get());
        usersCollection.insert(BasicDBObjectBuilder.start()
            .add("_id", "user-2")
            .add("login", "admin")
            .add("password", "$2a$10$gSAhZrxMllrbgj/kkK9UceBPpChGWJA7SYIb1Mqo.n5aNLq1/oRrC")
            .add("first_name", "admin")
            .add("last_name", "Administrator")
            .add("email", "admin@localhost")
            .add("activated", true)
            .add("lang_key", "en")
            .add("created_by", "system")
            .add("created_date", new Date())
            .add("authorities", authoritiesAdminAndUser)
            .get());
        usersCollection.insert(BasicDBObjectBuilder.start()
            .add("_id", "user-3")
            .add("login", "user")
            .add("password", "$2a$10$VEjxo0jq2YG9Rbk2HmX9S.k1uZBGYUHdUcid3g/vfiEl7lwWgOH/K")
            .add("first_name", "")
            .add("last_name", "User")
            .add("email", "user@localhost")
            .add("activated", true)
            .add("lang_key", "en")
            .add("created_by", "system")
            .add("created_date", new Date())
            .add("authorities", authoritiesUser)
            .get());
    }

    @ChangeSet(author = "initiator", id = "03-addSocialUserConnection", order = "03")
    public void addSocialUserConnection(DB db) {
        DBCollection socialUserConnectionCollection = db.getCollection("jhi_social_user_connection");
        // Unique compound index on (user_id, provider_id, provider_user_id)
        socialUserConnectionCollection.createIndex(BasicDBObjectBuilder
                .start("user_id", 1)
                .add("provider_id", 1)
                .add("provider_user_id", 1)
                .get(),
            "user-prov-provusr-idx", true);
    }
}
In the DatabaseConfiguration.java file, remove the mongeez bean and add this (mongo and mongoProperties are the client and properties already injected in that class):
@Bean
public Mongobee mongobee() {
    log.debug("Configuring Mongobee");
    Mongobee mongobee = new Mongobee(mongo);
    mongobee.setDbName(mongoProperties.getDatabase());
    // package to scan for changesets; must match the package of InitialSetupMigration
    mongobee.setChangeLogsScanPackage("your.package.name.config.dbmigrations");
    mongobee.setEnabled(true);
    return mongobee;
}
Finally, update your Gradle build file: remove the mongeez dependency and add:
compile group: 'com.github.mongobee', name: 'mongobee', version: mongobee_version
I've also opened a pull request to update the JHipster generator. Now we wait to see if the guys agree :)
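To sanity-check that the migration actually ran, you can inspect the changelog collection mongobee maintains (dbchangelog by default, as far as I know). A minimal sketch with the plain driver; the database name jhipster is a placeholder:
import com.mongodb.MongoClient;
import com.mongodb.client.MongoCollection;
import org.bson.Document;

public class ChangelogCheck {
    public static void main(String[] args) {
        MongoClient client = new MongoClient("localhost", 27017);
        // mongobee records one document per applied @ChangeSet
        MongoCollection<Document> log = client.getDatabase("jhipster")
            .getCollection("dbchangelog");
        for (Document d : log.find()) {
            System.out.println(d.toJson());
        }
        client.close();
    }
}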
The default encrypted password in the InitialSetupMigration class above is wrong; with that hash, the working credentials are:
username: admin
password: user
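If you would rather set the hashes yourself than trust the ones above, you can generate fresh bcrypt values; a minimal sketch using Spring Security's BCryptPasswordEncoder, which a JHipster application already has on its classpath (the password literal is just an example):
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;

public class BcryptHashGenerator {
    public static void main(String[] args) {
        BCryptPasswordEncoder encoder = new BCryptPasswordEncoder();
        // Generate a new hash for the desired login password
        String hash = encoder.encode("admin");
        System.out.println(hash);
        // Verify a raw password against a stored hash
        System.out.println(encoder.matches("admin", hash));
    }
}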