Why does Spark's GaussianMixture return identical clusters?

I'm using spark-1.5.2 to cluster a dataset with GaussianMixture. No errors occur, but the resulting Gaussians and their weights are all identical. The number of iterations it takes to reach the specified tolerance is about 2, which seems far too low.
What parameters can I adjust so that clusters form with different values?
import org.apache.spark.SparkContext
import org.apache.spark.rdd._
import org.apache.spark.mllib.clustering.GaussianMixture
import org.apache.spark.mllib.linalg.{Vector, Vectors}

def sparkContext: SparkContext = {
  import org.apache.spark.SparkConf
  new SparkContext(new SparkConf().setMaster("local[*]").setAppName("console"))
}

implicit val sc = sparkContext

def observationsRdd(implicit sc: SparkContext): RDD[Vector] = {
  sc.textFile("observations.csv")
    .map { line => Vectors.dense(line.split(",").map { _.toDouble }) }
}

val gmm = new GaussianMixture()
  .setK(6)
  .setMaxIterations(1000)
  .setConvergenceTol(0.001)
  .setSeed(1)
  .run(observationsRdd)

for (i <- 0 until gmm.k) {
  println("weight=%f\nmu=%s\nsigma=\n%s\n" format
    (gmm.weights(i), gmm.gaussians(i).mu, gmm.gaussians(i).sigma))
}
Truncated output:
weight=0.166667
mu=[4730.358845338535,4391.695550847029,4072.3224046605947,4253.183898304653,4454.124682202946,4775.553442796136,4980.3952860164545,4812.717637711368,5120.44449152493,2820.1827330505857,180.10291313557565,4189.185858050445,3690.793644067457]
sigma=
422700.24745093845 382225.3248240414 398121.9356855869 ... (13 total)
382225.3248240414 471186.33178427175 455777.0565262309 ...
398121.9356855869 455777.0565262309 461210.0532084378 ...
469361.3787142044 497432.39963363775 515341.1303306988 ...
474369.6318494179 482754.83801426284 500047.5114985542 ...
453832.62301188655 443147.58931290614 461017.7038258409 ...
458641.51202210854 433511.1974652861 452015.6655154465 ...
387980.29836054996 459673.3283909025 455118.78272128507 ...
461724.87201332086 423688.91832506843 442649.18455604656 ...
291940.48273324646 257309.1054220978 269116.23674394307 ...
16289.3063964479 14790.06803739929 15387.484828872432 ...
334045.5231910066 338403.3492767321 350531.7768916226 ...
280036.0894114749 267624.69326772855 279651.401859903 ...
(The remaining five components are identical to the first: the same weight=0.166667, the same mu, and the same sigma.)
Additionally, the code, input data, and output data are available as a gist: https://gist.github.com/aaron-santos/91b4931a446c460e082b2b3055b9950f
Thank you.

I ran your data through ELKI (I had to remove the last line, which is incomplete). At first it did not work either, which I assume is due to the scale of the attributes, along with the default initialization. Probably the same problem is present in Spark (see the sketch after the ELKI parameters below).
After scaling the data, I could get some reasonable clusters with ELKI (visualized on the first three of the 13 dimensions).
But judging from the distribution of the data points, I do not think Gaussian Mixture Modeling is appropriate for this data. The points appear to be grid-sampled from some hypersurface or from some trajectories, not from Gaussian (!) distributions.
Here are the ELKI parameters I used:
-dbc.in /tmp/observations.csv
-dbc.filter normalization.columnwise.AttributeWiseVarianceNormalization
-algorithm clustering.em.EM -em.k 6
-em.centers RandomlyChosenInitialMeans -kmeans.seed 0
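The Spark-side equivalent of that normalization step is to standardize the features before running EM. A minimal sketch, reusing the setup from the question (the file name and GMM parameters are taken from there; the StandardScaler settings are my assumption):
import org.apache.spark.mllib.clustering.GaussianMixture
import org.apache.spark.mllib.feature.StandardScaler
import org.apache.spark.mllib.linalg.Vectors

// Standardize each attribute to zero mean and unit variance so that no
// single large-scale attribute dominates the likelihood.
val observations = sc.textFile("observations.csv")
  .map(line => Vectors.dense(line.split(",").map(_.toDouble)))
observations.cache() // both the scaler and the GMM iterate over the data

val scaler = new StandardScaler(withMean = true, withStd = true).fit(observations)
val scaled = scaler.transform(observations)

val gmm = new GaussianMixture()
  .setK(6)
  .setMaxIterations(1000)
  .setConvergenceTol(0.001)
  .setSeed(1)
  .run(scaled)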
It may be worth experimenting with other clustering algorithms such as HDBSCAN, which can identify density-based clusters:
Parameters:
-dbc.in /tmp/observations.csv
-dbc.filter normalization.columnwise.AttributeWiseVarianceNormalization
-algorithm clustering.hierarchical.extraction.HDBSCANHierarchyExtraction
-algorithm SLINKHDBSCANLinearMemory
-hdbscan.minPts 50 -hdbscan.minclsize 100
I would also try OPTICS, as I find HDBSCAN to often only capture the core of a cluster (by design). From the OPTICS plot, I would not say the clusters are very clearly defined.
Apart from trying other clustering algorithms, I think you also need to work a lot on preprocessing and projecting your data, because it has very strong correlations. Try to put as much prior knowledge about the data into your preprocessing as you can to improve the results; one possible projection is sketched below.
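One way to do such a projection in Spark is PCA on the standardized data. A minimal sketch (it reuses the scaled RDD from the earlier sketch; keeping 3 components is an assumption):
import org.apache.spark.mllib.linalg.distributed.RowMatrix

// Project the scaled observations onto their top principal components
// to remove the strong linear correlations between the attributes.
val mat = new RowMatrix(scaled)
val pc = mat.computePrincipalComponents(3) // number of components is an assumption
val projected = mat.multiply(pc).rows      // RDD[Vector] in the projected space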


Flink SQL client can't parse really simple SQL?

Environment
Flink version: 1.15.1
Kafka version: kafka_2.12-2.2.0
CREATE TABLE kafka_test (
`event_time` TIMESTAMP(3) METADATA FROM 'timestamp',
`partition` BIGINT METADATA VIRTUAL,
`offset` BIGINT METADATA VIRTUAL,
`value` String
) WITH (
'connector' = 'kafka',
'topic' = 'first',
'properties.bootstrap.servers' = 'hxyh2:9092',
'properties.group.id' = 'testGroup',
'scan.startup.mode' = 'earliest-offset',
'format' = 'raw'
);
Result:
When I execute 'select * from kafka_test', it works well and shows results in real time.
When I execute "select * from kafka_test where value like 'star'", it throws an exception that I don't understand; I would like to know the possible causes.
2022-09-09 16:49:39,205 WARN org.apache.flink.table.client.cli.CliClient [] - Could not execute SQL statement.
org.apache.flink.table.client.gateway.SqlExecutionException: Failed to parse statement: select * from kafka_test where value like ‘start’;
at org.apache.flink.table.client.gateway.local.LocalExecutor.parseStatement(LocalExecutor.java:174) ~[flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.cli.SqlCommandParserImpl.parseCommand(SqlCommandParserImpl.java:45) ~[flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.cli.SqlMultiLineParser.parse(SqlMultiLineParser.java:71) ~[flink-sql-client-1.15.1.jar:1.15.1]
at org.jline.reader.impl.LineReaderImpl.acceptLine(LineReaderImpl.java:2731) ~[flink-sql-client-1.15.1.jar:1.15.1]
at org.jline.reader.impl.LineReaderImpl.readLine(LineReaderImpl.java:585) ~[flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.cli.CliClient.getAndExecuteStatements(CliClient.java:296) [flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.cli.CliClient.executeInteractive(CliClient.java:281) [flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.cli.CliClient.executeInInteractiveMode(CliClient.java:229) [flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.SqlClient.openCli(SqlClient.java:151) [flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.SqlClient.start(SqlClient.java:95) [flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.SqlClient.startClient(SqlClient.java:187) [flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.SqlClient.main(SqlClient.java:161) [flink-sql-client-1.15.1.jar:1.15.1]
Caused by: org.apache.flink.table.api.SqlParserException: SQL parse failed. Encountered "value" at line 1, column 32.
Was expecting one of:
"CURSOR" ...
"EXISTS" ...
"NOT" ...
"ROW" ...
"(" ...
"+" ...
"-" ...
"INTERVAL" ...
<UNSIGNED_INTEGER_LITERAL> ...
<DECIMAL_NUMERIC_LITERAL> ...
<APPROX_NUMERIC_LITERAL> ...
<BINARY_STRING_LITERAL> ...
<PREFIXED_STRING_LITERAL> ...
<QUOTED_STRING> ...
<UNICODE_STRING_LITERAL> ...
<BIG_QUERY_DOUBLE_QUOTED_STRING> ...
<BIG_QUERY_QUOTED_STRING> ...
"TRUE" ...
"FALSE" ...
"UNKNOWN" ...
"NULL" ...
<LBRACE_D> ...
<LBRACE_T> ...
<LBRACE_TS> ...
"DATE" ...
"TIME" ...
"TIMESTAMP" ...
"?" ...
"CAST" ...
"EXTRACT" ...
"POSITION" ...
"CONVERT" ...
"TRANSLATE" ...
"OVERLAY" ...
"FLOOR" ...
"CEIL" ...
"CEILING" ...
"SUBSTRING" ...
"TRIM" ...
"CLASSIFIER" ...
"MATCH_NUMBER" ...
"RUNNING" ...
"PREV" ...
"NEXT" ...
"JSON_EXISTS" ...
"JSON_VALUE" ...
"JSON_QUERY" ...
"JSON_OBJECT" ...
"JSON_OBJECTAGG" ...
"JSON_ARRAY" ...
"JSON_ARRAYAGG" ...
<LBRACE_FN> ...
"MULTISET" ...
"ARRAY" ...
"PERIOD" ...
"SPECIFIC" ...
<IDENTIFIER> ...
<HYPHENATED_IDENTIFIER> ...
<QUOTED_IDENTIFIER> ...
<BACK_QUOTED_IDENTIFIER> ...
<BRACKET_QUOTED_IDENTIFIER> ...
<UNICODE_QUOTED_IDENTIFIER> ...
"ABS" ...
"AVG" ...
"CARDINALITY" ...
"CHAR_LENGTH" ...
"CHARACTER_LENGTH" ...
"COALESCE" ...
"COLLECT" ...
"COVAR_POP" ...
"COVAR_SAMP" ...
"CUME_DIST" ...
"COUNT" ...
"CURRENT_DATE" ...
"CURRENT_TIME" ...
"CURRENT_TIMESTAMP" ...
"DENSE_RANK" ...
"ELEMENT" ...
"EVERY" ...
"EXP" ...
"FIRST_VALUE" ...
"FUSION" ...
"INTERSECTION" ...
"GROUPING" ...
"HOUR" ...
"LAG" ...
"LEAD" ...
"LEFT" ...
"LAST_VALUE" ...
"LN" ...
"LOCALTIME" ...
"LOCALTIMESTAMP" ...
"LOWER" ...
"MAX" ...
"MIN" ...
"MINUTE" ...
"MOD" ...
"MONTH" ...
"NTH_VALUE" ...
"NTILE" ...
"NULLIF" ...
"OCTET_LENGTH" ...
"PERCENT_RANK" ...
"POWER" ...
"RANK" ...
"REGR_COUNT" ...
"REGR_SXX" ...
"REGR_SYY" ...
"RIGHT" ...
"ROW_NUMBER" ...
"SECOND" ...
"SOME" ...
"SQRT" ...
"STDDEV_POP" ...
"STDDEV_SAMP" ...
"SUM" ...
"UPPER" ...
"TRUNCATE" ...
"USER" ...
"VAR_POP" ...
"VAR_SAMP" ...
"YEAR" ...
"CURRENT_CATALOG" ...
"CURRENT_DEFAULT_TRANSFORM_GROUP" ...
"CURRENT_PATH" ...
"CURRENT_ROLE" ...
"CURRENT_SCHEMA" ...
"CURRENT_USER" ...
"SESSION_USER" ...
"SYSTEM_USER" ...
"NEW" ...
"CASE" ...
"CURRENT" ...
at org.apache.flink.table.planner.parse.CalciteParser.parseSqlList(CalciteParser.java:82) ~[?:?]
at org.apache.flink.table.planner.delegation.ParserImpl.parse(ParserImpl.java:101) ~[?:?]
at org.apache.flink.table.client.gateway.local.LocalExecutor.lambda$parseStatement$1(LocalExecutor.java:172) ~[flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.gateway.context.ExecutionContext.wrapClassLoader(ExecutionContext.java:88) ~[flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.gateway.local.LocalExecutor.parseStatement(LocalExecutor.java:172) ~[flink-sql-client-1.15.1.jar:1.15.1]
... 11 more
Caused by: org.apache.calcite.sql.parser.SqlParseException: Encountered "value" at line 1, column 32.
Was expecting one of:
(the same list of expected tokens as above)
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.convertException(FlinkSqlParserImpl.java:472) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.normalizeException(FlinkSqlParserImpl.java:235) ~[?:?]
at org.apache.calcite.sql.parser.SqlParser.handleException(SqlParser.java:140) ~[?:?]
at org.apache.calcite.sql.parser.SqlParser.parseStmtList(SqlParser.java:195) ~[?:?]
at org.apache.flink.table.planner.parse.CalciteParser.parseSqlList(CalciteParser.java:77) ~[?:?]
at org.apache.flink.table.planner.delegation.ParserImpl.parse(ParserImpl.java:101) ~[?:?]
at org.apache.flink.table.client.gateway.local.LocalExecutor.lambda$parseStatement$1(LocalExecutor.java:172) ~[flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.gateway.context.ExecutionContext.wrapClassLoader(ExecutionContext.java:88) ~[flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.gateway.local.LocalExecutor.parseStatement(LocalExecutor.java:172) ~[flink-sql-client-1.15.1.jar:1.15.1]
... 11 more
Caused by: org.apache.flink.sql.parser.impl.ParseException: Encountered "value" at line 1, column 32.
Was expecting one of:
(the same list of expected tokens as above)
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.generateParseException(FlinkSqlParserImpl.java:42459) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.jj_consume_token(FlinkSqlParserImpl.java:42270) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.Expression3(FlinkSqlParserImpl.java:21231) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.Expression2b(FlinkSqlParserImpl.java:20828) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.Expression2(FlinkSqlParserImpl.java:20869) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.Expression(FlinkSqlParserImpl.java:20800) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.WhereOpt(FlinkSqlParserImpl.java:15587) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.SqlSelect(FlinkSqlParserImpl.java:9013) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.LeafQuery(FlinkSqlParserImpl.java:714) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.LeafQueryOrExpr(FlinkSqlParserImpl.java:20783) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.QueryOrExpr(FlinkSqlParserImpl.java:20226) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.OrderedQueryOrExpr(FlinkSqlParserImpl.java:588) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.SqlStmt(FlinkSqlParserImpl.java:3980) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.SqlStmtList(FlinkSqlParserImpl.java:2911) ~[?:?]
at org.apache.flink.sql.parser.impl.FlinkSqlParserImpl.parseSqlStmtList(FlinkSqlParserImpl.java:287) ~[?:?]
at org.apache.calcite.sql.parser.SqlParser.parseStmtList(SqlParser.java:193) ~[?:?]
at org.apache.flink.table.planner.parse.CalciteParser.parseSqlList(CalciteParser.java:77) ~[?:?]
at org.apache.flink.table.planner.delegation.ParserImpl.parse(ParserImpl.java:101) ~[?:?]
at org.apache.flink.table.client.gateway.local.LocalExecutor.lambda$parseStatement$1(LocalExecutor.java:172) ~[flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.gateway.context.ExecutionContext.wrapClassLoader(ExecutionContext.java:88) ~[flink-sql-client-1.15.1.jar:1.15.1]
at org.apache.flink.table.client.gateway.local.LocalExecutor.parseStatement(LocalExecutor.java:172) ~[flink-sql-client-1.15.1.jar:1.15.1]
... 11 more
Thanks. I tried to find details that would let me solve this and execute the query smoothly, but failed.
Many thanks to anyone who can give a suggestion.
That's because you haven't used backticks to escape the word "value". Since VALUE is on the list of reserved keywords (see https://nightlies.apache.org/flink/flink-docs-release-1.15/docs/dev/table/sql/overview/#reserved-keywords), the parser reports a grammar error.
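The query parses once the reserved column name is escaped the same way it already is in the CREATE TABLE statement:
select * from kafka_test where `value` like 'star';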

Importing MUI5 TextField results in "Object(...) is not a function"

So, I am basically converting a project from:
Material-UI 0.19 to MUI 5.
babel-core 6.25 to @babel/core 7.16
React 16.3 to React 17.0.2
After installing all the required dependencies (see below) and adjusting the source code to use the new MUI5 components, I am met with the error below:
TypeError: Object(...) is not a function
This is raised by the line
import TextField from '@mui/material/TextField';
Some portion of the stack trace:
.../node_modules/@emotion/cache/dist/emotion-cache.browser.esm.js:287
284 | } : rulesheet(function (rule) {
285 | currentSheet.insert(rule);
286 | })];
> 287 | var serializer = middleware(omnipresentPlugins.concat(stylisPlugins, finalizingPlugins));
288 |
289 | var stylis = function stylis(styles) {
290 | return serialize(compile(styles), serializer);
.../node_modules/@emotion/react/dist/emotion-element-99289b21.browser.esm.js:17
14 | // and we could have a special build just for that
15 | // but this is much easier and the native packages
16 | // might use a different theme context in the future anyway
> 17 | typeof HTMLElement !== 'undefined' ? /* @__PURE__ */createCache({
18 | key: 'css'
19 | }) : null);
20 |
Dependencies added
"#babel/core": "^7.16.0",
"#babel/preset-env": "^7.16.0",
"#babel/preset-react": "^7.16.0",
"#emotion/react": "^11.5.0",
"#emotion/styled": "^11.3.0",
"#material-ui/core": "^5.0.0-beta.5",
"#material-ui/styles": "^5.0.0-beta.5",
"#mui/icons-material": "^5.0.5",
"#mui/material": "^5.0.4",
"babel-core": "^7.0.0-bridge.0",
"babel-loader": "^7.1.1",
"react": "^17.0.2",
"react-dom": "^17.0.2",
"terser-webpack-plugin-legacy": "^1.2.5",
Babel preset
"babel": {
presets": [
"#babel/preset-env",
"#babel/preset-react"
]
},
Webpack entry
{
  test: /\.(js|jsx)$/,
  include: paths.appSrc,
  exclude: /node_modules/,
  loader: require.resolve('babel-loader'),
  options: {
    compact: true,
  },
},

Can't obtain table data via REST [duplicate]

This question already has answers here:
How to obtain Phoenix table data via HBase REST service
(2 answers)
Closed 6 years ago.
I created an HBase table using the Phoenix JDBC Driver in the following code snippet:
Class.forName("org.apache.phoenix.jdbc.PhoenixDriver");
Connection conn = DriverManager.getConnection("jdbc:phoenix:serverurl:/hbase-unsecure");
System.out.println("got connection");
conn.createStatement().execute("CREATE TABLE IF NOT EXISTS phoenixtest (id BIGINT not null primary key, test VARCHAR)");
int inserted = conn.createStatement().executeUpdate("UPSERT INTO phoenixtest VALUES (5, '13%')");
conn.commit();
System.out.println("Inserted or updated " + inserted + " rows");
ResultSet rst = conn.createStatement().executeQuery("select * from phoenixtest");
while (rst.next()) {
    System.out.println(rst.getString(1) + " " + rst.getString(2));
}
The table is created and looping over it works fine.
Now I tried to obtain the table data also via the HBase REST service, as I know it from "native" HBase programming. But when I try e.g.
http://server-url:12345/phoenixtest/schema
I get the following exception:
Not found
org.apache.hadoop.hbase.TableNotFoundException: phoenixtest
at org.apache.hadoop.hbase.client.HTable.getTableDescriptor(HTable.java:597)
at org.apache.hadoop.hbase.rest.SchemaResource.getTableSchema(SchemaResource.java:79)
at org.apache.hadoop.hbase.rest.SchemaResource.get(SchemaResource.java:94)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at com.sun.jersey.spi.container.JavaMethodInvokerFactory$1.invoke(JavaMethodInvokerFactory.java:60)
at com.sun.jersey.server.impl.model.method.dispatch.AbstractResourceMethodDispatchProvider$ResponseOutInvoker._dispatch(AbstractResourceMethodDispatchProvider.java:205)
at com.sun.jersey.server.impl.model.method.dispatch.ResourceJavaMethodDispatcher.dispatch(ResourceJavaMethodDispatcher.java:75)
at com.sun.jersey.server.impl.uri.rules.HttpMethodRule.accept(HttpMethodRule.java:288)
at com.sun.jersey.server.impl.uri.rules.SubLocatorRule.accept(SubLocatorRule.java:134)
at com.sun.jersey.server.impl.uri.rules.RightHandPathRule.accept(RightHandPathRule.java:147)
at com.sun.jersey.server.impl.uri.rules.SubLocatorRule.accept(SubLocatorRule.java:134)
at com.sun.jersey.server.impl.uri.rules.RightHandPathRule.accept(RightHandPathRule.java:147)
at com.sun.jersey.server.impl.uri.rules.ResourceClassRule.accept(ResourceClassRule.java:108)
at com.sun.jersey.server.impl.uri.rules.RightHandPathRule.accept(RightHandPathRule.java:147)
at com.sun.jersey.server.impl.uri.rules.RootResourceClassesRule.accept(RootResourceClassesRule.java:84)
at com.sun.jersey.server.impl.application.WebApplicationImpl._handleRequest(WebApplicationImpl.java:1469)
at com.sun.jersey.server.impl.application.WebApplicationImpl._handleRequest(WebApplicationImpl.java:1400)
at com.sun.jersey.server.impl.application.WebApplicationImpl.handleRequest(WebApplicationImpl.java:1349)
at com.sun.jersey.server.impl.application.WebApplicationImpl.handleRequest(WebApplicationImpl.java:1339)
at com.sun.jersey.spi.container.servlet.WebComponent.service(WebComponent.java:416)
at com.sun.jersey.spi.container.servlet.ServletContainer.service(ServletContainer.java:537)
at com.sun.jersey.spi.container.servlet.ServletContainer.service(ServletContainer.java:699)
at javax.servlet.http.HttpServlet.service(HttpServlet.java:820)
at org.mortbay.jetty.servlet.ServletHolder.handle(ServletHolder.java:511)
at org.mortbay.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1221)
at org.apache.hadoop.hbase.rest.filter.GzipFilter.doFilter(GzipFilter.java:76)
at org.mortbay.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1212)
at org.mortbay.jetty.servlet.ServletHandler.handle(ServletHandler.java:399)
at org.mortbay.jetty.servlet.SessionHandler.handle(SessionHandler.java:182)
at org.mortbay.jetty.security.SecurityHandler.handle(SecurityHandler.java:216)
at org.mortbay.jetty.handler.ContextHandler.handle(ContextHandler.java:767)
at org.mortbay.jetty.handler.HandlerWrapper.handle(HandlerWrapper.java:152)
at org.mortbay.jetty.Server.handle(Server.java:326)
at org.mortbay.jetty.HttpConnection.handleRequest(HttpConnection.java:542)
at org.mortbay.jetty.HttpConnection$RequestHandler.headerComplete(HttpConnection.java:928)
at org.mortbay.jetty.HttpParser.parseNext(HttpParser.java:549)
at org.mortbay.jetty.HttpParser.parseAvailable(HttpParser.java:212)
at org.mortbay.jetty.HttpConnection.handle(HttpConnection.java:404)
at org.mortbay.io.nio.SelectChannelEndPoint.run(SelectChannelEndPoint.java:410)
at org.mortbay.thread.QueuedThreadPool$PoolThread.run(QueuedThreadPool.java:582)
When I call this /schema URL for a table I created with the HBase Java API itself, I get a result:
{ NAME => 'sensor-table', IS_META => 'false', COLUMNS => [ { NAME => 'alert', DATA_BLOCK_ENCODING => 'NONE', BLOOMFILTER => 'ROW', REPLICATION_SCOPE => '0', COMPRESSION => 'SNAPPY', VERSIONS => '1', MIN_VERSIONS => '0', TTL => '2147483647', KEEP_DELETED_CELLS => 'FALSE', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true' }, { NAME => 'sensor', DATA_BLOCK_ENCODING => 'NONE', BLOOMFILTER => 'ROW', REPLICATION_SCOPE => '0', COMPRESSION => 'SNAPPY', VERSIONS => '1', MIN_VERSIONS => '0', TTL => '2147483647', KEEP_DELETED_CELLS => 'FALSE', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true' } ] }
What's the difference between a "native" HBase table and one created with Phoenix? Why can't I get the table data / info via the HBase REST service?
I got it: you have to write the table name in upper-case letters:
http://server-url:12345/PHOENIXTEST/schema
This holds even if you wrote the table name in lower-case letters in the CREATE statement!
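That matches Phoenix's identifier rules: unquoted names are normalized to upper case, so the backing HBase table is created as PHOENIXTEST. If a case-sensitive name is needed, the DDL from the question could double-quote it, for example:
CREATE TABLE IF NOT EXISTS "phoenixtest" (id BIGINT NOT NULL PRIMARY KEY, test VARCHAR)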

Redis with Redisson framework unpredictable behavior in scala

I use Redis and the Redisson framework with Scala, relying on its distributed Set and Publish/Subscribe commands. When the Set is accessed after the system receives a message, the whole behavior of the framework becomes unpredictable. Here are the exception and the source code that produces it. Any ideas?
import org.redisson.RedissonClient
import org.redisson._
import org.redisson.core.{ RTopic, MessageListener }
import org.scalatest._
import org.slf4j.LoggerFactory

object RedissonTest {
  val config = new Config().setUseLinuxNativeEpoll(true)
  config.useSingleServer().setAddress("127.0.0.1:6379")
  val redis = Redisson.create(config)
  val set_test = java.util.UUID.randomUUID
  val system_topic = "system_bus"

  class RedissonTestSet extends FlatSpec with Matchers {
    val topic: RTopic[String] = redis.getTopic(system_topic)
    val redisSet: java.util.Set[String] = redis.getSet(set_test.toString)

    it should "reproduce the exception" in {
      val listener = new MessageListener[String]() {
        override def onMessage(channel: String, message: String) {
          checkSet
        }
      }
      topic.addListener(listener)
      for (i <- 1 to 1000) {
        redisSet.add(i.toString)
      }
      topic.publish("hey this is the bug")
    }

    def checkSet {
      for (i <- 1 to 1000) {
        if (redisSet.contains(i.toString)) {
        }
      }
    }
  }
}
The exception:
io.netty.handler.codec.DecoderException: io.netty.util.concurrent.BlockingOperationException: DefaultPromise#447696da(incomplete)
at io.netty.handler.codec.ReplayingDecoder.callDecode(ReplayingDecoder.java:431) ~[netty-codec-4.0.34.Final.jar:4.0.34.Final]
at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:244) ~[netty-codec-4.0.34.Final.jar:4.0.34.Final]
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:307) [netty-transport-4.0.34.Final.jar:4.0.34.Final]
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:293) [netty-transport-4.0.34.Final.jar:4.0.34.Final]
at io.netty.channel.ChannelInboundHandlerAdapter.channelRead(ChannelInboundHandlerAdapter.java:86) [netty-transport-4.0.34.Final.jar:4.0.34.Final]
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:307) [netty-transport-4.0.34.Final.jar:4.0.34.Final]
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:293) [netty-transport-4.0.34.Final.jar:4.0.34.Final]
at io.netty.channel.ChannelInboundHandlerAdapter.channelRead(ChannelInboundHandlerAdapter.java:86) [netty-transport-4.0.34.Final.jar:4.0.34.Final]
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:307) [netty-transport-4.0.34.Final.jar:4.0.34.Final]
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:293) [netty-transport-4.0.34.Final.jar:4.0.34.Final]
at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:840) [netty-transport-4.0.34.Final.jar:4.0.34.Final]
at io.netty.channel.epoll.AbstractEpollStreamChannel$EpollStreamUnsafe.epollInReady(AbstractEpollStreamChannel.java:830) [netty-transport-native-epoll-4.0.34.Final-linux-x86_64.jar:na]
at io.netty.channel.epoll.EpollEventLoop.processReady(EpollEventLoop.java:348) [netty-transport-native-epoll-4.0.34.Final-linux-x86_64.jar:na]
at io.netty.channel.epoll.EpollEventLoop.run(EpollEventLoop.java:264) [netty-transport-native-epoll-4.0.34.Final-linux-x86_64.jar:na]
at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:112) [netty-common-4.0.34.Final.jar:4.0.34.Final]
at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137) [netty-common-4.0.34.Final.jar:4.0.34.Final]
at java.lang.Thread.run(Thread.java:745) [na:1.8.0_77]
Caused by: io.netty.util.concurrent.BlockingOperationException: DefaultPromise#447696da(incomplete)
at io.netty.util.concurrent.DefaultPromise.checkDeadLock(DefaultPromise.java:391) ~[netty-common-4.0.34.Final.jar:4.0.34.Final]
at io.netty.util.concurrent.DefaultPromise.awaitUninterruptibly(DefaultPromise.java:284) ~[netty-common-4.0.34.Final.jar:4.0.34.Final]
at io.netty.util.concurrent.DefaultPromise.awaitUninterruptibly(DefaultPromise.java:33) ~[netty-common-4.0.34.Final.jar:4.0.34.Final]
at org.redisson.command.CommandAsyncService.get(CommandAsyncService.java:84) ~[redisson-2.2.9.jar:na]
at org.redisson.RedissonObject.get(RedissonObject.java:49) ~[redisson-2.2.9.jar:na]
at org.redisson.RedissonSet.contains(RedissonSet.java:70) ~[redisson-2.2.9.jar:na]
at com.web3.RedissonTest$RedissonTestSet$$anonfun$checkSet$1.apply$mcVI$sp(ReddisonSetPubSubTest.scala:45) ~[test-classes/:na]
at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141) ~[scala-library-2.10.4.jar:na]
at com.web3.RedissonTest$RedissonTestSet.checkSet(ReddisonSetPubSubTest.scala:44) ~[test-classes/:na]
at com.web3.RedissonTest$RedissonTestSet$$anonfun$1$$anon$1.onMessage(ReddisonSetPubSubTest.scala:30) ~[test-classes/:na]
at com.web3.RedissonTest$RedissonTestSet$$anonfun$1$$anon$1.onMessage(ReddisonSetPubSubTest.scala:27) ~[test-classes/:na]
at org.redisson.PubSubMessageListener.onMessage(PubSubMessageListener.java:73) ~[redisson-2.2.9.jar:na]
at org.redisson.client.RedisPubSubConnection.onMessage(RedisPubSubConnection.java:68) ~[redisson-2.2.9.jar:na]
at org.redisson.client.handler.CommandDecoder.handleMultiResult(CommandDecoder.java:277) ~[redisson-2.2.9.jar:na]
at org.redisson.client.handler.CommandDecoder.decodeMulti(CommandDecoder.java:242) ~[redisson-2.2.9.jar:na]
at org.redisson.client.handler.CommandDecoder.decode(CommandDecoder.java:217) ~[redisson-2.2.9.jar:na]
at org.redisson.client.handler.CommandDecoder.decode(CommandDecoder.java:97) ~[redisson-2.2.9.jar:na]
at io.netty.handler.codec.ReplayingDecoder.callDecode(ReplayingDecoder.java:376) ~[netty-codec-4.0.34.Final.jar:4.0.34.Final]
... 16 common frames omitted
This bug is fixed in Redisson 2.2.11.
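For context (my reading of the stack trace, not part of the original answer): the listener is invoked on a Netty event-loop thread, and RedissonSet.contains then blocks on a promise from that same thread, which Netty rejects (see checkDeadLock in the trace). Until the fixed version can be used, a minimal sketch of the usual workaround is to hand the blocking work off to a separate thread:
import java.util.concurrent.Executors

// Offload the blocking Set access from the Netty event-loop thread
// that delivers pub/sub messages onto a dedicated worker thread.
val worker = Executors.newSingleThreadExecutor()

val listener = new MessageListener[String]() {
  override def onMessage(channel: String, message: String) {
    worker.submit(new Runnable {
      override def run() { checkSet } // blocking calls are safe on this thread
    })
  }
}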

SCP Plugin configuration using Groovy script

I am facing issues when configuring the Jenkins SCP plugin using Groovy scripts. I tried several approaches, but I did not succeed in adding SCP site entries to the Jenkins configuration.
I tried the following:
import be.certipost.hudson.plugin.*
import jenkins.model.*
import hudson.model.*
import hudson.util.*
import hudson.util.CopyOnWriteList
import org.kohsuke.stapler.StaplerRequest
import org.kohsuke.stapler.QueryParameter

// Get the Jenkins instance.
def instance = Jenkins.getInstance()

// Get the SCP plugin descriptor.
pluginURL = "be.certipost.hudson.plugin.SCPRepositoryPublisher"
def desc = instance.getDescriptor(pluginURL)

// Create a new SCPSite.
scpsite = new SCPSite('hostname', '22', 'username', 'password', 'rootRepositoryPath')

// Store it in an ArrayList.
ArrayList<SCPSite> siteList = new ArrayList<SCPSite>()
siteList.add(scpsite)

// Also try to store it in a CopyOnWriteList.
lossitios = new CopyOnWriteList<SCPSite>()
lossitios.add(scpsite)
Each approach fails with an exception: either the replaceBy() method is not accessible, or I get an UnsupportedOperationException.
// Update the configuration (both approaches fail)
//desc.sites.replaceBy(siteList)
desc.sites.replaceBy(lossitios)
desc.save()
I also tried an approach I found here (https://github.com/jenkinsci/slack-plugin/issues/23):
scp = instance.getExtensionList(
    be.certipost.hudson.plugin.SCPRepositoryPublisher.DescriptorImpl.class
)[0]
def params = [
    hostname = "hostname",
    port = 22,
    username = "username",
    password = "password",
    rootRepositoryPath = "rootRepositoryPath",
]
def req = [
    getParameter: { name -> params[name] }
] as org.kohsuke.stapler.StaplerRequest
desc.configure(req, null)
desc.save()
EDIT
The exception I get when I use the replaceBy() method is:
groovy.lang.MissingMethodException: No signature of method: [Lbe.certipost.hudson.plugin.SCPSite;.replaceBy() is applicable for argument types: (java.util.ArrayList) values: [[be.certipost.hudson.plugin.SCPSite@44605afb]]
at org.codehaus.groovy.runtime.ScriptBytecodeAdapter.unwrap(ScriptBytecodeAdapter.java:55)
at org.codehaus.groovy.runtime.callsite.PojoMetaClassSite.call(PojoMetaClassSite.java:46)
at org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCall(CallSiteArray.java:42)
at org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:108)
at org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:116)
at Script1.run(Script1.groovy:36)
at groovy.lang.GroovyShell.evaluate(GroovyShell.java:580)
at groovy.lang.GroovyShell.evaluate(GroovyShell.java:618)
at groovy.lang.GroovyShell.evaluate(GroovyShell.java:589)
at hudson.util.RemotingDiagnostics$Script.call(RemotingDiagnostics.java:142)
at hudson.util.RemotingDiagnostics$Script.call(RemotingDiagnostics.java:114)
at hudson.remoting.LocalChannel.call(LocalChannel.java:45)
at hudson.util.RemotingDiagnostics.executeGroovy(RemotingDiagnostics.java:111)
at jenkins.model.Jenkins._doScript(Jenkins.java:3566)
at jenkins.model.Jenkins.doScript(Jenkins.java:3538)
at sun.reflect.GeneratedMethodAccessor275.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.kohsuke.stapler.Function$InstanceFunction.invoke(Function.java:298)
at org.kohsuke.stapler.Function.bindAndInvoke(Function.java:161)
at org.kohsuke.stapler.Function.bindAndInvokeAndServeResponse(Function.java:96)
at org.kohsuke.stapler.MetaClass$1.doDispatch(MetaClass.java:121)
at org.kohsuke.stapler.NameBasedDispatcher.dispatch(NameBasedDispatcher.java:53)
at org.kohsuke.stapler.Stapler.tryInvoke(Stapler.java:746)
at org.kohsuke.stapler.Stapler.invoke(Stapler.java:876)
at org.kohsuke.stapler.Stapler.invoke(Stapler.java:649)
at org.kohsuke.stapler.Stapler.service(Stapler.java:238)
at javax.servlet.http.HttpServlet.service(HttpServlet.java:848)
at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:686)
at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1494)
at hudson.util.PluginServletFilter$1.doFilter(PluginServletFilter.java:132)
at hudson.plugins.greenballs.GreenBallFilter.doFilter(GreenBallFilter.java:59)
at hudson.util.PluginServletFilter$1.doFilter(PluginServletFilter.java:129)
at net.bull.javamelody.MonitoringFilter.doFilter(MonitoringFilter.java:200)
at net.bull.javamelody.MonitoringFilter.doFilter(MonitoringFilter.java:178)
at net.bull.javamelody.PluginMonitoringFilter.doFilter(PluginMonitoringFilter.java:85)
at org.jvnet.hudson.plugins.monitoring.HudsonMonitoringFilter.doFilter(HudsonMonitoringFilter.java:99)
at hudson.util.PluginServletFilter$1.doFilter(PluginServletFilter.java:129)
at hudson.util.PluginServletFilter.doFilter(PluginServletFilter.java:123)
at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1482)
at hudson.security.csrf.CrumbFilter.doFilter(CrumbFilter.java:49)
at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1482)
at hudson.security.ChainedServletFilter$1.doFilter(ChainedServletFilter.java:84)
at hudson.security.ChainedServletFilter.doFilter(ChainedServletFilter.java:76)
at hudson.security.HudsonFilter.doFilter(HudsonFilter.java:171)
at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1482)
at org.kohsuke.stapler.compression.CompressionFilter.doFilter(CompressionFilter.java:49)
at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1482)
at hudson.util.CharacterEncodingFilter.doFilter(CharacterEncodingFilter.java:81)
at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1482)
at org.kohsuke.stapler.DiagnosticThreadNameFilter.doFilter(DiagnosticThreadNameFilter.java:30)
at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1474)
at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:499)
at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:137)
at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:533)
at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:231)
at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1086)
at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:428)
at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:193)
at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1020)
at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135)
at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116)
at org.eclipse.jetty.server.Server.handle(Server.java:370)
at org.eclipse.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:489)
at org.eclipse.jetty.server.AbstractHttpConnection.content(AbstractHttpConnection.java:960)
at org.eclipse.jetty.server.AbstractHttpConnection$RequestHandler.content(AbstractHttpConnection.java:1021)
at org.eclipse.jetty.http.HttpParser.parseNext(HttpParser.java:865)
at org.eclipse.jetty.http.HttpParser.parseAvailable(HttpParser.java:240)
at org.eclipse.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82)
at org.eclipse.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:668)
at org.eclipse.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:52)
at winstone.BoundedExecutorService$1.run(BoundedExecutorService.java:77)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
The exception I get when using the desc.configure() approach is:
java.lang.UnsupportedOperationException
at org.codehaus.groovy.runtime.ConvertedMap.invokeCustom(ConvertedMap.java:49)
at org.codehaus.groovy.runtime.ConversionHandler.invoke(ConversionHandler.java:82)
at com.sun.proxy.$Proxy59.bindParametersToList(Unknown Source)
at be.certipost.hudson.plugin.SCPRepositoryPublisher$DescriptorImpl.configure(SCPRepositoryPublisher.java:298)
at be.certipost.hudson.plugin.SCPRepositoryPublisher$DescriptorImpl$configure.call(Unknown Source)
at org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCall(CallSiteArray.java:42)
at org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:108)
at org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:120)
at Script1.run(Script1.groovy:53)
at groovy.lang.GroovyShell.evaluate(GroovyShell.java:580)
at groovy.lang.GroovyShell.evaluate(GroovyShell.java:618)
at groovy.lang.GroovyShell.evaluate(GroovyShell.java:589)
at hudson.util.RemotingDiagnostics$Script.call(RemotingDiagnostics.java:142)
at hudson.util.RemotingDiagnostics$Script.call(RemotingDiagnostics.java:114)
at hudson.remoting.LocalChannel.call(LocalChannel.java:45)
at hudson.util.RemotingDiagnostics.executeGroovy(RemotingDiagnostics.java:111)
at jenkins.model.Jenkins._doScript(Jenkins.java:3566)
at jenkins.model.Jenkins.doScript(Jenkins.java:3538)
at sun.reflect.GeneratedMethodAccessor275.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.kohsuke.stapler.Function$InstanceFunction.invoke(Function.java:298)
at org.kohsuke.stapler.Function.bindAndInvoke(Function.java:161)
at org.kohsuke.stapler.Function.bindAndInvokeAndServeResponse(Function.java:96)
at org.kohsuke.stapler.MetaClass$1.doDispatch(MetaClass.java:121)
at org.kohsuke.stapler.NameBasedDispatcher.dispatch(NameBasedDispatcher.java:53)
at org.kohsuke.stapler.Stapler.tryInvoke(Stapler.java:746)
at org.kohsuke.stapler.Stapler.invoke(Stapler.java:876)
at org.kohsuke.stapler.Stapler.invoke(Stapler.java:649)
at org.kohsuke.stapler.Stapler.service(Stapler.java:238)
at javax.servlet.http.HttpServlet.service(HttpServlet.java:848)
at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:686)
at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1494)
at hudson.util.PluginServletFilter$1.doFilter(PluginServletFilter.java:132)
at hudson.plugins.greenballs.GreenBallFilter.doFilter(GreenBallFilter.java:59)
at hudson.util.PluginServletFilter$1.doFilter(PluginServletFilter.java:129)
at net.bull.javamelody.MonitoringFilter.doFilter(MonitoringFilter.java:200)
at net.bull.javamelody.MonitoringFilter.doFilter(MonitoringFilter.java:178)
at net.bull.javamelody.PluginMonitoringFilter.doFilter(PluginMonitoringFilter.java:85)
at org.jvnet.hudson.plugins.monitoring.HudsonMonitoringFilter.doFilter(HudsonMonitoringFilter.java:99)
at hudson.util.PluginServletFilter$1.doFilter(PluginServletFilter.java:129)
at hudson.util.PluginServletFilter.doFilter(PluginServletFilter.java:123)
at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1482)
at hudson.security.csrf.CrumbFilter.doFilter(CrumbFilter.java:49)
at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1482)
at hudson.security.ChainedServletFilter$1.doFilter(ChainedServletFilter.java:84)
at hudson.security.ChainedServletFilter.doFilter(ChainedServletFilter.java:76)
at hudson.security.HudsonFilter.doFilter(HudsonFilter.java:171)
at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1482)
at org.kohsuke.stapler.compression.CompressionFilter.doFilter(CompressionFilter.java:49)
at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1482)
at hudson.util.CharacterEncodingFilter.doFilter(CharacterEncodingFilter.java:81)
at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1482)
at org.kohsuke.stapler.DiagnosticThreadNameFilter.doFilter(DiagnosticThreadNameFilter.java:30)
at org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1474)
at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:499)
at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:137)
at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:533)
at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:231)
at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1086)
at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:428)
at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:193)
at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1020)
at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135)
at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116)
at org.eclipse.jetty.server.Server.handle(Server.java:370)
at org.eclipse.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:489)
at org.eclipse.jetty.server.AbstractHttpConnection.content(AbstractHttpConnection.java:960)
at org.eclipse.jetty.server.AbstractHttpConnection$RequestHandler.content(AbstractHttpConnection.java:1021)
at org.eclipse.jetty.http.HttpParser.parseNext(HttpParser.java:865)
at org.eclipse.jetty.http.HttpParser.parseAvailable(HttpParser.java:240)
at org.eclipse.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82)
at org.eclipse.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:668)
at org.eclipse.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:52)
at winstone.BoundedExecutorService$1.run(BoundedExecutorService.java:77)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
Where Script1.run(Script1.groovy:53) is my call to desc.configure(req, null).
Recently had to solve this; the problem is in the request you are creating. If you look at the source code for the plugin at https://github.com/jenkinsci/scp-plugin/blob/7ce951d1a3861c6837d71f87cbb553feb7b6c222/src/main/java/be/certipost/hudson/plugin/SCPRepositoryPublisher.java#L318, you see:
@Override
public boolean configure(StaplerRequest req, JSONObject formData) {
    sites.replaceBy(req.bindJSONToList(SCPSite.class,
            formData.get("sites")));
    save();
    return true;
}
The call you need to provide on the StaplerRequest object is bindJSONToList, and secondly you also need to provide a JSONObject on which '.get("sites")' can be called.
The following code solves both of these (some improvements are probably possible, since I just threw this together to get past the same issue):
import be.certipost.hudson.plugin.*;
import jenkins.model.Jenkins;
import org.kohsuke.stapler.StaplerRequest;
import net.sf.json.JSONObject;

def instance = Jenkins.getInstance()
pluginURL = "be.certipost.hudson.plugin.SCPRepositoryPublisher"
def desc = instance.getDescriptor(pluginURL)
params = [
    sites: [
        // one site per line as an array of options:
        // displayname, hostname, port, username, password, keyfile, path
        ["site-name", "hostname", "22", "username", "password", null, "/root/path"]
    ]
]
scp_sites = params['sites'].collect { it -> it as SCPSite }
// Notice the only call we are providing for the request object is
// bindJSONToList; we don't care about the original arguments since
// we just return the predefined data.
def req = [
    bindJSONToList: { klazz, data -> scp_sites }
] as org.kohsuke.stapler.StaplerRequest
desc.configure(req, params as JSONObject)
desc.save()