Cannot connect Pentaho Report Designer to a MySQL database (charts)

I am trying to use Pentaho to create some report charts. I would also like to ask whether Pentaho supports multiple data sources in one report (e.g., a chart that takes its data from two different databases).
When I set up the connection configuration and click Test, it generates this error:
Error connecting to database [sqltest] : org.pentaho.di.core.exception.KettleDatabaseException:
Error occured while trying to connect to the database
Driver class 'org.gjt.mm.mysql.Driver' could not be found, make sure the 'MySQL' driver (jar file) is installed.
org.gjt.mm.mysql.Driver
org.pentaho.di.core.exception.KettleDatabaseException:
Error occured while trying to connect to the database
Driver class 'org.gjt.mm.mysql.Driver' could not be found, make sure the 'MySQL' driver (jar file) is installed.
org.gjt.mm.mysql.Driver
at org.pentaho.di.core.database.Database.normalConnect(Database.java:415)
at org.pentaho.di.core.database.Database.connect(Database.java:353)
at org.pentaho.di.core.database.Database.connect(Database.java:306)
at org.pentaho.di.core.database.Database.connect(Database.java:294)
at org.pentaho.di.core.database.DatabaseFactory.getConnectionTestReport(DatabaseFactory.java:84)
at org.pentaho.di.core.database.DatabaseMeta.testConnection(DatabaseMeta.java:2459)
at org.pentaho.ui.database.event.DataHandler.testDatabaseConnection(DataHandler.java:541)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:601)
at org.pentaho.ui.xul.impl.AbstractXulDomContainer.invoke(AbstractXulDomContainer.java:329)
at org.pentaho.ui.xul.swing.tags.SwingButton$OnClickRunnable.run(SwingButton.java:58)
at java.awt.event.InvocationEvent.dispatch(InvocationEvent.java:251)
at java.awt.EventQueue.dispatchEventImpl(EventQueue.java:705)
at java.awt.EventQueue.access$000(EventQueue.java:101)
at java.awt.EventQueue$3.run(EventQueue.java:666)
at java.awt.EventQueue$3.run(EventQueue.java:664)
at java.security.AccessController.doPrivileged(Native Method)
at java.security.ProtectionDomain$1.doIntersectionPrivilege(ProtectionDomain.java:76)
at java.awt.EventQueue.dispatchEvent(EventQueue.java:675)
at java.awt.EventDispatchThread.pumpOneEventForFilters(EventDispatchThread.java:211)
at java.awt.EventDispatchThread.pumpEventsForFilter(EventDispatchThread.java:128)
at java.awt.EventDispatchThread.pumpEventsForFilter(EventDispatchThread.java:121)
at java.awt.WaitDispatchSupport$2.run(WaitDispatchSupport.java:182)
at java.awt.WaitDispatchSupport$4.run(WaitDispatchSupport.java:221)
at java.security.AccessController.doPrivileged(Native Method)
at java.awt.WaitDispatchSupport.enter(WaitDispatchSupport.java:219)
at java.awt.Dialog.show(Dialog.java:1072)
at java.awt.Component.show(Component.java:1650)
at java.awt.Component.setVisible(Component.java:1602)
at java.awt.Window.setVisible(Window.java:1013)
at java.awt.Dialog.setVisible(Dialog.java:1003)
at org.pentaho.ui.xul.swing.tags.SwingDialog.show(SwingDialog.java:238)
at org.pentaho.reporting.ui.datasources.jdbc.ui.XulDatabaseDialog.open(XulDatabaseDialog.java:254)
at org.pentaho.reporting.ui.datasources.jdbc.ui.ConnectionPanel$AddDataSourceAction.actionPerformed(ConnectionPanel.java:252)
at javax.swing.AbstractButton.fireActionPerformed(AbstractButton.java:2018)
at javax.swing.AbstractButton$Handler.actionPerformed(AbstractButton.java:2341)
at javax.swing.DefaultButtonModel.fireActionPerformed(DefaultButtonModel.java:402)
at javax.swing.DefaultButtonModel.setPressed(DefaultButtonModel.java:259)
at javax.swing.plaf.basic.BasicButtonListener.mouseReleased(BasicButtonListener.java:252)
at java.awt.AWTEventMulticaster.mouseReleased(AWTEventMulticaster.java:289)
at java.awt.AWTEventMulticaster.mouseReleased(AWTEventMulticaster.java:289)
at java.awt.Component.processMouseEvent(Component.java:6504)
at javax.swing.JComponent.processMouseEvent(JComponent.java:3321)
at java.awt.Component.processEvent(Component.java:6269)
at java.awt.Container.processEvent(Container.java:2229)
at java.awt.Component.dispatchEventImpl(Component.java:4860)
at java.awt.Container.dispatchEventImpl(Container.java:2287)
at java.awt.Component.dispatchEvent(Component.java:4686)
at java.awt.LightweightDispatcher.retargetMouseEvent(Container.java:4832)
at java.awt.LightweightDispatcher.processMouseEvent(Container.java:4492)
at java.awt.LightweightDispatcher.dispatchEvent(Container.java:4422)
at java.awt.Container.dispatchEventImpl(Container.java:2273)
at java.awt.Window.dispatchEventImpl(Window.java:2713)
at java.awt.Component.dispatchEvent(Component.java:4686)
at java.awt.EventQueue.dispatchEventImpl(EventQueue.java:707)
at java.awt.EventQueue.access$000(EventQueue.java:101)
at java.awt.EventQueue$3.run(EventQueue.java:666)
at java.awt.EventQueue$3.run(EventQueue.java:664)
at java.security.AccessController.doPrivileged(Native Method)
at java.security.ProtectionDomain$1.doIntersectionPrivilege(ProtectionDomain.java:76)
at java.security.ProtectionDomain$1.doIntersectionPrivilege(ProtectionDomain.java:87)
at java.awt.EventQueue$4.run(EventQueue.java:680)
at java.awt.EventQueue$4.run(EventQueue.java:678)
at java.security.AccessController.doPrivileged(Native Method)
at java.security.ProtectionDomain$1.doIntersectionPrivilege(ProtectionDomain.java:76)
at java.awt.EventQueue.dispatchEvent(EventQueue.java:677)
at java.awt.EventDispatchThread.pumpOneEventForFilters(EventDispatchThread.java:211)
at java.awt.EventDispatchThread.pumpEventsForFilter(EventDispatchThread.java:128)
at java.awt.EventDispatchThread.pumpEventsForFilter(EventDispatchThread.java:121)
at java.awt.WaitDispatchSupport$2.run(WaitDispatchSupport.java:182)
at java.awt.WaitDispatchSupport$4.run(WaitDispatchSupport.java:221)
at java.security.AccessController.doPrivileged(Native Method)
at java.awt.WaitDispatchSupport.enter(WaitDispatchSupport.java:219)
at java.awt.Dialog.show(Dialog.java:1072)
at java.awt.Component.show(Component.java:1650)
at java.awt.Component.setVisible(Component.java:1602)
at java.awt.Window.setVisible(Window.java:1013)
at java.awt.Dialog.setVisible(Dialog.java:1003)
at org.pentaho.reporting.libraries.designtime.swing.CommonDialog.setVisible(CommonDialog.java:281)
at org.pentaho.reporting.libraries.designtime.swing.CommonDialog.performEdit(CommonDialog.java:193)
at org.pentaho.reporting.ui.datasources.jdbc.ui.JdbcDataSourceDialog.performConfiguration(JdbcDataSourceDialog.java:788)
at org.pentaho.reporting.ui.datasources.jdbc.JdbcDataSourcePlugin.performEdit(JdbcDataSourcePlugin.java:71)
at org.pentaho.reporting.designer.core.actions.report.AddDataFactoryAction.actionPerformed(AddDataFactoryAction.java:78)
at javax.swing.AbstractButton.fireActionPerformed(AbstractButton.java:2018)
at javax.swing.AbstractButton$Handler.actionPerformed(AbstractButton.java:2341)
at javax.swing.DefaultButtonModel.fireActionPerformed(DefaultButtonModel.java:402)
at javax.swing.DefaultButtonModel.setPressed(DefaultButtonModel.java:259)
at javax.swing.AbstractButton.doClick(AbstractButton.java:376)
at javax.swing.plaf.basic.BasicMenuItemUI.doClick(BasicMenuItemUI.java:833)
at javax.swing.plaf.basic.BasicMenuItemUI$Handler.mouseReleased(BasicMenuItemUI.java:877)
at java.awt.AWTEventMulticaster.mouseReleased(AWTEventMulticaster.java:289)
at java.awt.Component.processMouseEvent(Component.java:6504)
at javax.swing.JComponent.processMouseEvent(JComponent.java:3321)
at java.awt.Component.processEvent(Component.java:6269)
at java.awt.Container.processEvent(Container.java:2229)
at java.awt.Component.dispatchEventImpl(Component.java:4860)
at java.awt.Container.dispatchEventImpl(Container.java:2287)
at java.awt.Component.dispatchEvent(Component.java:4686)
at java.awt.LightweightDispatcher.retargetMouseEvent(Container.java:4832)
at java.awt.LightweightDispatcher.processMouseEvent(Container.java:4492)
at java.awt.LightweightDispatcher.dispatchEvent(Container.java:4422)
at java.awt.Container.dispatchEventImpl(Container.java:2273)
at java.awt.Window.dispatchEventImpl(Window.java:2713)
at java.awt.Component.dispatchEvent(Component.java:4686)
at java.awt.EventQueue.dispatchEventImpl(EventQueue.java:707)
at java.awt.EventQueue.access$000(EventQueue.java:101)
at java.awt.EventQueue$3.run(EventQueue.java:666)
at java.awt.EventQueue$3.run(EventQueue.java:664)
at java.security.AccessController.doPrivileged(Native Method)
at java.security.ProtectionDomain$1.doIntersectionPrivilege(ProtectionDomain.java:76)
at java.security.ProtectionDomain$1.doIntersectionPrivilege(ProtectionDomain.java:87)
at java.awt.EventQueue$4.run(EventQueue.java:680)
at java.awt.EventQueue$4.run(EventQueue.java:678)
at java.security.AccessController.doPrivileged(Native Method)
at java.security.ProtectionDomain$1.doIntersectionPrivilege(ProtectionDomain.java:76)
at java.awt.EventQueue.dispatchEvent(EventQueue.java:677)
at java.awt.EventDispatchThread.pumpOneEventForFilters(EventDispatchThread.java:211)
at java.awt.EventDispatchThread.pumpEventsForFilter(EventDispatchThread.java:128)
at java.awt.EventDispatchThread.pumpEventsForHierarchy(EventDispatchThread.java:117)
at java.awt.EventDispatchThread.pumpEvents(EventDispatchThread.java:113)
at java.awt.EventDispatchThread.pumpEvents(EventDispatchThread.java:105)
at java.awt.EventDispatchThread.run(EventDispatchThread.java:90)
Caused by: org.pentaho.di.core.exception.KettleDatabaseException:
Driver class 'org.gjt.mm.mysql.Driver' could not be found, make sure the 'MySQL' driver (jar file) is installed.
org.gjt.mm.mysql.Driver
at org.pentaho.di.core.database.Database.connectUsingClass(Database.java:474)
at org.pentaho.di.core.database.Database.normalConnect(Database.java:399)
... 123 more
Caused by: java.lang.ClassNotFoundException: org.gjt.mm.mysql.Driver
at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:423)
at java.lang.ClassLoader.loadClass(ClassLoader.java:356)
at org.pentaho.di.core.database.Database.connectUsingClass(Database.java:467)
... 124 more
Hostname : localhost
Port : 3306
Database name : alger_fa

No, Pentaho does not support multiple data sources in one report.
Have you placed the MySQL connector jar file (mysql-connector-java-5.1.10) in the location below?
\pentaho_report_designer_working\report-designer\lib\jdbc\
If not, download the jar file and copy it into the location mentioned above; that will solve your problem.
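If you want to double-check that the jar is picked up outside of Report Designer, a bare JDBC test like this sketch should load the same driver class the error complains about. Host, port, and database are taken from the question; the user and password are placeholders:

import java.sql.Connection;
import java.sql.DriverManager;

public class MySqlDriverCheck {
    public static void main(String[] args) throws Exception {
        // The legacy class name Kettle asks for; Connector/J still ships
        // org.gjt.mm.mysql.Driver as an alias of com.mysql.jdbc.Driver.
        Class.forName("org.gjt.mm.mysql.Driver");
        // Placeholder credentials; adjust to your setup.
        Connection conn = DriverManager.getConnection(
                "jdbc:mysql://localhost:3306/alger_fa", "user", "password");
        System.out.println("Connected: " + !conn.isClosed());
        conn.close();
    }
}

If Class.forName throws ClassNotFoundException here as well, the jar is simply not on the classpath you think it is.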

Related

Mongo-Hadoop connector issue

I run my Word Count jar file but get this error:
NoClassDefFoundError: org/apache/commons/lang/StringUtils
Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/commons/lang/StringUtils
at com.mongodb.hadoop.util.MongoConfigUtil.getMongoURIs(MongoConfigUtil.java:409)
at com.mongodb.hadoop.util.MongoConfigUtil.getOutputURIs(MongoConfigUtil.java:598)
at com.mongodb.hadoop.MongoOutputFormat.checkOutputSpecs(MongoOutputFormat.java:32)
at org.apache.hadoop.mapreduce.JobSubmitter.checkSpecs(JobSubmitter.java:277)
at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:143)
at org.apache.hadoop.mapreduce.Job$11.run(Job.java:1571)
at org.apache.hadoop.mapreduce.Job$11.run(Job.java:1568)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1878)
at org.apache.hadoop.mapreduce.Job.submit(Job.java:1568)
at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1589)
at WordCount.main(WordCount.java:55)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

How do I configure the YARN address for yarn-client mode in spark?

From a remote scala program, using Spark 1.3, how do I initialize the sparkContext so that I can connect to Spark running on YARN? i.e. where do I put the address of the YARN node(s)?
Currently my program contains:
val conf = new SparkConf().setMaster("yarn-client").setAppName("MyApp")
val sc = new SparkContext(conf)
and it yields
[error] (run-main-0) java.lang.ExceptionInInitializerError
java.lang.ExceptionInInitializerError
at org.apache.spark.util.Utils$.getSparkOrYarnConfig(Utils.scala:1959)
at org.apache.spark.storage.BlockManager.<init>(BlockManager.scala:104)
at org.apache.spark.storage.BlockManager.<init>(BlockManager.scala:179)
at org.apache.spark.SparkEnv$.create(SparkEnv.scala:310)
at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:163)
at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:269)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:272)
at SparkExampleLocalDriver$.main(SparkExample.scala:9)
at SparkExampleLocalDriver.main(SparkExample.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
Caused by: org.apache.spark.SparkException: Unable to load YARN support
at org.apache.spark.deploy.SparkHadoopUtil$.liftedTree1$1(SparkHadoopUtil.scala:217)
at org.apache.spark.deploy.SparkHadoopUtil$.<init>(SparkHadoopUtil.scala:212)
at org.apache.spark.deploy.SparkHadoopUtil$.<clinit>(SparkHadoopUtil.scala)
at org.apache.spark.util.Utils$.getSparkOrYarnConfig(Utils.scala:1959)
at org.apache.spark.storage.BlockManager.<init>(BlockManager.scala:104)
at org.apache.spark.storage.BlockManager.<init>(BlockManager.scala:179)
at org.apache.spark.SparkEnv$.create(SparkEnv.scala:310)
at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:163)
at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:269)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:272)
at SparkExampleLocalDriver$.main(SparkExample.scala:9)
at SparkExampleLocalDriver.main(SparkExample.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
Caused by: java.lang.ClassNotFoundException: org.apache.spark.deploy.yarn.YarnSparkHadoopUtil
at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:195)
at org.apache.spark.deploy.SparkHadoopUtil$.liftedTree1$1(SparkHadoopUtil.scala:213)
at org.apache.spark.deploy.SparkHadoopUtil$.<init>(SparkHadoopUtil.scala:212)
at org.apache.spark.deploy.SparkHadoopUtil$.<clinit>(SparkHadoopUtil.scala)
at org.apache.spark.util.Utils$.getSparkOrYarnConfig(Utils.scala:1959)
at org.apache.spark.storage.BlockManager.<init>(BlockManager.scala:104)
at org.apache.spark.storage.BlockManager.<init>(BlockManager.scala:179)
at org.apache.spark.SparkEnv$.create(SparkEnv.scala:310)
at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:163)
at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:269)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:272)
at SparkExampleLocalDriver$.main(SparkExample.scala:9)
at SparkExampleLocalDriver.main(SparkExample.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
Your Spark binary doesn't contain the YARN-related classes.
Either use a pre-built binary for Hadoop:
http://www.apache.org/dyn/closer.cgi/spark/spark-1.3.1/spark-1.3.1-bin-hadoop2.4.tgz
or, if you are compiling from source, include the yarn and hadoop profiles:
./make-distribution.sh --tgz --skip-java-test -Pyarn -Phadoop-2.4 -Dhadoop.version=2.4.0
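As for where the YARN address actually goes: with yarn-client there is no host:port in the master URL at all. Spark locates the ResourceManager through the Hadoop client configuration, so the HADOOP_CONF_DIR (or YARN_CONF_DIR) environment variable must point at a directory containing your cluster's yarn-site.xml and core-site.xml when the driver starts. A minimal sketch of the same setup through the Java API, assuming a hypothetical /etc/hadoop/conf config directory:

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

public class YarnClientApp {
    public static void main(String[] args) {
        // Before launching the driver JVM:
        //   export HADOOP_CONF_DIR=/etc/hadoop/conf
        // (hypothetical path; it must hold yarn-site.xml and core-site.xml).
        SparkConf conf = new SparkConf()
                .setMaster("yarn-client")  // note: no ResourceManager host:port here
                .setAppName("MyApp");
        JavaSparkContext sc = new JavaSparkContext(conf);
        System.out.println("defaultParallelism = " + sc.defaultParallelism());
        sc.stop();
    }
}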

Implementing the Hadoop and MongoDB connector

I am working with Hadoop for the very first time, since I am planning to use it with MongoDB. After installing Hadoop, I tried to follow this tutorial and implement its example: http://docs.mongodb.org/ecosystem/tutorial/getting-started-with-hadoop/
Everything works until I call this command
bash examples/treasury_yield/run_job.sh
Then I get the following message
14/03/11 17:52:45 INFO util.MongoTool: Created a conf: 'Configuration: core-default.xml, core-site.xml, src/examples/hadoop-local.xml, src/examples/mongo-defaults.xml' on {class com.mongodb.hadoop.examples.treasury.TreasuryYieldXMLConfig} as job named '<unnamed MongoTool job>'
14/03/11 17:52:46 INFO util.MongoTool: Mapper Class: class com.mongodb.hadoop.examples.treasury.TreasuryYieldMapper
14/03/11 17:52:46 INFO util.MongoTool: Setting up and running MapReduce job in foreground, will wait for results. {Verbose? true}
14/03/11 17:52:47 WARN fs.FileSystem: "localhost:9100" is a deprecated filesystem name. Use "hdfs://localhost:9100/" instead.
14/03/11 17:52:47 WARN hdfs.DFSClient: DataStreamer Exception: org.apache.hadoop.ipc.RemoteException: java.io.IOException: File /tmp/hadoop-goncalopereira/mapred/staging/goncalopereira/.staging/job_201403111752_0001/job.jar could only be replicated to 0 nodes, instead of 1
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:1639)
at org.apache.hadoop.hdfs.server.namenode.NameNode.addBlock(NameNode.java:736)
at sun.reflect.GeneratedMethodAccessor6.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:578)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1393)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1389)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1149)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1387)
at org.apache.hadoop.ipc.Client.call(Client.java:1107)
at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:229)
at com.sun.proxy.$Proxy2.addBlock(Unknown Source)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:85)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:62)
at com.sun.proxy.$Proxy2.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.locateFollowingBlock(DFSClient.java:3686)
at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:3546)
at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2600(DFSClient.java:2749)
at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2989)
14/03/11 17:52:47 WARN hdfs.DFSClient: Error Recovery for block null bad datanode[0] nodes == null
14/03/11 17:52:47 WARN hdfs.DFSClient: Could not get block locations. Source file "/tmp/hadoop-goncalopereira/mapred/staging/goncalopereira/.staging/job_201403111752_0001/job.jar" - Aborting...
14/03/11 17:52:47 INFO mapred.JobClient: Cleaning up the staging area hdfs://localhost:9100/tmp/hadoop-goncalopereira/mapred/staging/goncalopereira/.staging/job_201403111752_0001
14/03/11 17:52:47 ERROR security.UserGroupInformation: PriviledgedActionException as:goncalopereira cause:org.apache.hadoop.ipc.RemoteException: java.io.IOException: File /tmp/hadoop-goncalopereira/mapred/staging/goncalopereira/.staging/job_201403111752_0001/job.jar could only be replicated to 0 nodes, instead of 1
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:1639)
at org.apache.hadoop.hdfs.server.namenode.NameNode.addBlock(NameNode.java:736)
at sun.reflect.GeneratedMethodAccessor6.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:578)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1393)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1389)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1149)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1387)
14/03/11 17:52:47 ERROR util.MongoTool: Exception while executing job...
org.apache.hadoop.ipc.RemoteException: java.io.IOException: File /tmp/hadoop-goncalopereira/mapred/staging/goncalopereira/.staging/job_201403111752_0001/job.jar could only be replicated to 0 nodes, instead of 1
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:1639)
at org.apache.hadoop.hdfs.server.namenode.NameNode.addBlock(NameNode.java:736)
at sun.reflect.GeneratedMethodAccessor6.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:578)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1393)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1389)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1149)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1387)
at org.apache.hadoop.ipc.Client.call(Client.java:1107)
at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:229)
at com.sun.proxy.$Proxy2.addBlock(Unknown Source)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:85)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:62)
at com.sun.proxy.$Proxy2.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.locateFollowingBlock(DFSClient.java:3686)
at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:3546)
at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2600(DFSClient.java:2749)
at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2989)
14/03/11 17:52:47 ERROR hdfs.DFSClient: Failed to close file /tmp/hadoop-goncalopereira/mapred/staging/goncalopereira/.staging/job_201403111752_0001/job.jar
org.apache.hadoop.ipc.RemoteException: java.io.IOException: File /tmp/hadoop-goncalopereira/mapred/staging/goncalopereira/.staging/job_201403111752_0001/job.jar could only be replicated to 0 nodes, instead of 1
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:1639)
at org.apache.hadoop.hdfs.server.namenode.NameNode.addBlock(NameNode.java:736)
at sun.reflect.GeneratedMethodAccessor6.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:578)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1393)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1389)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1149)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1387)
at org.apache.hadoop.ipc.Client.call(Client.java:1107)
at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:229)
at com.sun.proxy.$Proxy2.addBlock(Unknown Source)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:85)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:62)
at com.sun.proxy.$Proxy2.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.locateFollowingBlock(DFSClient.java:3686)
at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:3546)
at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2600(DFSClient.java:2749)
at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2989)
As you can guess, this is a bit overwhelming for a newbie like me. I assume it's some problem with Hadoop, but I'm not entirely sure what. I really hope someone here can point me in the right direction.
Hi, I have connected Hadoop with MongoDB using the MongoDB connector, following this link:
hadoop connection with mongodb
You need to concentrate on this error:
ERROR security.UserGroupInformation: PriviledgedActionException as:goncalopereira cause:org.apache.hadoop.ipc.RemoteException: java.io.IOException: File /tmp/hadoop-goncalopereira/mapred/staging/goncalopereira/.staging/job_201403111752_0001/job.jar could only be replicated to 0 nodes, instead of 1
Check whether that jar is present at the path shown.
Check whether your DataNode has actually started; it takes time to come up.
Make sure Hadoop itself is installed correctly by running a sample dataset through plain Hadoop, without bringing MongoDB into the picture. That will show you where things are going wrong. Hope it helps.
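"could only be replicated to 0 nodes, instead of 1" almost always means the NameNode currently sees no usable DataNodes. As a quick check, a sketch along these lines (assuming Hadoop 1.x client libraries and the hdfs://localhost:9100 address from your log) asks the NameNode for its DataNode report:

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

public class DataNodeReport {
    public static void main(String[] args) throws Exception {
        // NameNode address taken from the log output above
        FileSystem fs = FileSystem.get(URI.create("hdfs://localhost:9100/"), new Configuration());
        DatanodeInfo[] nodes = ((DistributedFileSystem) fs).getDataNodeStats();
        // Zero nodes here is consistent with the "replicated to 0 nodes" failure
        System.out.println("DataNodes reported: " + nodes.length);
        fs.close();
    }
}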

java.lang.NoClassDefFoundError: com/sybase/jdbcx/SybConnection weblogic

I am getting the error below when I call my REST web service, which uses a Sybase DB, on WebLogic 10.3.5. I have a JNDI data source defined; I tested the connection and it was successful.
java.lang.NoClassDefFoundError: com/sybase/jdbcx/SybConnection
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClassCond(ClassLoader.java:630)
at java.lang.ClassLoader.defineClass(ClassLoader.java:614)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:141)
at weblogic.utils.classloaders.GenericClassLoader.defineClass(GenericClassLoader.java:343)
at weblogic.utils.classloaders.GenericClassLoader.findLocalClass(GenericClassLoader.java:302)
at weblogic.utils.classloaders.GenericClassLoader.findClass(GenericClassLoader.java:270)
at java.lang.ClassLoader.loadClass(ClassLoader.java:305)
at java.lang.ClassLoader.loadClass(ClassLoader.java:246)
at weblogic.utils.classloaders.GenericClassLoader.loadClass(GenericClassLoader.java:179)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:247)
at weblogic.utils.classloaders.GenericClassLoader.defineCodeGenClass(GenericClassLoader.java:523)
at weblogic.utils.classfile.utils.CodeGenerator.generateClass(CodeGenerator.java:73)
at weblogic.utils.wrapper.WrapperFactory.generateWrapperClass(WrapperFactory.java:340)
at weblogic.utils.wrapper.WrapperFactory.getWrapperClass(WrapperFactory.java:244)
at weblogic.utils.wrapper.WrapperFactory.getWrapperClass(WrapperFactory.java:190)
at weblogic.jdbc.wrapper.JDBCWrapperFactory$1.run(JDBCWrapperFactory.java:164)
at java.security.AccessController.doPrivileged(Native Method)
at weblogic.jdbc.wrapper.JDBCWrapperFactory.getWrapper(JDBCWrapperFactory.java:161)
at weblogic.jdbc.pool.Driver.allocateConnection(Driver.java:251)
at weblogic.jdbc.pool.Driver.connect(Driver.java:164)
at weblogic.jdbc.jts.Driver.getNonTxConnection(Driver.java:651)
at weblogic.jdbc.jts.Driver.connect(Driver.java:127)
at weblogic.jdbc.common.internal.RmiDataSource.getConnection(RmiDataSource.java:364)
at org.springframework.jdbc.datasource.DataSourceUtils.doGetConnection(DataSourceUtils.java:111)
at org.springframework.jdbc.datasource.DataSourceUtils.getConnection(DataSourceUtils.java:77)
at org.mybatis.spring.SqlSessionUtils.getSqlSession(SqlSessionUtils.java:116)
at org.mybatis.spring.SqlSessionTemplate$SqlSessionInterceptor.invoke(SqlSessionTemplate.java:333)
at $Proxy102.selectOne(Unknown Source)
at org.mybatis.spring.SqlSessionTemplate.selectOne(SqlSessionTemplate.java:154)
at org.apache.ibatis.binding.MapperMethod.execute(MapperMethod.java:75)
at org.apache.ibatis.binding.MapperProxy.invoke(MapperProxy.java:38)
at $Proxy103.getAssetMap(Unknown Source)
at com.pbc.service.AssetController.GetHashMap(AssetController.java:38)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at org.springframework.web.bind.annotation.support.HandlerMethodInvoker.invokeHandlerMethod(HandlerMethodInvoker.java:176)
at org.springframework.web.servlet.mvc.annotation.AnnotationMethodHandlerAdapter.invokeHandlerMethod(AnnotationMethodHandlerAdapter.java:436)
at org.springframework.web.servlet.mvc.annotation.AnnotationMethodHandlerAdapter.handle(AnnotationMethodHandlerAdapter.java:424)
at org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:790)
at org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:719)
at org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:644)
at org.springframework.web.servlet.FrameworkServlet.doGet(FrameworkServlet.java:549)
at javax.servlet.http.HttpServlet.service(HttpServlet.java:707)
at javax.servlet.http.HttpServlet.service(HttpServlet.java:820)
at weblogic.servlet.internal.StubSecurityHelper$ServletServiceAction.run(StubSecurityHelper.java:227)
at weblogic.servlet.internal.StubSecurityHelper.invokeServlet(StubSecurityHelper.java:125)
at weblogic.servlet.internal.ServletStubImpl.execute(ServletStubImpl.java:300)
at weblogic.servlet.internal.ServletStubImpl.execute(ServletStubImpl.java:183)
at weblogic.servlet.internal.WebAppServletContext$ServletInvocationAction.wrapRun(WebAppServletContext.java:3717)
at weblogic.servlet.internal.WebAppServletContext$ServletInvocationAction.run(WebAppServletContext.java:3681)
at weblogic.security.acl.internal.AuthenticatedSubject.doAs(AuthenticatedSubject.java:321)
at weblogic.security.service.SecurityManager.runAs(SecurityManager.java:120)
at weblogic.servlet.internal.WebAppServletContext.securedExecute(WebAppServletContext.java:2277)
at weblogic.servlet.internal.WebAppServletContext.execute(WebAppServletContext.java:2183)
at weblogic.servlet.internal.ServletRequestImpl.run(ServletRequestImpl.java:1454)
at weblogic.work.ExecuteThread.execute(ExecuteThread.java:209)
at weblogic.work.ExecuteThread.run(ExecuteThread.java:178)
Caused by: java.lang.ClassNotFoundException: com.sybase.jdbcx.SybConnection
at weblogic.utils.classloaders.GenericClassLoader.findLocalClass(GenericClassLoader.java:297)
at weblogic.utils.classloaders.GenericClassLoader.findClass(GenericClassLoader.java:270)
at java.lang.ClassLoader.loadClass(ClassLoader.java:305)
at java.lang.ClassLoader.loadClass(ClassLoader.java:246)
at weblogic.utils.classloaders.GenericClassLoader.loadClass(GenericClassLoader.java:179)
... 61 more
jconn3.jar is in the classpath, and I can also see it being loaded when the server starts.
I am at a loss; I have searched everywhere, but in vain.

Persevere datasource driver

I'm new to Persevere. I did the helloWorld tutorial (very impressive, by the way), but I'm now trying to connect to a real Postgres database and find myself struggling to get the driver recognized, when I have the feeling it should be dead simple.
I dropped all the Postgres JDBC driver jars I had into the persevere/lib folder:
postgresql-8.3-603.jdbc2ee.jar
postgresql-8.3-603.jdbc2.jar
postgresql-8.3-603.jdbc3.jar
postgresql-8.3-603.jdbc4.jar
And yet, when I start Persevere from my project folder with the "persvr" command and no arguments, I get the following error message:
java.lang.ClassNotFoundException: org.postgresql.Driver
at java.net.URLClassLoader$1.run(URLClassLoader.java:200)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:188)
at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:276)
at java.lang.ClassLoader.loadClass(ClassLoader.java:251)
at org.mortbay.jetty.webapp.WebAppClassLoader.loadClass(WebAppClassLoader.java:379)
at org.mortbay.jetty.webapp.WebAppClassLoader.loadClass(WebAppClassLoader.java:341)
at java.lang.ClassLoader.loadClassInternal(ClassLoader.java:319)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:169)
at org.persvr.datasource.DatabaseDataSource.initParameters(Unknown Source)
at org.persvr.datasource.DatabaseTableDataSource.initParameters(Unknown Source)
at org.persvr.data.DataSourceManager.initSource(Unknown Source)
at org.persvr.data.DataSourceManager.<clinit>(Unknown Source)
at org.persvr.data.Identification.idForString(Unknown Source)
at org.persvr.Persevere.load(Unknown Source)
at org.persvr.job.SampleData.execute(Unknown Source)
at org.persvr.job.Upgrade.execute(Unknown Source)
at org.persvr.remote.PersevereFilter.init(Unknown Source)
at org.mortbay.jetty.servlet.FilterHolder.doStart(FilterHolder.java:97)
at org.mortbay.component.AbstractLifeCycle.start(AbstractLifeCycle.java:50)
at org.mortbay.jetty.servlet.ServletHandler.initialize(ServletHandler.java:620)
at org.mortbay.jetty.servlet.Context.startContext(Context.java:140)
at org.mortbay.jetty.webapp.WebAppContext.startContext(WebAppContext.java:1234)
at org.mortbay.jetty.handler.ContextHandler.doStart(ContextHandler.java:517)
at org.mortbay.jetty.webapp.WebAppContext.doStart(WebAppContext.java:460)
at org.mortbay.component.AbstractLifeCycle.start(AbstractLifeCycle.java:50)
at org.mortbay.jetty.handler.HandlerCollection.doStart(HandlerCollection.java:152)
at org.mortbay.jetty.handler.ContextHandlerCollection.doStart(ContextHandlerCollection.java:156)
at org.mortbay.component.AbstractLifeCycle.start(AbstractLifeCycle.java:50)
at org.mortbay.jetty.handler.HandlerWrapper.doStart(HandlerWrapper.java:130)
at org.mortbay.jetty.Server.doStart(Server.java:222)
at org.mortbay.component.AbstractLifeCycle.start(AbstractLifeCycle.java:50)
at org.persvr.util.JettyStart.main(Unknown Source)
I'm actually trying to expand the helloWorld project as an exercise, so I've added a postgres.json file in my helloWorld/WEB-INF/config folder. Here is the content of that file:
{"id":"postgres.json",
"sources":[
{"name":"addressTable",
"sourceClass":"org.persvr.datasource.DatabaseTableDataSource",
"connection":"jdbc:postgresql://hostname:5432/pnp?user=user1&pass=mypassword",
"driver":"org.postgresql.Driver",
"camelCaseColumnNames": false,
"table":"addr",
"idColumn":"addr_id",
"dataColumns":["addr_stat_cd","addr1","addr2","city","state","zip","zip_ext","cntry_cd","last_upd_uid","last_upd_dttm","create_uid","create_dt
tm"],
"schema":{
"data":{"$ref":"../addressTable/"}
}
}]
}
Could anyone tell me why Persevere can't find the Postgres driver, please?
Thanks,
Lancelot.
Figured it out. The Postgres driver jar should actually go into the persevere/WEB-INF/lib folder instead of persevere/lib.
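For later readers: a quick way to confirm the jar is actually visible is to resolve the driver class and open a connection directly, as in this sketch (hypothetical host and credentials). One unrelated thing worth noting about the connection string above: the PostgreSQL JDBC driver reads the password from a password= URL parameter, not pass=.

import java.sql.Connection;
import java.sql.DriverManager;

public class PostgresDriverCheck {
    public static void main(String[] args) throws Exception {
        // Throws ClassNotFoundException if the jar is not on the classpath
        Class.forName("org.postgresql.Driver");
        // Hypothetical host/database/credentials; note password=, not pass=
        Connection conn = DriverManager.getConnection(
                "jdbc:postgresql://hostname:5432/pnp?user=user1&password=mypassword");
        System.out.println("Connected: " + !conn.isClosed());
        conn.close();
    }
}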