I have installed Hadoop 2.6 on CentOS 7 and it's running fine. But when I run a jar exported from Eclipse, it gives the following error:
[root@myspark ~]# hadoop jar fengcount.jar intput output1
17/05/26 21:24:51 INFO client.RMProxy: Connecting to ResourceManager at myspark/192.168.44.100:8032
17/05/26 21:24:53 INFO mapreduce.JobSubmitter: Cleaning up the staging area /tmp/hadoop-yarn/staging/root/.staging/job_1495765615548_0004
Exception in thread "main" org.apache.hadoop.mapreduce.lib.input.InvalidInputException: Input path does not exist: hdfs://myspark:54310/user/root/intput
at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.singleThreadedListStatus(FileInputFormat.java:321)
at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.listStatus(FileInputFormat.java:264)
at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.getSplits(FileInputFormat.java:385)
at org.apache.hadoop.mapreduce.JobSubmitter.writeNewSplits(JobSubmitter.java:302)
at org.apache.hadoop.mapreduce.JobSubmitter.writeSplits(JobSubmitter.java:319)
at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:197)
at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1297)
at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1294)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1692)
at org.apache.hadoop.mapreduce.Job.submit(Job.java:1294)
at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1315)
at hdfs.hadoop_hdfs.fengcount.main(fengcount.java:39)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
The file input/test1.txt actually exists:
[root@myspark ~]# hdfs dfs -ls -R
drwxr-xr-x - root supergroup 0 2017-05-26 21:02 input
-rw-r--r-- 1 root supergroup 16 2017-05-24 01:57 input/test1.txt
My code:
package hdfs.hadoop_hdfs;
import java.io.IOException;
import java.util.StringTokenizer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
public class fengcount {
public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
// TODO Auto-generated method stub
Configuration conf=new Configuration();
String[] otherargs=new GenericOptionsParser(conf,args).getRemainingArgs();
if (otherargs.length!=2) {
System.err.println("Usage: fengcount <in> <out>");
System.exit(2);
}
@SuppressWarnings("deprecation")
Job job=new Job(conf, "fengcount");
job.setJarByClass(fengcount.class);
job.setMapperClass(TokerizerMapper.class);
job.setCombinerClass(IntSumReducer.class);
job.setReducerClass(IntSumReducer.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(IntWritable.class);
FileInputFormat.addInputPath(job, new Path(otherargs[0]));
FileOutputFormat.setOutputPath(job, new Path(otherargs[1]));
System.exit(job.waitForCompletion(true)?0:1);
}
// mapper class
public static class TokerizerMapper extends Mapper<Object, Text, Text, IntWritable> {
private final static IntWritable one = new IntWritable(1);
private Text word = new Text();
@Override
public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
// TODO Auto-generated method stub
System.out.println("key=" + key.toString());
System.out.println("value=" + value.toString());
StringTokenizer itr = new StringTokenizer(value.toString());
while (itr.hasMoreTokens()) {
word.set(itr.nextToken());
context.write(word, one);
}
}
}
//reduce process
public static class IntSumReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
private IntWritable result = new IntWritable();
@Override
public void reduce(Text key, Iterable<IntWritable> values,
Reducer<Text, IntWritable, Text, IntWritable>.Context context)
throws IOException, InterruptedException {
// TODO Auto-generated method stub
int sum = 0;
for (IntWritable val : values) {
sum += val.get();
}
result.set(sum);
context.write(key, result);
}
}
//mapreduce process
}
From the error log I can see hdfs://myspark:54310/user/root/intput, and I suspect that path is simply misspelled: the job was started with intput, while the directory that actually exists is input.
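A quick way to confirm is to list the directory in HDFS and rerun the job with the corrected name (paths as in your session; adjust if your layout differs):

hdfs dfs -ls /user/root
hadoop jar fengcount.jar input output1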
Good luck!
package com.cybersource.schemas.transaction_data.transactionprocessor;
import java.io.IOException;
import java.math.BigInteger;
import java.net.MalformedURLException;
import java.net.URL;
import java.rmi.RemoteException;
import java.util.HashMap;
import java.util.Map;
import org.apache.cxf.version.Version;
import com.cybersource.schemas.transaction_data_1.BillTo;
import com.cybersource.schemas.transaction_data_1.CCAuthService;
import com.cybersource.schemas.transaction_data_1.Card;
import com.cybersource.schemas.transaction_data_1.Item;
import com.cybersource.schemas.transaction_data_1.PurchaseTotals;
import com.cybersource.schemas.transaction_data_1.ReplyMessage;
import com.cybersource.schemas.transaction_data_1.RequestMessage;
import org.apache.cxf.endpoint.Client;
import org.apache.cxf.endpoint.Endpoint;
import org.apache.cxf.frontend.ClientProxy;
import org.apache.cxf.ws.security.wss4j.WSS4JInInterceptor;
import org.apache.cxf.ws.security.wss4j.WSS4JOutInterceptor;
import org.apache.ws.security.WSConstants;
import org.apache.ws.security.WSPasswordCallback;
//import org.apache.wss4j.common.ext.WSPasswordCallback;
import org.apache.ws.security.handler.WSHandlerConstants;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.UnsupportedCallbackException;
public class CybersourceClientExample {
// Replace the MERCHANT_ID and MERCHANT_KEY with the appropriate values.
private static final String MERCHANT_ID = "MERCHANT_ID ";
private static final String MERCHANT_KEY = "MERCHANT_KEY ";
private static final String SERVER_URL = "https://ics2wstesta.ic3.com/commerce/1.x/transactionProcessor/CyberSourceTransaction_1.142.wsdl";
private static final String CLIENT_LIB_VERSION = Version.getCompleteVersionString() + "/1.5.10"; // CXF Version / WSS4J Version
private static final String CLIENT_LIBRARY = "Java CXF WSS4J";
private static final String CLIENT_ENV = System.getProperty("os.name") + "/" +
System.getProperty("os.version") + "/" +
System.getProperty("java.vendor") + "/" +
System.getProperty("java.version");
public static void main(String[] args) throws RemoteException, MalformedURLException {
RequestMessage request = new RequestMessage();
// To help Cybersource troubleshoot any problems that you may encounter,
// include the following information about the client.
addClientLibraryInfo(request);
request.setMerchantID(MERCHANT_ID);
// Internal Transaction Reference Code for the Merchant
request.setMerchantReferenceCode("222222");
// Here we are telling the client that we are going to run an AUTH.
request.setCcAuthService(new CCAuthService());
request.getCcAuthService().setRun("true");
request.setBillTo(buildBillTo());
request.setCard(buildCard());
request.setPurchaseTotals(buildPurchaseTotals());
request.getItem().add(buildItem("0", "12.34", "2"));
request.getItem().add(buildItem("1", "56.78", "1"));
ITransactionProcessor processor = new TransactionProcessor(new URL(SERVER_URL)).getPortXML();
// Add WS-Security Headers to the Request
addSecurityValues(processor);
ReplyMessage reply = processor.runTransaction(request);
System.out.println("decision = " + reply.getDecision());
System.out.println("reasonCode = " + reply.getReasonCode());
System.out.println("requestID = " + reply.getRequestID());
System.out.println("requestToken = " + reply.getRequestToken());
System.out.println("ccAuthReply.reasonCode = " + reply.getCcAuthReply().getReasonCode());
}
private static void addClientLibraryInfo(RequestMessage request) {
request.setClientLibrary(CLIENT_LIBRARY);
request.setClientLibraryVersion(CLIENT_LIB_VERSION);
request.setClientEnvironment(CLIENT_ENV);
}
private static Item buildItem(String id, String unitPrice, String quantity) {
Item item = new Item();
item.setId(new BigInteger(id));
item.setUnitPrice(unitPrice);
item.setQuantity(quantity);
return item;
}
private static PurchaseTotals buildPurchaseTotals() {
PurchaseTotals purchaseTotals = new PurchaseTotals();
purchaseTotals.setCurrency("USD");
purchaseTotals.setGrandTotalAmount("100");
return purchaseTotals;
}
private static Card buildCard() {
Card card = new Card();
card.setAccountNumber("4111111111111111");
card.setExpirationMonth(new BigInteger("12"));
card.setExpirationYear(new BigInteger("2020"));
return card;
}
private static BillTo buildBillTo() {
BillTo billTo = new BillTo();
billTo.setFirstName("John");
billTo.setLastName("Doe");
billTo.setStreet1("1295 Charleston Road");
billTo.setCity("Mountain View");
billTo.setState("CA");
billTo.setPostalCode("94043");
billTo.setCountry("US");
billTo.setEmail("null@cybersource.com");
billTo.setIpAddress("10.7.111.111");
return billTo;
}
private static void addSecurityValues(ITransactionProcessor processor) {
Client client = ClientProxy.getClient(processor);
Endpoint endpoint = client.getEndpoint();
// We'll have to add the Username and Password properties to an OutInterceptor
HashMap<String, Object> outHeaders = new HashMap<String, Object>();
outHeaders.put(WSHandlerConstants.ACTION, WSHandlerConstants.USERNAME_TOKEN);
outHeaders.put(WSHandlerConstants.USER, MERCHANT_ID);
outHeaders.put(WSHandlerConstants.PASSWORD_TYPE, WSConstants.PW_TEXT);
outHeaders.put(WSHandlerConstants.PW_CALLBACK_CLASS, ClientPasswordHandler.class.getName());
WSS4JOutInterceptor interceptor = new WSS4JOutInterceptor(outHeaders);
endpoint.getOutInterceptors().add(interceptor);
}
public static class ClientPasswordHandler implements CallbackHandler {
@Override
public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
for (Callback callback : callbacks) {
if ((WSPasswordCallback)callback instanceof WSPasswordCallback) {
WSPasswordCallback passwordCallback = (WSPasswordCallback) callback;
passwordCallback.setPassword(MERCHANT_KEY);
}
}
}
}
}
I am facing the following error. Please help.
Dec 04, 2017 8:15:00 AM org.apache.cxf.wsdl.service.factory.ReflectionServiceFactoryBean buildServiceFromWSDL
INFO: Creating Service {urn:schemas-cybersource-com:transaction-data:TransactionProcessor}TransactionProcessor from WSDL: https://ics2wstesta.ic3.com/commerce/1.x/transactionProcessor/CyberSourceTransaction_1.142.wsdl
Dec 04, 2017 8:15:03 AM org.apache.cxf.phase.PhaseInterceptorChain doDefaultLogging
WARNING: Interceptor for {urn:schemas-cybersource-com:transaction-data:TransactionProcessor}TransactionProcessor#{urn:schemas-cybersource-com:transaction-data:TransactionProcessor}runTransaction has thrown exception, unwinding now
org.apache.cxf.binding.soap.SoapFault: Security processing failed.
at org.apache.cxf.ws.security.wss4j.WSS4JOutInterceptor$WSS4JOutInterceptorInternal.handleMessageInternal(WSS4JOutInterceptor.java:269)
at org.apache.cxf.ws.security.wss4j.WSS4JOutInterceptor$WSS4JOutInterceptorInternal.handleMessage(WSS4JOutInterceptor.java:135)
at org.apache.cxf.ws.security.wss4j.WSS4JOutInterceptor$WSS4JOutInterceptorInternal.handleMessage(WSS4JOutInterceptor.java:122)
at org.apache.cxf.phase.PhaseInterceptorChain.doIntercept(PhaseInterceptorChain.java:308)
at org.apache.cxf.endpoint.ClientImpl.doInvoke(ClientImpl.java:518)
at org.apache.cxf.endpoint.ClientImpl.invoke(ClientImpl.java:427)
at org.apache.cxf.endpoint.ClientImpl.invoke(ClientImpl.java:328)
at org.apache.cxf.endpoint.ClientImpl.invoke(ClientImpl.java:281)
at org.apache.cxf.frontend.ClientProxy.invokeSync(ClientProxy.java:96)
at org.apache.cxf.jaxws.JaxWsClientProxy.invoke(JaxWsClientProxy.java:139)
at com.sun.proxy.$Proxy36.runTransaction(Unknown Source)
at com.cybersource.schemas.transaction_data.transactionprocessor.CybersourceClientExample.main(CybersourceClientExample.java:77)
Caused by: org.apache.wss4j.common.ext.WSSecurityException: WSHandler: password callback failed
Original Exception was java.lang.ClassCastException: org.apache.wss4j.common.ext.WSPasswordCallback cannot be cast to org.apache.ws.security.WSPasswordCallback
at org.apache.wss4j.dom.handler.WSHandler.performPasswordCallback(WSHandler.java:1172)
at org.apache.wss4j.dom.handler.WSHandler.getPasswordCB(WSHandler.java:1130)
at org.apache.wss4j.dom.action.UsernameTokenAction.execute(UsernameTokenAction.java:43)
at org.apache.wss4j.dom.handler.WSHandler.doSenderAction(WSHandler.java:234)
at org.apache.cxf.ws.security.wss4j.WSS4JOutInterceptor.access$100(WSS4JOutInterceptor.java:54)
at org.apache.cxf.ws.security.wss4j.WSS4JOutInterceptor$WSS4JOutInterceptorInternal.handleMessageInternal(WSS4JOutInterceptor.java:261)
... 11 more
Caused by: java.lang.ClassCastException: org.apache.wss4j.common.ext.WSPasswordCallback cannot be cast to org.apache.ws.security.WSPasswordCallback
at com.cybersource.schemas.transaction_data.transactionprocessor.CybersourceClientExample$ClientPasswordHandler.handle(CybersourceClientExample.java:152)
at org.apache.wss4j.dom.handler.WSHandler.performPasswordCallback(WSHandler.java:1170)
... 16 more
Exception in thread "main" javax.xml.ws.soap.SOAPFaultException: Security processing failed.
at org.apache.cxf.jaxws.JaxWsClientProxy.invoke(JaxWsClientProxy.java:161)
at com.sun.proxy.$Proxy36.runTransaction(Unknown Source)
at com.cybersource.schemas.transaction_data.transactionprocessor.CybersourceClientExample.main(CybersourceClientExample.java:77)
Caused by: org.apache.wss4j.common.ext.WSSecurityException: WSHandler: password callback failed
Original Exception was java.lang.ClassCastException: org.apache.wss4j.common.ext.WSPasswordCallback cannot be cast to org.apache.ws.security.WSPasswordCallback
at org.apache.wss4j.dom.handler.WSHandler.performPasswordCallback(WSHandler.java:1172)
at org.apache.wss4j.dom.handler.WSHandler.getPasswordCB(WSHandler.java:1130)
at org.apache.wss4j.dom.action.UsernameTokenAction.execute(UsernameTokenAction.java:43)
at org.apache.wss4j.dom.handler.WSHandler.doSenderAction(WSHandler.java:234)
at org.apache.cxf.ws.security.wss4j.WSS4JOutInterceptor.access$100(WSS4JOutInterceptor.java:54)
at org.apache.cxf.ws.security.wss4j.WSS4JOutInterceptor$WSS4JOutInterceptorInternal.handleMessageInternal(WSS4JOutInterceptor.java:261)
at org.apache.cxf.ws.security.wss4j.WSS4JOutInterceptor$WSS4JOutInterceptorInternal.handleMessage(WSS4JOutInterceptor.java:135)
at org.apache.cxf.ws.security.wss4j.WSS4JOutInterceptor$WSS4JOutInterceptorInternal.handleMessage(WSS4JOutInterceptor.java:122)
at org.apache.cxf.phase.PhaseInterceptorChain.doIntercept(PhaseInterceptorChain.java:308)
at org.apache.cxf.endpoint.ClientImpl.doInvoke(ClientImpl.java:518)
at org.apache.cxf.endpoint.ClientImpl.invoke(ClientImpl.java:427)
at org.apache.cxf.endpoint.ClientImpl.invoke(ClientImpl.java:328)
at org.apache.cxf.endpoint.ClientImpl.invoke(ClientImpl.java:281)
at org.apache.cxf.frontend.ClientProxy.invokeSync(ClientProxy.java:96)
at org.apache.cxf.jaxws.JaxWsClientProxy.invoke(JaxWsClientProxy.java:139)
... 2 more
Caused by: java.lang.ClassCastException: org.apache.wss4j.common.ext.WSPasswordCallback cannot be cast to org.apache.ws.security.WSPasswordCallback
at com.cybersource.schemas.transaction_data.transactionprocessor.CybersourceClientExample$ClientPasswordHandler.handle(CybersourceClientExample.java:152)
at org.apache.wss4j.dom.handler.WSHandler.performPasswordCallback(WSHandler.java:1170)
... 16 more
"Caused by: java.lang.ClassCastException: org.apache.wss4j.common.ext.WSPasswordCallback cannot be cast to org.apache.ws.security.WSPasswordCallback at " - looks like you are mixing WSS4J versions incorrectly. Have you verified that all of the WSS4J jars on the classpath have the same version? Also have you checked that the WSS4J version is the correct one that should be used with the version of CXF you are using?
I am getting an exception while running a Hadoop jar that converts PDF to text and parses it to the mapper:
java.lang.Exception: java.io.IOException: Type mismatch in key from map: expected org.apache.hadoop.io.Text, received org.apache.hadoop.io.LongWritable
at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:489)
at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:549)
Caused by: java.io.IOException: Type mismatch in key from map: expected org.apache.hadoop.io.Text, received org.apache.hadoop.io.LongWritable
at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.collect(MapTask.java:1072)
at org.apache.hadoop.mapred.MapTask$NewOutputCollector.write(MapTask.java:715)
at org.apache.hadoop.mapreduce.task.TaskInputOutputContextImpl.write(TaskInputOutputContextImpl.java:89)
at org.apache.hadoop.mapreduce.lib.map.WrappedMapper$Context.write(WrappedMapper.java:112)
at org.apache.hadoop.mapreduce.Mapper.map(Mapper.java:125)
at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:146)
at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:787)
at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
17/06/08 19:12:10 INFO mapreduce.Job: Job job_local815278758_0001 running in uber mode : false
17/06/08 19:12:10 INFO mapreduce.Job: map 0% reduce 0%
17/06/08 19:12:10 INFO mapreduce.Job: Job job_local815278758_0001 failed with state FAILED due to: NA
17/06/08 19:12:10 INFO mapreduce.Job: Counters: 0
//Mapper class
import java.io.IOException;
import java.util.StringTokenizer;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
public class WordCountMapper extends
Mapper<Object, Object, Object, Object> {
private Text word = new Text();
private final static LongWritable one = new LongWritable(1);
protected void map(LongWritable key, Text value, Context context)
throws IOException, InterruptedException {
String line = value.toString();
StringTokenizer tokenizer = new StringTokenizer(line);
while (tokenizer.hasMoreTokens()) {
word.set(tokenizer.nextToken());
context.progress();
context.write(word, one);
}
}
}
//Reducer class
package com.amal.pdf;
import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
public class WordCountReducer extends
Reducer<Object, Object, Object, Object> {
protected void reduce(Text key, Iterable<LongWritable> values,
Context context) throws IOException, InterruptedException {
int sum = 0;
for (LongWritable value : values) {
sum += value.get();
}
context.write(key, new LongWritable(sum));
}
}
//PDF record Reader class
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.util.PDFTextStripper;
public class PdfRecordReader extends RecordReader<Object, Object> {
private String[] lines = null;
private LongWritable key = null;
private Text value = null;
@Override
public void initialize(InputSplit genericSplit, TaskAttemptContext context)
throws IOException, InterruptedException {
FileSplit split = (FileSplit) genericSplit;
Configuration job = context.getConfiguration();
final Path file = split.getPath();
/*
* The below code contains the logic for opening the file and seek to
* the start of the split. Here we are applying the Pdf Parsing logic
*/
FileSystem fs = file.getFileSystem(job);
FSDataInputStream fileIn = fs.open(split.getPath());
PDDocument pdf = null;
String parsedText = null;
PDFTextStripper stripper;
pdf = PDDocument.load(fileIn);
stripper = new PDFTextStripper();
//getting exception because of this line
parsedText = stripper.getText(pdf);
this.lines = parsedText.split("\n");
}
@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
if (key == null) {
key = new LongWritable();
key.set(1);
value = new Text();
value.set(lines[0]);
} else {
int temp = (int) key.get();
if (temp < (lines.length - 1)) {
int count = (int) key.get();
value = new Text();
value.set(lines[count]);
count = count + 1;
key = new LongWritable(count);
} else {
return false;
}
}
if (key == null || value == null) {
return false;
} else {
return true;
}
}
@Override
public LongWritable getCurrentKey() throws IOException,
InterruptedException {
return key;
}
@Override
public Text getCurrentValue() throws IOException, InterruptedException {
return value;
}
@Override
public float getProgress() throws IOException, InterruptedException {
return 0;
}
@Override
public void close() throws IOException {
}
}
One more thing: can anyone help me create a runnable jar? The run configuration is not showing inside Eclipse, because the main class is meant for the Hadoop environment.
The error on your console says:
Caused by: java.io.IOException:
Type mismatch in key from map: expected org.apache.hadoop.io.Text, received org.apache.hadoop.io.LongWritable
That means that the key-value pair you are providing to your mapper doesn't match its definition.
Your mapper class should look something like this:
public class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
private Text word = new Text();
private final static IntWritable one = new IntWritable(1);
protected void map(LongWritable key, Text value, Context context)
throws IOException, InterruptedException {
String line = value.toString();
StringTokenizer tokenizer = new StringTokenizer(line);
while (tokenizer.hasMoreTokens()) {
word.set(tokenizer.nextToken());
context.progress();
context.write(word, one);
}
}
}
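Two further safeguards help here, sketched below under the assumption that job is your Job instance. First, annotate map() with @Override so the compiler rejects a signature that does not actually override Mapper.map(); with your generic parameters Object, Object, your map(LongWritable, Text, Context) is a mere overload, so the identity mapper runs and emits LongWritable keys. Second, declare the map output types explicitly in the driver:

// Hypothetical driver fragment
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(IntWritable.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(IntWritable.class);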
This question already has answers here:
Why am I getting a NoClassDefFoundError in Java?
(31 answers)
Closed 5 years ago.
In this, my PdfInputFormat class extends FileInputFormat. This class returns an object of the PdfRecordReader class, which does all the PDF conversion. I am facing an error here.
I am creating the jar in Eclipse through the export wizard (Export > JAR file), selecting the option to package the required libraries into the jar.
I am executing the jar using the following command:
hadoop jar /home/tcs/converter.jar com.amal.pdf.PdfInputDriver /user/tcs/wordcountfile.pdf /user/convert
After running this I get the following exception:
17/06/09 09:26:51 WARN mapred.LocalJobRunner: job_local1466878685_0001
java.lang.Exception: java.lang.NoClassDefFoundError: org/apache/fontbox/cmap/CMapParser
at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:489)
at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:549)
Caused by: java.lang.NoClassDefFoundError: org/apache/fontbox/cmap/CMapParser
at org.apache.pdfbox.pdmodel.font.PDFont.parseCmap(PDFont.java:548)
at org.apache.pdfbox.pdmodel.font.PDFont.encode(PDFont.java:383)
at org.apache.pdfbox.util.PDFStreamEngine.processEncodedText(PDFStreamEngine.java:372)
at org.apache.pdfbox.util.operator.ShowTextGlyph.process(ShowTextGlyph.java:61)
at org.apache.pdfbox.util.PDFStreamEngine.processOperator(PDFStreamEngine.java:552)
at org.apache.pdfbox.util.PDFStreamEngine.processSubStream(PDFStreamEngine.java:248)
at org.apache.pdfbox.util.PDFStreamEngine.processStream(PDFStreamEngine.java:207)
at org.apache.pdfbox.util.PDFTextStripper.processPage(PDFTextStripper.java:367)
at org.apache.pdfbox.util.PDFTextStripper.processPages(PDFTextStripper.java:291)
at org.apache.pdfbox.util.PDFTextStripper.writeText(PDFTextStripper.java:247)
at org.apache.pdfbox.util.PDFTextStripper.getText(PDFTextStripper.java:180)
at com.amal.pdf.PdfRecordReader.initialize(PdfRecordReader.java:43)
at org.apache.hadoop.mapred.MapTask$NewTrackingRecordReader.initialize(MapTask.java:548)
at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:786)
at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:270)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ClassNotFoundException: org.apache.fontbox.cmap.CMapParser
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
... 21 more
17/06/09 09:26:52 INFO mapreduce.Job: Job job_local1466878685_0001 failed with state FAILED due to: NA
17/06/09 09:26:52 INFO mapreduce.Job: Counters: 0
false
Here is the code:
PdfRecordReader class(code)
package com.amal.pdf;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.util.PDFTextStripper;
public class PdfRecordReader extends RecordReader<Object, Object>
{
private String[] lines = null;
private LongWritable key = null;
private Text value = null;
@Override
public void initialize(InputSplit genericSplit, TaskAttemptContext context)
throws IOException, InterruptedException {
FileSplit split = (FileSplit) genericSplit;
Configuration job = context.getConfiguration();
final Path file = split.getPath();
/*
* The below code contains the logic for opening the file and seek to
* the start of the split. Here we are applying the Pdf Parsing logic
*/
FileSystem fs = file.getFileSystem(job);
FSDataInputStream fileIn = fs.open(split.getPath());
PDDocument pdf = null;
String parsedText = null;
PDFTextStripper stripper;
pdf = PDDocument.load(fileIn);
stripper = new PDFTextStripper();
//getting exception because of this line****
parsedText = stripper.getText(pdf);
this.lines = parsedText.split("\n");
}
@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
if (key == null) {
key = new LongWritable();
key.set(1);
value = new Text();
value.set(lines[0]);
} else {
int temp = (int) key.get();
if (temp < (lines.length - 1)) {
int count = (int) key.get();
value = new Text();
value.set(lines[count]);
count = count + 1;
key = new LongWritable(count);
} else {
return false;
}
}
if (key == null || value == null) {
return false;
} else {
return true;
}
}
@Override
public LongWritable getCurrentKey() throws IOException,
InterruptedException {
return key;
}
@Override
public Text getCurrentValue() throws IOException, InterruptedException {
return value;
}
@Override
public float getProgress() throws IOException, InterruptedException {
return 0;
}
@Override
public void close() throws IOException {
}
}
Note: since this is for the Hadoop environment, using Eclipse will not make a runnable JAR for this project. Is there any way to export this project as a runnable JAR? I need help understanding what I am doing wrong.
The error is because Hadoop could not find the org.apache.fontbox.cmap.CMapParser class, which comes from an external library that you use in your code.
That dependent jar was not packaged with the jar you passed to the hadoop command, so the Hadoop runtime couldn't find it: when you run a hadoop command, your code (the jar) gets distributed to the nodes where the data lives in the HDFS cluster, and the dependent jar was not shipped along with it.
There are two solutions you can follow:
1) You can include the external jars with the hadoop command (see the sketch after this list):
hadoop jar /home/tcs/converter.jar com.amal.pdf.PdfInputDriver -libjars <path to external jars comma separated> /user/tcs/wordcountfile.pdf /user/convert
2) Or you can use the Maven Shade plugin to create an uber jar that includes all dependent libraries inside your own jar.
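For option 1, the command would look something like the sketch below; the pdfbox and fontbox paths are placeholders for wherever those jars live on your machine, and -libjars only takes effect if the driver parses generic options via ToolRunner/GenericOptionsParser. HADOOP_CLASSPATH additionally makes the jars visible to the client JVM, which matters here because the PDF is parsed in the local job runner:

export HADOOP_CLASSPATH=/path/to/pdfbox.jar:/path/to/fontbox.jar
hadoop jar /home/tcs/converter.jar com.amal.pdf.PdfInputDriver \
  -libjars /path/to/pdfbox.jar,/path/to/fontbox.jar \
  /user/tcs/wordcountfile.pdf /user/convert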
This question already has answers here:
Passing Parameters JavaFX FXML
(10 answers)
Closed 5 years ago.
EDIT: Someone marked this as duplicate. I've read through the other question several times but I don't really understand how I can apply this to my program. It would be really nice if someone could help me in this specific context as I don't have much knowledge about Java yet. A short starting point would maybe even help me out. My question has nothing to do with a popup.
I have a problem. I don't want to put the server code into the initialize() method of FXMLController. Instead I put the server start code into the start() method of MainApp and created a RemoteReader class. But how do I get the input and output stream variables from RemoteReader or MainApp into the FXMLController class? I'm using SceneBuilder.
Code:
FXMLController.java:
package de.freakyonline.ucone;
import de.freakyonline.ucone.Player;
import de.freakyonline.ucone.PlayerList;
import java.net.URL;
import java.util.ResourceBundle;
import javafx.application.Platform;
import javafx.event.ActionEvent;
import javafx.event.Event;
import javafx.fxml.FXML;
import javafx.geometry.Insets;
import javafx.scene.Scene;
import javafx.scene.control.ContextMenu;
import javafx.scene.control.Label;
import javafx.scene.control.MenuBar;
import javafx.scene.control.MenuItem;
import javafx.scene.control.Tab;
import javafx.scene.control.TableColumn;
import javafx.scene.control.TableView;
import javafx.scene.control.TextArea;
import javafx.scene.control.TextField;
import javafx.scene.control.Tooltip;
import javafx.scene.control.cell.PropertyValueFactory;
import javafx.scene.control.cell.TextFieldTableCell;
import javafx.scene.input.ContextMenuEvent;
import javafx.scene.input.InputMethodEvent;
import javafx.scene.input.KeyEvent;
import javafx.scene.layout.BorderPane;
import javafx.scene.layout.StackPane;
import javafx.scene.web.HTMLEditor;
import javafx.stage.Modality;
import javafx.stage.Stage;
public class FXMLController {
@FXML
private ResourceBundle resources;
@FXML
private URL location;
@FXML
private BorderPane borderPane;
@FXML
private TableView<Player> playerTable;
final Tooltip playerTableToolTip = new Tooltip("Rightclick for more options ...");
@FXML
private TableColumn<Player, String> nickColumn;
@FXML
private TableColumn<Player, String> groupColumn;
@FXML
private TableColumn<Player, String> yearOfBirthColumn;
@FXML
private TableColumn<Player, Integer> ageColumn;
@FXML
private TableColumn<Player, String> genderColumn;
@FXML
private TableColumn<Player, String> lastQuitColumn;
@FXML
private Tab consoleOneTab;
@FXML
private MenuBar mainMenuBar;
@FXML
private TextArea consoleOneTextArea;
@FXML
private TextField consoleOneTextField;
@FXML
void handleConsoleOneAction(ActionEvent event) {
switch(consoleOneTextField.getText().toLowerCase()) {
case "freaky":
consoleOneTextArea.appendText("Freaky rulez! :D\n");
break;
case "ky3ak":
consoleOneTextArea.appendText("Ky3ak rulez! :D\n");
break;
case "testserver":
consoleOneTextArea.appendText("Sending an object ...");
// PROBLEM: I don't know how I can get the out variable of remote (RemoteReader) to here.
remote.out.writeObject(new Player("freakyy85","Owner","1810",31,"m","missing..."));
break;
case "help":
consoleOneTextArea.appendText("This console is mainly to log stuff which is done by the program to the user, so they can see what's going on.");
break;
default: consoleOneTextArea.appendText("Unknown Command\n");
}
consoleOneTextField.clear();
}
@FXML
void handleConsoleOneTabSelected(Event event) {
consoleOneTextField.requestFocus();
}
@FXML
void handleFileClose(ActionEvent event) {
Platform.exit();
}
@FXML
void handleHelpAbout(ActionEvent event) {
Stage haStage = new Stage();
haStage.setTitle("Help --> About");
Label aboutText = new Label("UCOne by freakyy85\nInitially developed for Ky3ak and UnityCraft");
aboutText.setPadding(new Insets(20));
haStage.setScene(new Scene(new StackPane(aboutText)));
haStage.initOwner(borderPane.getScene().getWindow());
haStage.initModality(Modality.WINDOW_MODAL);
haStage.show();
}
@FXML
void handlePlayerEditCommit(TableColumn.CellEditEvent<Player, String> event) {
System.out.println(event.getRowValue().toString());
}
@FXML
void handleTextChanged(InputMethodEvent event) {
}
@FXML
private void handlePTContextMenuRequest(ContextMenuEvent event) {
System.out.println("Target: " + event.getTarget().toString());
System.out.println("Source: " + event.getSource().toString());
final ContextMenu playerTableContextMenu = new ContextMenu();
MenuItem testMenuItem = new MenuItem("Test");
testMenuItem.setOnAction( e -> consoleOneTextArea.appendText("Used ContextMenu in Playertable, here: " + event.getTarget().toString()));
MenuItem colorizeFont = new MenuItem("Colorize Font");
colorizeFont.setOnAction( e -> consoleOneTextArea.appendText("PickResult: " + event.getPickResult().toString()));
MenuItem makeLocalNotes = new MenuItem("Local Player Notes");
makeLocalNotes.setOnAction( (e) -> {
Stage plnStage = new Stage();
plnStage.setTitle("(nickHere) - PlayerLocalNotesEditor");
HTMLEditor playerLocalNotes = new HTMLEditor();
plnStage.setScene(new Scene(new StackPane(playerLocalNotes)));
plnStage.initOwner(borderPane.getScene().getWindow());
plnStage.initModality(Modality.WINDOW_MODAL);
plnStage.show();
});
playerTableContextMenu.getItems().add(testMenuItem);
playerTableContextMenu.getItems().add(colorizeFont);
playerTableContextMenu.getItems().add(makeLocalNotes);
playerTableContextMenu.show(borderPane.getScene().getWindow(),event.getScreenX(),event.getScreenY());
}
@FXML
void initialize() {
assert nickColumn != null : "fx:id=\"nickColumn\" was not injected: check your FXML file 'Scene.fxml'.";
assert groupColumn != null : "fx:id=\"groupColumn\" was not injected: check your FXML file 'Scene.fxml'.";
PlayerList playerList = new PlayerList();
playerTable.setItems(playerList.playerList);
nickColumn.setCellValueFactory(new PropertyValueFactory<Player,String>("nick"));
groupColumn.setCellValueFactory(new PropertyValueFactory<Player,String>("group"));
yearOfBirthColumn.setCellValueFactory(new PropertyValueFactory<Player,String>("yearOfBirth"));
yearOfBirthColumn.setCellFactory(TextFieldTableCell.forTableColumn());
ageColumn.setCellValueFactory(new PropertyValueFactory<Player,Integer>("age"));
genderColumn.setCellValueFactory(new PropertyValueFactory<Player,String>("gender"));
genderColumn.setCellFactory(TextFieldTableCell.forTableColumn());
lastQuitColumn.setCellValueFactory(new PropertyValueFactory<Player,String>("lastQuit"));
playerTable.setTooltip(playerTableToolTip);
}
}
MainApp.java:
package de.freakyonline.ucone;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.Socket;
import javafx.application.Application;
import static javafx.application.Application.launch;
import javafx.fxml.FXMLLoader;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.stage.Stage;
public class MainApp extends Application {
String ver = "v0.1-SNAPSHOT";
ObjectOutputStream out;
ObjectInputStream in;
@Override
public void start(Stage stage) throws Exception {
Parent root = FXMLLoader.load(getClass().getResource("/fxml/Scene.fxml"));
// Connect to Server
try {
Socket sock = new Socket("unitycraft.de", 2009);
out = new ObjectOutputStream(sock.getOutputStream());
in = new ObjectInputStream(sock.getInputStream());
// Listen for remote stuff coming in ...
Thread remote = new Thread(new RemoteReader(in,out,sock));
remote.start();
} catch (Exception ex) { ex.printStackTrace(); }
Scene scene = new Scene(root);
scene.getStylesheets().add("/styles/Styles.css");
stage.setTitle("UCOne - The UnityCraft Staff Tool " + ver);
stage.setScene(scene);
stage.show();
}
/**
* The main() method is ignored in correctly deployed JavaFX application.
* main() serves only as fallback in case the application can not be
* launched through deployment artifacts, e.g., in IDEs with limited FX
* support. NetBeans ignores main().
*
* @param args the command line arguments
*/
public static void main(String[] args) {
launch(args);
}
}
RemoteReader.java:
package de.freakyonline.ucone;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.Socket;
/**
*
* @author uwe
*/
public class RemoteReader implements Runnable {
Object obj = null;
ObjectInputStream in;
ObjectOutputStream out;
Socket sock;
public RemoteReader (ObjectInputStream in, ObjectOutputStream out, Socket sock) {
this.in = in;
this.out = out;
this.sock = sock;
}
public void run() {
try {
while((obj=in.readObject()) != null)
System.out.println("Got object from server ...");
} catch (Exception ex) { ex.printStackTrace(); }
}
}
Btw, I'm currently learning. ;)
I got it working. I changed the main class to this:
public class MainApp extends Application {
String ver = "v0.1-SNAPSHOT";
ObjectOutputStream out;
ObjectInputStream in;
Socket sock;
Thread remote;
@Override
public void start(Stage stage) throws Exception {
FXMLLoader root = new FXMLLoader(
getClass().getResource("/fxml/Scene.fxml")
);
// Parent root = FXMLLoader.load(getClass().getResource("/fxml/Scene.fxml"));
// Connect to Server
try {
sock = new Socket("unitycraft.de", 2009);
out = new ObjectOutputStream(sock.getOutputStream());
in = new ObjectInputStream(sock.getInputStream());
} catch (Exception ex) { ex.printStackTrace(); }
Scene scene = new Scene(root.load());
scene.getStylesheets().add("/styles/Styles.css");
FXMLController controller = root.<FXMLController>getController();
controller.initData(in, out, sock);
// Listen for remote stuff coming in ... (started after initData so the controller has the streams)
remote = new Thread(new RemoteReader(in, out, sock));
remote.start();
stage.setTitle("UCOne - The UnityCraft Staff Tool " + ver);
stage.setScene(scene);
stage.show();
}
/**
* The main() method is ignored in correctly deployed JavaFX application.
* main() serves only as fallback in case the application can not be
* launched through deployment artifacts, e.g., in IDEs with limited FX
* support. NetBeans ignores main().
*
* @param args the command line arguments
*/
public static void main(String[] args) {
launch(args);
}
}
And I added this method to FXMLController, plus the declarations of the class fields (in, out, sock):
void initData(ObjectInputStream in, ObjectOutputStream out, Socket sock) {
this.in = in;
this.out = out;
this.sock = sock;
}
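For reference, the field declarations mentioned above would look something like this in FXMLController (a sketch; the types mirror the initData parameters, with the matching java.io and java.net imports):

// Populated by initData(...), used e.g. in handleConsoleOneAction
private ObjectInputStream in;
private ObjectOutputStream out;
private Socket sock;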
Now I can access the output stream from within FXMLController. But now I can't access the textarea from within RemoteReader.java. I started a new question. ;)
I am not able to compile my driver class at the job.waitForCompletion(boolean) call: it complains about an unhandled ClassNotFoundException. If I catch the exception instead, the compiler complains that the run method must return an int value. I am using the new MapReduce API. Could anyone suggest what the issue is?
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
public class Dist_Driver extends Configured implements Tool {
public int run(String args[]) throws IOException, InterruptedException {
// Configuration phase
// Configuration conf=new Configuration();
Job job = new Job(new Configuration());
job.setJarByClass(Dist_Driver.class);
// Mapper Reducer InputFormat
job.setInputFormatClass(FileInputFormat.class);
// Mapper and Reducer Class
job.setMapperClass(Dist_Mapper.class);
job.setReducerClass(DistCache_Reducer.class);
job.setOutputFormatClass(TextOutputFormat.class);
job.setInputFormatClass(KeyValueTextInputFormat.class);
// set FileInputOutput
FileInputFormat.addInputPath(job, new Path(args[0]));
FileOutputFormat.setOutputPath(job, new Path(args[1]));
// setting number of reduce tasks and submit it
job.setNumReduceTasks(2);
// Lets check if the file exist
File f1 = new File("/home/hdfs/trials_mapreduce_progams/emp_id");
if (f1.exists())
System.out.println("The Files Exists");
else
System.out.println("The File doesnot exist");
URI path1;
try {
path1 = new URI(
"/home/hdfs/trials_mapreduce_progams/emp_lookup.txt");
DistributedCache.addCacheFile(path1, job.getConfiguration());
} catch (URISyntaxException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
if (job.waitForCompletion(true))
return 0;
else
return 1;
}
public static void main(String[] args) throws Exception {
int exitcode = ToolRunner.run(new Dist_Driver(), args);
System.exit(exitcode);
}
}
Just add the ClassNotFoundException to the run method signature
public int run(String args[]) throws IOException,
InterruptedException,
ClassNotFoundException {
The reason you get an error when you try to try/catch it instead is that, if a ClassNotFoundException is thrown during execution, there will be no return value, and the method has to return something on every path.
If you really want to catch it, just return 1 in the catch clause, which is the error exit code
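A sketch of that variant, with the job setup elided as in the original run() method:

public int run(String[] args) throws IOException, InterruptedException {
    // ... same job configuration as above ...
    try {
        return job.waitForCompletion(true) ? 0 : 1;
    } catch (ClassNotFoundException e) {
        e.printStackTrace();
        return 1; // error exit code
    }
}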