How to solve the following error in Eclipse?

I have the following code, which builds a graph from a Wikipedia index by importing it into a graph.db directory.
// Copyright (c) 2012 Mirko Nasato
//
package org.graphipedia.dataimport.neo4j;

import java.util.HashMap;
import java.util.Map;
import org.neo4j.unsafe.batchinsert.BatchInserter;
import org.neo4j.unsafe.batchinsert.BatchInserters;

public class ImportGraph {

    private final BatchInserter inserter;
    private final Map<String, Long> inMemoryIndex;

    public ImportGraph(String dataDir) {
        inserter = BatchInserters.inserter(dataDir);
        inserter.createDeferredSchemaIndex(WikiLabel.Page).on("title").create();
        inMemoryIndex = new HashMap<String, Long>();
    }

    public static void main(String[] args) throws Exception {
        if (args.length < 2) {
            System.out.println("USAGE: ImportGraph <input-file> <data-dir>");
            System.exit(255);
        }
        String inputFile = args[0];
        String dataDir = args[1];
        ImportGraph importer = new ImportGraph(dataDir);
        importer.createNodes(inputFile);
        importer.createRelationships(inputFile);
        importer.finish();
    }

    public void createNodes(String fileName) throws Exception {
        System.out.println("Importing pages...");
        NodeCreator nodeCreator = new NodeCreator(inserter, inMemoryIndex);
        long startTime = System.currentTimeMillis();
        nodeCreator.parse(fileName);
        long elapsedSeconds = (System.currentTimeMillis() - startTime) / 1000;
        System.out.printf("\n%d pages imported in %d seconds.\n", nodeCreator.getPageCount(), elapsedSeconds);
    }

    public void createRelationships(String fileName) throws Exception {
        System.out.println("Importing links...");
        RelationshipCreator relationshipCreator = new RelationshipCreator(inserter, inMemoryIndex);
        long startTime = System.currentTimeMillis();
        relationshipCreator.parse(fileName);
        long elapsedSeconds = (System.currentTimeMillis() - startTime) / 1000;
        System.out.printf("\n%d links imported in %d seconds; %d broken links ignored\n",
                relationshipCreator.getLinkCount(), elapsedSeconds, relationshipCreator.getBadLinkCount());
    }

    public void finish() {
        inserter.shutdown();
    }
}
However, every time I run this code, I run into the following error.
Exception in thread "main" java.lang.Error: Unresolved compilation problem:
The method inserter(File) in the type BatchInserters is not applicable for the arguments (String)
at org.graphipedia.dataimport.neo4j.ImportGraph.<init>(ImportGraph.java:36)
at org.graphipedia.dataimport.neo4j.ImportGraph.main(ImportGraph.java:48)

Based on this javadoc:
https://neo4j.com/docs/java-reference/current/javadocs/org/neo4j/unsafe/batchinsert/BatchInserters.html
BatchInserters.inserter needs a File, not a "path/to/dir" String. You will need to create a File object and pass it in.
Code:
Add an import at the top.
import java.io.File;
Then replace the following line
inserter = BatchInserters.inserter(dataDir);
with this
inserter = BatchInserters.inserter(new File(dataDir));
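For reference, a sketch of the constructor after the change (the surrounding lines are unchanged from the question; in Neo4j 3.x this overload takes a java.io.File):

public ImportGraph(String dataDir) {
    // BatchInserters.inserter(File) expects the store directory as a File
    inserter = BatchInserters.inserter(new File(dataDir));
    inserter.createDeferredSchemaIndex(WikiLabel.Page).on("title").create();
    inMemoryIndex = new HashMap<String, Long>();
}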


How to extract error records while inserting into a db table using JdbcIO in Apache Beam (Java)

I am creating an in-memory PCollection and writing it into PostgreSQL. Now, when I insert data into the table, a few records may throw an exception and will not be inserted. How do I extract such failed records when I run the pipeline?
below is the code I have written for pipeline:
PipelineOptions options = PipelineOptionsFactory.create();
options.setRunner(FlinkRunner.class);
Pipeline p = Pipeline.create(options);

// Preparing dummy data
Collection<Stock> stockList = Arrays.asList(
        new Stock("AAP", 2000, "Apple Inc"),
        new Stock("MSF", 3000, "Microsoft Corporation"),
        new Stock("NVDA", 4000, "NVIDIA Corporation"),
        new Stock("INT", 3200, "Intel Corporation"));

// Reading dummy data and saving it into PCollection<Stock>
PCollection<Stock> data = p.apply(Create.of(stockList)
        .withCoder(SerializableCoder.of(Stock.class)));

// insert
@SuppressWarnings("unused")
PDone insertData = data.apply(JdbcIO.<Stock>write()
        .withDataSourceConfiguration(JdbcIO.DataSourceConfiguration
                .create("org.postgresql.Driver", "jdbc:postgresql://localhost:5432/postgres")
                .withUsername("postgres").withPassword("sachin"))
        .withStatement("insert into stocks values(?, ?, ?)")
        .withPreparedStatementSetter(new JdbcIO.PreparedStatementSetter<Stock>() {
            private static final long serialVersionUID = 1L;
            public void setParameters(Stock element, PreparedStatement query) throws SQLException {
                query.setString(1, element.getSymbol());
                query.setLong(2, element.getPrice());
                query.setString(3, element.getCompany());
            }
        }));
p.run().waitUntilFinish();
After going through the Apache Beam programming guide I did not get any clue, so I copied JdbcIO and modified its batch execution, separating successfully inserted records from failed inserts using TupleTags. Now it is working.
below is code for modified JdbcIO:
private static class WriteFn<T> extends DoFn<T, T> {
    private static final int DEFAULT_BATCH_SIZE = 1;

    private final Write<T> spec;
    private DataSource dataSource;
    private Connection connection;
    private PreparedStatement preparedStatement;
    // added: tags used to route successful and failed records
    private TupleTag<T> validTupleTag;
    private TupleTag<T> inValidTupleTag;
    private int batchCount;

    public WriteFn(Write<T> spec) {
        this.spec = spec;
    }

    @Setup
    public void setup() throws Exception {
        dataSource = spec.getDataSourceConfiguration().buildDatasource();
        connection = dataSource.getConnection();
        connection.setAutoCommit(false);
        preparedStatement = connection.prepareStatement(spec.getStatement());
        validTupleTag = spec.getValidTupleTag();
        inValidTupleTag = spec.getInvalidTupleTag();
    }

    @StartBundle
    public void startBundle() {
        batchCount = 0;
    }

    @ProcessElement
    public void processElement(@Element T record, MultiOutputReceiver out) throws Exception {
        preparedStatement.clearParameters();
        spec.getPreparedStatementSetter().setParameters(record, preparedStatement);
        preparedStatement.addBatch();
        batchCount++;
        if (batchCount >= DEFAULT_BATCH_SIZE) {
            try {
                preparedStatement.executeBatch();
                connection.commit();
                // added: successful record goes to the VALID output
                out.get(validTupleTag).output(record);
            } catch (SQLException e1) {
                // TODO add logger
                // added: failed record goes to the INVALID output
                out.get(inValidTupleTag).output(record);
            }
            batchCount = 0;
        }
    }
}
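For completeness, the WriteFn above assumes the copied Write transform exposes the two tags and returns a PCollectionTuple instead of PDone. A minimal sketch of that wiring (the method names mirror the client code below; everything here is a hand-written addition to the copied class, not stock JdbcIO):

public static class Write<T> extends PTransform<PCollection<T>, PCollectionTuple> {
    // builder state for the statement, data source configuration and
    // prepared statement setter is omitted for brevity
    private TupleTag<T> validTupleTag;
    private TupleTag<T> invalidTupleTag;

    public Write<T> withValidTag(TupleTag<T> tag) {
        this.validTupleTag = tag;
        return this;
    }

    public Write<T> withInValidTag(TupleTag<T> tag) {
        this.invalidTupleTag = tag;
        return this;
    }

    TupleTag<T> getValidTupleTag() {
        return validTupleTag;
    }

    TupleTag<T> getInvalidTupleTag() {
        return invalidTupleTag;
    }

    @Override
    public PCollectionTuple expand(PCollection<T> input) {
        // the main output carries successful inserts, the additional output the failures
        return input.apply(ParDo.of(new WriteFn<T>(this))
                .withOutputTags(validTupleTag, TupleTagList.of(invalidTupleTag)));
    }
}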
and client code:
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import org.apache.beam.runners.flink.FlinkRunner;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.PipelineResult.State;
import org.apache.beam.sdk.coders.SerializableCoder;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionTuple;
import org.apache.beam.sdk.values.TupleTag;

/**
 * @author sachin
 * @date 18-Nov-2021
 */
public class BeamTest {
    static List<Stock> stocks = new ArrayList<>();

    public static void main(String[] args) {
        System.setProperty("java.specification.version", "1.8");
        process();
        // read();
    }

    public static void process() {
        final TupleTag<Stock> VALID = new TupleTag<Stock>() {};
        final TupleTag<Stock> INVALID = new TupleTag<Stock>() {};

        PipelineOptions options = PipelineOptionsFactory.create();
        options.setRunner(FlinkRunner.class);
        Pipeline p = Pipeline.create(options);

        // Preparing dummy data
        Collection<Stock> stockList = Arrays.asList(
                new Stock("AAP", 2000, "Apple Inc"),
                new Stock("MSF", 3000, "Microsoft Corporation"),
                new Stock("NVDA", 4000, "NVIDIA Corporation"),
                new Stock("INT", 3200, "Intel Corporation"));

        // Reading dummy data and saving it into PCollection<Stock>
        PCollection<Stock> data = p.apply(
                Create.of(stockList).withCoder(SerializableCoder.of(Stock.class)));

        // insert
        PCollectionTuple pCollectionTupleResult = data.apply("write",
                CustomJdbcIOWrite.<Stock>write()
                        .withDataSourceConfiguration(CustomJdbcIOWrite.DataSourceConfiguration
                                .create("org.postgresql.Driver", "jdbc:postgresql://localhost:5432/postgres")
                                .withUsername("postgres").withPassword("sachin"))
                        .withStatement("insert into stocks values(?, ?, ?)")
                        .withValidTag(VALID).withInValidTag(INVALID)
                        .withPreparedStatementSetter(new CustomJdbcIOWrite.PreparedStatementSetter<Stock>() {
                            private static final long serialVersionUID = 1L;
                            public void setParameters(Stock element, PreparedStatement query) throws SQLException {
                                query.setString(1, element.getSymbol());
                                query.setLong(2, element.getPrice());
                                query.setString(3, element.getCompany());
                            }
                        }));

        // get the failed PCollection using the INVALID tuple tag
        PCollection<Stock> failedPcollection = pCollectionTupleResult.get(INVALID)
                .setCoder(SerializableCoder.of(Stock.class));
        failedPcollection.apply(ParDo.of(new DoFn<Stock, Stock>() {
            private static final long serialVersionUID = 1L;
            @ProcessElement
            public void process(ProcessContext pc) {
                System.out.println("Failed pCollection element:" + pc.element().getCompany());
            }
        }));

        // get the inserted PCollection using the VALID tuple tag
        PCollection<Stock> insertedPcollection = pCollectionTupleResult.get(VALID)
                .setCoder(SerializableCoder.of(Stock.class));
        insertedPcollection.apply(ParDo.of(new DoFn<Stock, Stock>() {
            private static final long serialVersionUID = 1L;
            @ProcessElement
            public void process(ProcessContext pc) {
                System.out.println("Inserted pCollection element:" + pc.element().getCompany());
            }
        }));

        // run pipeline
        State state = p.run().waitUntilFinish();
        System.out.println("Data inserted successfully with state : " + state);
    }
}
Below is the output; new Stock("NVDA", 4000, "NVIDIA Corporation") intentionally fails to insert because my db column accepts only 3 chars ("NVD"), not 4 ("NVDA"):
Inserted pCollection element:Microsoft Corporation
Failed pCollection element:NVIDIA Corporation
Inserted pCollection element:Intel Corporation
Inserted pCollection element:Apple Inc
Data inserted successfully with state : DONE
Full Details and github link

Get the number of attempts in Quartz

Can anyone please tell me how I can get the current attempt count in Quartz?
For example: if the Quartz scheduler is started with a repeat count of 5, I want to get the current repeat count.
Here is the example I am trying:
public class SimpleTriggerExample implements Job {
    int count = 0;
    JobDetail job = null;
    JobDataMap data = null;

    public static void main(String[] args) throws Exception {
        new SimpleTriggerExample().schedule();
    }

    public void schedule() throws ParseException, SchedulerException {
        job = JobBuilder.newJob(SimpleTriggerExample.class)
                .withIdentity("dummyJobName", "group1").build();
        Trigger trigger = TriggerBuilder
                .newTrigger()
                .withIdentity("dummyTriggerName", "group1")
                .withSchedule(SimpleScheduleBuilder.simpleSchedule()
                        .withIntervalInSeconds(10).withRepeatCount(3))
                .build();
        System.out.println("before in main jobdatamap");
        Scheduler scheduler = new StdSchedulerFactory().getScheduler();
        scheduler.start();
        scheduler.scheduleJob(job, trigger);
    }

    public void execute(JobExecutionContext context) throws JobExecutionException {
        // count
        data = context.getJobDetail().getJobDataMap();
        System.out.println("after jobdatamap");
        int count1 = data.getInt("EXECUTION_COUNT");
        System.out.println("count1-->before" + count1);
        count1++;
        System.out.println("count1-->after" + count1);
        job.getJobDataMap().put("EXECUTION_COUNT", count1);
        count = count1;
        System.out.println("count" + count);
    }
}
Use a JobDataMap along with the @PersistJobDataAfterExecution annotation.
Make sure that when you modify data in the JobDataMap, you reuse the same key.
If you do this, the attempt count persists across executions.
Example Code Snippet:
package com.mss.quartz.demo;

import java.text.SimpleDateFormat;
import java.util.Date;
import org.quartz.InterruptableJob;
import org.quartz.Job;
import org.quartz.JobBuilder;
import org.quartz.JobDataMap;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.quartz.JobKey;
import org.quartz.PersistJobDataAfterExecution;
import org.quartz.Scheduler;
import org.quartz.SchedulerContext;
import org.quartz.SchedulerException;
import org.quartz.SimpleScheduleBuilder;
import org.quartz.TriggerBuilder;
import org.quartz.UnableToInterruptJobException;
import org.quartz.impl.StdSchedulerFactory;

@PersistJobDataAfterExecution
public class HelloJob implements InterruptableJob {
    SchedulerContext schedulerContext = null;
    testQuartz test = new testQuartz();
    boolean result;
    private boolean _interrupted = false;
    private JobKey _jobKey = null;
    Thread t = null;
    // public static int count = 0;

    public void interrupt() throws UnableToInterruptJobException {
        System.out.println("---" + this._jobKey + " -- INTERRUPTING --");
        this._interrupted = true;
    }

    public void execute(JobExecutionContext context) throws JobExecutionException {
        Scheduler scd = context.getScheduler();
        JobDataMap dataMap = context.getJobDetail().getJobDataMap();
        String jobSays = dataMap.getString("test1");
        int myFloatValue = dataMap.getIntValue("id");
        System.out.println("In Job Class" + jobSays + " " + myFloatValue
                + " Current time in Job class " + new Date().toString());
        JobKey jobKey = context.getJobDetail().getKey();
        int attemps = dataMap.getInt("attempts");
        attemps++;
        dataMap.put("attempts", attemps);
        System.out.println("After putting count in job data map:" + dataMap.get("attempts"));
    }
}
Try adding the @PersistJobDataAfterExecution annotation to the SimpleTriggerExample class:
@PersistJobDataAfterExecution
public class SimpleTriggerExample implements Job
{ ...}
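One detail worth noting: JobDataMap.getInt("EXECUTION_COUNT") fails if the key was never stored, so seed the counter when building the job. A minimal sketch (the key name is taken from the question; seeding via usingJobData is standard JobBuilder API):

JobDetail job = JobBuilder.newJob(SimpleTriggerExample.class)
        .withIdentity("dummyJobName", "group1")
        // seed the counter so the first getInt("EXECUTION_COUNT") succeeds
        .usingJobData("EXECUTION_COUNT", 0)
        .build();

Also update the map through context.getJobDetail().getJobDataMap() inside execute(), not through the detached job field: @PersistJobDataAfterExecution persists the map of the JobDetail that was actually executed.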

InterSystems Caché using XEP

I am trying to extract data from the Samples namespace that comes with the InterSystems Caché install. Specifically, I am trying to retrieve Sample.Company global data using XEP. In order to achieve this, I created the Sample.Company class like this:
package Sample;

public class Company {
    public Long id;
    public String mission;
    public String name;
    public Long revenue;
    public String taxId;

    public Company(Long id, String mission, String name, Long revenue, String taxId) {
        this.id = id;
        this.mission = mission;
        this.name = name;
        this.revenue = revenue;
        this.taxId = taxId;
    }

    public Company() {
    }
}
The XEP-related code looks like this:
import java.util.ArrayList;
import java.util.List;
import org.springframework.stereotype.Service;
import Sample.Company;
import com.intersys.xep.Event;
import com.intersys.xep.EventPersister;
import com.intersys.xep.EventQuery;
import com.intersys.xep.EventQueryIterator;
import com.intersys.xep.PersisterFactory;
import com.intersys.xep.XEPException;

@Service
public class CompanyService {
    public List<Company> fetch() {
        EventPersister myPersister = PersisterFactory.createPersister();
        myPersister.connect("SAMPLES", "user", "pwd");
        try {
            // delete any existing SingleStringSample events, then import new ones
            Event.isEvent("Sample.Company");
            myPersister.deleteExtent("Sample.Company");
            String[] generatedClasses = myPersister.importSchema("Sample.Company");
            for (int i = 0; i < generatedClasses.length; i++) {
                System.out.println("Event class " + generatedClasses[i] + " successfully imported.");
            }
        } catch (XEPException e) {
            System.out.println("import failed:\n" + e);
            throw new RuntimeException(e);
        }
        EventQuery<Company> myQuery = null;
        List<Company> list = new ArrayList<Company>();
        try {
            Event newEvent = myPersister.getEvent("Sample.Company");
            String sql = "Select * from Sample.Company";
            myQuery = newEvent.createQuery(sql);
            newEvent.close();
            myQuery.execute();
            EventQueryIterator<Company> iterator = myQuery.getIterator();
            while (iterator.hasNext()) {
                Company c = iterator.next();
                System.out.println(c);
                list.add(c);
            }
            myQuery.close();
            myPersister.close();
            return list;
        } catch (XEPException e) {
            System.out.println("createQuery failed:\n" + e);
            throw new RuntimeException(e);
        }
    }
}
When I try executing the fetch() method of the above class, I see the following exception:
com.intersys.xep.XEPException: Cannot import - extent for Sample.Company not empty.
at com.intersys.xep.internal.Generator.generate(Generator.java:52)
at com.intersys.xep.EventPersister.importSchema(EventPersister.java:954)
at com.intersys.xep.EventPersister.importSchema(EventPersister.java:363)
I got the simple string example working. Does this mean we cannot read existing data using XEP? If we can, could someone please help me resolve the above issue? Thanks in advance.
You are trying to create a new class named Sample.Company in your instance:
String[] generatedClasses = myPersister.importSchema("Sample.Company");
But that class already exists there and its extent still contains data, which is why the import fails.
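If the goal is only to read the existing data, a minimal sketch of one workaround, using only calls already present in the question, is to treat the failed import as "schema already there" and query the existing events without deleting anything:

EventPersister myPersister = PersisterFactory.createPersister();
myPersister.connect("SAMPLES", "user", "pwd");
try {
    myPersister.importSchema("Sample.Company");
} catch (XEPException e) {
    // extent not empty: the class and its data already exist, so skip the import
    System.out.println("Schema already present, reading existing data: " + e);
}
Event newEvent = myPersister.getEvent("Sample.Company");
EventQuery<Company> myQuery = newEvent.createQuery("Select * from Sample.Company");
newEvent.close();
myQuery.execute();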

Can't seem to load DLLs in Java Web Start

I'm having a huge problem with my Java Web Start application. I have tried a lot of solutions, but none seems to work correctly in the end.
I need to write a Web Start applet that gathers basic hardware info about the client computer, to check whether the client can connect to our systems and use the software for our courses. I use Sigar to load the CPU and memory information, and JNI to load a custom C++ routine that reads the graphics card name (this part works perfectly).
I've put all my DLLs in the src/resources folder so they are packed into the jar. I also use what we call "engines", which are classes that each perform a specific task (in our case the JNI Engine, Config Engine and Data Engine; code below). I'm new to Web Start, so I'm not sure this concept works well with library loading.
I've tried to add the DLLs to a jar as a library in NetBeans, and I've tried to add the DLLs to the JNLP, but each run recreates it and I can't add them through the project properties. Finally, I built my Data Engine so that it should extract the DLLs to the Java temp directory when they are not found, but Sigar still doesn't want to work. I've also put my DLL on a correctly configured java.library.path (it works locally).
It works when I run my main class locally (with right click, Run), but when I click the Run button to launch the Web Start version, it crashes with this error message (it happens in ConfigEngine, which extends SigarCommandBase):
JNLPClassLoader: Finding library sigar-amd64-winnt.dll.dll
no sigar-amd64-winnt.dll in java.library.path
org.hyperic.sigar.SigarException: no sigar-amd64-winnt.dll in java.library.path
Here's the code:
JNI Engine (loads the C++ code for the graphics card)
package Engine;

public class JniEngine {
    static private final String nomLibJni = "JniEngine";
    static private final String nomLibJni64 = "JniEngine_x64";

    static {
        if (System.getProperty("os.arch").contains("86")) {
            System.loadLibrary(nomLibJni);
        } else {
            System.loadLibrary(nomLibJni64);
        }
    }

    public native String getInfoGPU() throws Error;
}
ConfigEngine
package Engine;

import java.net.NetworkInterface;
import java.net.SocketException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import org.hyperic.sigar.Sigar;
import org.hyperic.sigar.SigarException;
import org.hyperic.sigar.cmd.SigarCommandBase;

public class ConfigEngine extends SigarCommandBase {
    private final String nomOsAcceptes = "Windows";

    static {
        // note: declaring a local variable here does not load the DataEngine class;
        // its static initializer only runs once DataEngine is actually used
        DataEngine data;
    }

    public ConfigEngine() {
        super();
    }

    @Override
    public void output(String[] args) throws SigarException {
    }

    public HashMap<String, String> getMap() throws SigarException, SocketException {
        HashMap<String, String> hmConfig = new HashMap<>();
        loadInfoCpu(hmConfig);
        loadInfoRam(hmConfig);
        loadInfoOs(hmConfig);
        loadInfoNet(hmConfig);
        loadInfoGpu(hmConfig);
        return hmConfig;
    }

    private void loadInfoCpu(HashMap<String, String> hashmap) throws SigarException {
        org.hyperic.sigar.CpuInfo[] configCpu = this.sigar.getCpuInfoList();
        org.hyperic.sigar.CpuInfo infoCpu = configCpu[0];
        long cacheSize = infoCpu.getCacheSize();
        hashmap.put("Builder", infoCpu.getVendor());
        hashmap.put("Model", infoCpu.getModel());
        hashmap.put("Mhz", String.valueOf(infoCpu.getMhz()));
        hashmap.put("Cpus nbr", String.valueOf(infoCpu.getTotalCores()));
        if ((infoCpu.getTotalCores() != infoCpu.getTotalSockets())
                || (infoCpu.getCoresPerSocket() > infoCpu.getTotalCores())) {
            hashmap.put("Cpus", String.valueOf(infoCpu.getTotalSockets()));
            hashmap.put("Core", String.valueOf(infoCpu.getCoresPerSocket()));
        }
        if (cacheSize != Sigar.FIELD_NOTIMPL) {
            hashmap.put("Cache", String.valueOf(cacheSize));
        }
    }

    private void loadInfoRam(HashMap<String, String> hashmap) throws SigarException {
        org.hyperic.sigar.Mem mem = this.sigar.getMem();
        hashmap.put("RAM", String.valueOf(mem.getRam()));
        hashmap.put("Memory", String.valueOf(mem.getTotal()));
        hashmap.put("Free", String.valueOf(mem.getUsed()));
    }

    private void loadInfoOs(HashMap<String, String> hashmap) throws SigarException {
        hashmap.put("OS", System.getProperty("os.name"));
        hashmap.put("Version", System.getProperty("os.version"));
        hashmap.put("Arch", System.getProperty("os.arch"));
    }

    private void loadInfoNet(HashMap<String, String> hashmap) throws SocketException {
        List<NetworkInterface> interfaces = Collections.list(NetworkInterface.getNetworkInterfaces());
        int i = 1;
        for (NetworkInterface net : interfaces) {
            if (!net.isVirtual() && net.isUp()) {
                hashmap.put("Port Name " + String.valueOf(i), net.getDisplayName());
            }
            i++;
        }
    }

    private void loadInfoGpu(HashMap<String, String> hashmap) throws SocketException {
        if (System.getProperty("os.name").contains(nomOsAcceptes)) {
            JniEngine jni = new JniEngine();
            hashmap.put("VGA", jni.getInfoGPU());
        }
    }
}
Finally, my Data Engine, which tries to load all the DLLs and change the library path (most of it is temporary, as it is patches on patches):
package Engine;

import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;

public class DataEngine {
    static private final String nomLibSigar = "sigar-x86-winnt";
    static private final String nomLibSigar64 = "sigar-amd64-winnt";
    static private final String nomLibJni = "JniEngine";
    static private final String nomLibJni64 = "JniEngine_x64";
    static private final String NomJar86 = "lib_config_x86";
    static private final String nomJar64 = "lib_config_x64";
    static private final String path = "Resources\\";

    static {
        try {
            if (System.getProperty("os.arch").contains("86")) {
                System.loadLibrary(nomLibJni);
                System.loadLibrary(nomLibSigar);
            } else {
                System.loadLibrary(nomLibJni64);
                System.loadLibrary(nomLibSigar64);
            }
        } catch (UnsatisfiedLinkError ex) {
            // not on java.library.path: fall back to extracting from the jar
            loadJniFromJar();
            loadSigarFromJar();
        }
    }

    public static void loadSigarFromJar() {
        try {
            File dll;
            InputStream is;
            if (System.getProperty("os.arch").contains("86")) {
                is = DataEngine.class.getResourceAsStream(path + nomLibSigar + ".dll");
                dll = File.createTempFile(path + nomLibSigar, ".dll");
            } else {
                is = DataEngine.class.getResourceAsStream(path + nomLibSigar64 + ".dll");
                dll = File.createTempFile(path + nomLibSigar64, ".dll");
            }
            FileOutputStream fos = new FileOutputStream(dll);
            byte[] array = new byte[1024];
            for (int i = is.read(array); i != -1; i = is.read(array)) {
                fos.write(array, 0, i);
            }
            fos.close();
            is.close();
            System.load(dll.getAbsolutePath());
            // note: changing java.library.path at runtime has no effect;
            // the JVM reads it only once at startup
            System.setProperty("java.library.path", dll.getAbsolutePath());
        } catch (Throwable e) {
            // swallowing the error silently hides extraction failures; log it at least
            e.printStackTrace();
        }
    }

    public static void loadJniFromJar() {
        try {
            File dll;
            InputStream is;
            if (System.getProperty("os.arch").contains("86")) {
                is = DataEngine.class.getResourceAsStream(path + nomLibJni + ".dll");
                dll = File.createTempFile(path + nomLibJni, ".dll");
            } else {
                is = DataEngine.class.getResourceAsStream(path + nomLibJni64 + ".dll");
                dll = File.createTempFile(path + nomLibJni64, ".dll");
            }
            FileOutputStream fos = new FileOutputStream(dll);
            byte[] array = new byte[1024];
            for (int i = is.read(array); i != -1; i = is.read(array)) {
                fos.write(array, 0, i);
            }
            fos.close();
            is.close();
            System.load(dll.getAbsolutePath());
        } catch (Throwable e) {
            e.printStackTrace();
        }
    }
}
I also have a problem with my main class (NetBeans doesn't want my JAppletForm to be the main class of the project), but I'll probably recreate the project anyway, since the hundreds of patches I tried have corrupted the build. My main class simply loads the HashMap via ConfigEngine's getMap() and shows it in the console when run locally, or in the JAppletForm when run with Web Start.
It's a pretty big problem, so I'll update my question with any additional info you need.
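One likely culprit, as a hedged note: System.setProperty("java.library.path", ...) has no effect after startup, because the JVM reads that property only once at launch. Sigar can instead be pointed at a native-library directory through the org.hyperic.sigar.path system property, assuming the Sigar build in use honors it and the property is set before any org.hyperic.sigar class loads. A minimal sketch under those assumptions (file names follow the question's code):

// Extract the Sigar DLL to a temp directory and tell Sigar where to find it.
// Note: resource paths inside a jar use '/', so the question's "Resources\\"
// prefix would not resolve; "/resources/..." here assumes a src/resources layout.
File tempDir = Files.createTempDirectory("sigar-natives").toFile();
File dll = new File(tempDir, "sigar-amd64-winnt.dll");
try (InputStream is = DataEngine.class.getResourceAsStream("/resources/sigar-amd64-winnt.dll");
     FileOutputStream fos = new FileOutputStream(dll)) {
    byte[] buffer = new byte[1024];
    for (int n = is.read(buffer); n != -1; n = is.read(buffer)) {
        fos.write(buffer, 0, n);
    }
}
// must happen before any Sigar class is loaded
System.setProperty("org.hyperic.sigar.path", tempDir.getAbsolutePath());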

How to change the default folder for uploading files in JBoss

I am trying to upload a file and change the default location where uploaded files are stored. How can I change this? Please suggest.
package Controller;

import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Iterator;
import java.util.List;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.FileItemFactory;
import org.apache.commons.fileupload.FileUploadException;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.apache.commons.io.FilenameUtils;
import requests.Connect;
import display.DisplayLog;

/**
 * Servlet implementation class ControlServlet
 */
public class ControlServlet extends HttpServlet {
    private static final long serialVersionUID = 1L;
    boolean result;
    private boolean isMultipart;
    private String filePath;
    private int maxFileSize = 1000 * 1024;
    private int maxMemSize = 1000 * 1024;
    private File file;

    public ControlServlet() {
        super();
        // TODO Auto-generated constructor stub
    }

    /**
     * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
     */
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
    }

    /**
     * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
     */
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        HttpSession session = request.getSession();
        String userName = request.getParameter("username");
        isMultipart = ServletFileUpload.isMultipartContent(request);
        response.setContentType("audio/mpeg3;audio/x-mpeg-3;video/mpeg;video/x-mpeg;text/xml");
        PrintWriter out = response.getWriter();
        if (isMultipart) {
            // Create a factory for disk-based file items
            FileItemFactory factory = new DiskFileItemFactory();
            // Create a new file upload handler
            ServletFileUpload upload = new ServletFileUpload(factory);
            try {
                // Parse the request
                List items = upload.parseRequest(request);
                Iterator iterator = items.iterator();
                while (iterator.hasNext()) {
                    FileItem item = (FileItem) iterator.next();
                    if (!item.isFormField()) {
                        String fileName = item.getName();
                        if (fileName != null) {
                            fileName = FilenameUtils.getName(fileName);
                        }
                        String root = getServletContext().getRealPath("/");
                        root = "F/images";
                        File path = new File(root + "/uploads");
                        if (!path.exists()) {
                            boolean status = path.mkdirs();
                        }
                        File uploadedFile = new File(path + "/" + fileName);
                        System.out.println(" Prashant File Upload Location is ");
                        // System.out.println(uploadedFile.getAbsolutePath());
                        System.out.println("fileName is " + fileName);
                        System.out.println("root is " + root);
                        System.out.println("path is " + path);
                        if (fileName != null && !fileName.isEmpty()) { // was: fileName != "", which compares references
                            item.write(uploadedFile);
                            System.out.println(" Prashant File Upload Location 2 is ");
                            System.out.println(uploadedFile.getAbsolutePath());
                            out.println("<h1>File Uploaded Successfully....:-)</h1>");
                        } else {
                            out.println(uploadedFile.getAbsolutePath());
                            out.println("<h1>File Uploaded Successfully....:-)</h1>");
                            System.out.println("file not found");
                        }
                    } else {
                        String abc = item.getString();
                    }
                }
            } catch (FileUploadException e) {
                out.println(e);
            } catch (Exception e) {
                out.println(e);
            }
        } else {
            out.println("Not Multipart");
        }
        System.out.println("print this Prashant" + userName);
        session.setAttribute("username", userName);
        request.setAttribute("username", "prashant");
        // RequestDispatcher myDispatch = request.getRequestDispatcher("Index.jsp");
        // myDispatch.forward(request, response);
    }
}
I am getting F:\jboss-4.2.3.GA-jdk6\jboss-4.2.3.GA\bin\ as the default folder. Please help, I am new to this.
Your problem is here:
String root = getServletContext().getRealPath("/");
You are setting the upload path to the container's path, which is not a default path but the place the server was started from. You can make the upload path anything you want; it depends on your needs and configuration.
You could create a system property with the directory of your choice, or set it as a dynamic property in a JBoss configuration file (not sure what that would be for JBoss 4).
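A minimal sketch of the system-property approach (the property name upload.dir and the fallback directory are assumptions, not JBoss-defined names):

// Read the upload root from a system property, e.g. the server started
// with -Dupload.dir=F:/images; fall back to a hypothetical default.
String root = System.getProperty("upload.dir", "F:/uploads");
File path = new File(root, "uploads");
if (!path.exists() && !path.mkdirs()) {
    throw new IOException("Could not create upload directory: " + path);
}
File uploadedFile = new File(path, fileName);
item.write(uploadedFile);

With JBoss 4 the property can be passed on the startup command line (run.bat or run.sh) via JAVA_OPTS, so the location can change per environment without recompiling the servlet.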