Partitions and JdbcPagingItemReader don't give correct values - Spring

I am working on Spring Batch partitioning with JdbcPagingItemReader, but I am only getting half the records.
Where I expect 100 thousand records, I get only 50 thousand. What is going wrong?
How do I use a nested or inner query with OraclePagingQueryProvider?
My Original Query
SELECT q.*
FROM (SELECT DEPT.ID id,
DEPT.CREATOR createdby,
DEPT.CREATE_DATE createddate,
DEPT.UPDATED_BY updatedby,
DEPT.LAST_UPDATE_DATE updateddate,
DEPT.NAME name,
DEPT.STATUS status,
statusT.DESCR statusdesc,
REL.ROWID_DEPT1 rowidDEPT1,
REL.ROWID_DEPT2 rowidDEPT2,
DEPT2.DEPT_FROM_VAL parentcid,
DEPT2.NAME parentname,
ROW_NUMBER() OVER (PARTITION BY DEPT.CREATE_DATE ORDER BY DEPT.ID) AS rn
FROM TEST.DEPT_TABLE DEPT
LEFT JOIN TEST.STATUS_TABLE statusT
ON DEPT.STATUS = statusT.STATUS
LEFT JOIN TEST.C_REL_DEPT rel
ON DEPT.ID = REL.ROWID_DEPT2
LEFT JOIN TEST.DEPT_TABLE DEPT2
ON REL.ROWID_DEPT1 = DEPT2.ID) q
WHERE rn BETWEEN ? AND ?; // ? will be fromValue to toValue
Code:
@Configuration
public class CustomerJob2 {
@Autowired
private JobBuilderFactory jobBuilderFactory;
@Autowired
private StepBuilderFactory stepBuilderFactory;
@Autowired
private DataSource dataSource; // DataSource field used as this.dataSource below (was missing from the snippet)
@Bean
public CustomerPartitioner customerPartitioner() {
return new CustomerPartitioner();
}
@Bean("readCustomerJob")
@Primary
public Job readCustomerJob() throws Exception {
return jobBuilderFactory.get("readCustomerJob")
.incrementer(new RunIdIncrementer())
.start(customerStepOne())
.build();
}
@Bean
public Step customerStepOne() throws Exception {
return stepBuilderFactory.get("customerStepOne")
.partitioner(slaveStep().getName(), customerPartitioner())
.step(slaveStep())
.gridSize(5)
.taskExecutor(new SimpleAsyncTaskExecutor())
.build();
}
// slave step
@Bean
public Step slaveStep() throws Exception {
return stepBuilderFactory.get("slaveStep")
.<Customer, Customer>chunk(3000)
.reader(pagingItemReader(null, null))
.writer(customerWriter())
.listener(customerStepOneExecutionListener())
.build();
}
// Reader
@Bean(destroyMethod = "")
@StepScope
public JdbcPagingItemReader<Customer> pagingItemReader(
@Value("#{stepExecutionContext['fromValue']}") Long fromValue,
@Value("#{stepExecutionContext['toValue']}") Long toValue) throws Exception {
System.out.println(" FROM = "+ fromValue + " TO VALUE ="+ toValue);
JdbcPagingItemReader<Customer> reader = new JdbcPagingItemReader<>();
reader.setDataSource(this.dataSource);
reader.setRowMapper(new CustomerRowMapper());
reader.setSaveState(false);
reader.setPageSize(3000);
// Sort Keys
Map<String, Order> sortKeys = new HashMap<>();
sortKeys.put("id", Order.ASCENDING);
OraclePagingQueryProvider queryProvider = new OraclePagingQueryProvider();
queryProvider.setSelectClause("q.* FROM ( SELECT Row_Number() OVER (ORDER BY party.ROWID_OBJECT) MyRow, "
+ " OTHER columns in the Query");
queryProvider.setFromClause("**** "
+ "LEFT JOIN ********* "
+ "LEFT JOIN ********* "
+ "LEFT JOIN ********* ) q ");
queryProvider.setWhereClause("MyRow BETWEEN "+ fromValue + " AND "+ toValue);
queryProvider.setSortKeys(sortKeys);
reader.setQueryProvider(queryProvider);
reader.afterPropertiesSet();
return reader;
}
@Bean
public CustomerWriter customerWriter() {
return new CustomerWriter();
}
}
Partition Logic
public class CustomerPartitioner implements Partitioner{
private static final String CUSTOMER_CNT = "SELECT COUNT(party.IS) ***** COMPLEX JOIN";
@Autowired
@Qualifier("edrJdbcTemplate")
private JdbcTemplate jdbcTemplate;
@Override
public Map<String, ExecutionContext> partition(int gridSize) {
Long custCnt = jdbcTemplate.queryForObject(CUSTOMER_CNT, Long.class);
int toValue = 0;
int fromValue = 0;
int increment = 3000;
int counter = 0;
int temp = 0;
Map<String, ExecutionContext> partitionMap = new HashMap<>();
for (int i = 0; i < custCnt; i += increment) { // custCnt gives 100 thousand
counter++;
temp = i;
if(i == 0) {
fromValue = temp;
toValue = increment;
}else {
fromValue = toValue + 1;
toValue = fromValue + increment - 1;
}
ExecutionContext context = new ExecutionContext();
context.put("fromValue", fromValue);
context.put("toValue", toValue);
partitionMap.put("Thread--" + counter, context);
}
return partitionMap;
}
}
Here are the logs -
2020-06-22 22:44:14.750 INFO 15752 --- [ main] o.s.b.c.l.support.SimpleJobLauncher : Job: [SimpleJob: [name=readCustomerJob]] launched with the following parameters: [{JobID=1592846054670, date=1592846054670}]
2020-06-22 22:44:14.790 INFO 15752 --- [ main] o.s.batch.core.job.SimpleStepHandler : Executing step: [customerStepOne]
Cust Count = 1035483
FROM = 6001 TO VALUE =9000
FROM = 0 TO VALUE =3000
FROM = 3001 TO VALUE =6000
FROM = 9001 TO VALUE =12000
2020-06-22 22:44:15.874 DEBUG 15752 --- [cTaskExecutor-4] o.s.b.i.database.JdbcPagingItemReader : Reading page 0
2020-06-22 22:44:15.874 DEBUG 15752 --- [cTaskExecutor-1] o.s.b.i.database.JdbcPagingItemReader : Reading page 0
2020-06-22 22:44:15.874 DEBUG 15752 --- [cTaskExecutor-2] o.s.b.i.database.JdbcPagingItemReader : Reading page 0
2020-06-22 22:44:15.874 DEBUG 15752 --- [cTaskExecutor-3] o.s.b.i.database.JdbcPagingItemReader : Reading page 0

Related

Transaction annotation in SpringBoot does not work

The @Transactional annotation does not seem to work for me.
This code works (manually opening a transaction):
@Repository
public class PeoplePropertiesDaoHibernate implements PeoplePropertiesDao {
private static final Logger LOGGER = LoggerFactory.getLogger(PeoplePropertiesDaoHibernate.class);
private String lastQuery;
private EntityManager em;
@PersistenceContext(unitName="timesheetPersistence")
public void setEntityManager(EntityManager em) {
this.em = em;
}
@Qualifier("entityManager")
@Autowired
EntityManagerFactory emf;
@Override
public EntityManager getPersistenceContext()
{
return em;
}
public boolean isHoursManager(String peopleId) {
EntityManager em = emf.createEntityManager();
EntityTransaction tx = em.getTransaction();
String query =
"select count(*) " +
"from supervisor " +
"where people_id = :manager ";
tx.begin();
Session s = (Session) em.getDelegate();
Integer count = 0;
count = (Integer) s.createSQLQuery(query)
.setParameter("manager", peopleId)
.uniqueResult();
tx.commit();
em.close();
LOGGER.debug("Count = " + count + " return " + (count > 0));
return count > 0;
}
}
This code also works (type = PersistenceContextType.EXTENDED):
@Repository
public class PeoplePropertiesDaoHibernate implements PeoplePropertiesDao {
private static final Logger LOGGER = LoggerFactory.getLogger(PeoplePropertiesDaoHibernate.class);
private String lastQuery;
private EntityManager em;
@PersistenceContext(unitName="timesheetPersistence", type = PersistenceContextType.EXTENDED)
public void setEntityManager(EntityManager em) {
this.em = em;
}
@Override
public EntityManager getPersistenceContext()
{
return em;
}
public boolean isHoursManager(String peopleId) {
String query =
"select count(*) " +
"from supervisor " +
"where people_id = :manager ";
Session s = (Session) em.getDelegate();
Integer count = 0;
count = (Integer) s.createSQLQuery(query)
.setParameter("manager", peopleId)
.uniqueResult();
LOGGER.debug("Count = " + count + " return " + (count > 0));
return count > 0;
}
This code with @Transactional gives the error: Session/EntityManager is closed; nested exception is java.lang.IllegalStateException: Session/EntityManager is closed
@Transactional
@Repository
public class PeoplePropertiesDaoHibernate implements PeoplePropertiesDao {
private static final Logger LOGGER = LoggerFactory.getLogger(PeoplePropertiesDaoHibernate.class);
private String lastQuery;
private EntityManager em;
@PersistenceContext(unitName="timesheetPersistence")
public void setEntityManager(EntityManager em) {
this.em = em;
}
@Override
public EntityManager getPersistenceContext()
{
return em;
}
public boolean isHoursManager(String peopleId) {
String query =
"select count(*) " +
"from supervisor " +
"where people_id = :manager ";
Session s = (Session) em.getDelegate();
Integer count = 0;
count = (Integer) s.createSQLQuery(query)
.setParameter("manager", peopleId)
.uniqueResult();
LOGGER.debug("Count = " + count + " return " + (count > 0));
return count > 0;
}
What is the cause of this?
EDIT: THIS WORKS!! Why would the session from the getDelegate() not use the transaction?
String query =
"select count(*) " +
"from Supervisor " +
"where people_id = :manager ";
Session s = (Session) em.getDelegate();
Integer count = 0;
count = ((Long)em.createQuery(query)
.setParameter("manager", peopleId)
.getSingleResult()).intValue()
;
LOGGER.debug("Count = " + count + " return " + (count > 0));
return count > 0;

Drools decision table - rules not matching

I have a hello-world type spring/drools setup. The issue is no rules fire when in theory they should.
Decision Table:
Console output - server startup:
package com.example.drools;
//generated from Decision Table
import com.example.drools.TestRules;
// rule values at B9, header at B4
rule "_9"
when
$test:TestRules(number1 == 10)
then
$test.add("10");
end
Drools Config:
@Configuration
public class DroolsConfiguration
{
private final static String VALIDATION_RULES = "validation-rules.xls";
@Bean
public KieContainer validationRulesKieContainer() {
KieServices kieServices = KieServices.Factory.get();
Resource rules = ResourceFactory.newClassPathResource(VALIDATION_RULES);
compileXlsToDrl(rules);
KieFileSystem kieFileSystem = kieServices.newKieFileSystem().write(rules);
KieBuilder kieBuilder = kieServices.newKieBuilder(kieFileSystem);
KieBuilder builder = kieBuilder.buildAll();
KieModule kieModule = kieBuilder.getKieModule();
return kieServices.newKieContainer(kieModule.getReleaseId());
}
private static void compileXlsToDrl(Resource resource) {
try {
InputStream is = resource.getInputStream();
SpreadsheetCompiler compiler = new SpreadsheetCompiler();
String drl = compiler.compile(is, InputType.XLS);
System.out.println(drl);
} catch (Exception e) {
e.printStackTrace();
}
}
}
Service:
@Service
public class ValidationRulesEngine
{
@Autowired
@Qualifier("validationRulesKieContainer")
private KieContainer validationKieContainer;
public void validate() {
KieSession kieSession = validationKieContainer.newKieSession();
kieSession.addEventListener(new DebugAgendaEventListener());
kieSession.addEventListener(new DebugRuleRuntimeEventListener());
TestRules tr = new TestRules(10, 20, 30);
kieSession.insert(tr);
int noOfRulesFired = kieSession.fireAllRules();
System.out.println("noOfRulesFired: " + noOfRulesFired);
System.out.println(tr);
System.out.println(tr.getRule());
}
}
TestRules - Fact:
public class TestRules
{
public int number1;
public int number2;
public int number3;
public List<String> rules = new ArrayList<String>();
public TestRules() {}
public TestRules(int number1, int number2, int number3)
{
super();
this.number1 = number1;
this.number2 = number2;
this.number3 = number3;
}
public void add(String rule) {
rules.add(rule);
}
public String getRule() {
return this.rules.size() > 0 ? this.rules.get(0) : "";
}
@Override
public String toString()
{
return "TestRules [number1=" + number1 + ", number2=" + number2 + ", number3=" + number3 + ", rules=" +
rules.stream().map(s -> s.toString()).collect(Collectors.joining(",")) + "]";
}
}
Console output - result:
2021-07-20 17:02:27.549 ERROR 20212 --- [nio-9016-exec-1] c.g.i.e.p.c.OfficeController : --> Rules Engine
==>[ObjectInsertedEventImpl: getFactHandle()=[fact 0:1:1539328290:1539328290:1:DEFAULT:NON_TRAIT:com.example.drools.TestRules:TestRules [number1=10, number2=20, number3=30, rules=]], getObject()=TestRules [number1=10, number2=20, number3=30, rules=], getKnowledgeRuntime()=KieSession[0], getPropagationContext()=PhreakPropagationContext [entryPoint=EntryPoint::DEFAULT, factHandle=[fact 0:1:1539328290:1539328290:1:DEFAULT:NON_TRAIT:com.example.drools.TestRules:TestRules [number1=10, number2=20, number3=30, rules=]], leftTuple=null, originOffset=-1, propagationNumber=2, rule=null, type=INSERTION]]
noOfRulesFired: 0
TestRules [number1=10, number2=20, number3=30, rules=]
2021-07-20 17:02:28.454 ERROR 20212 --- [nio-9016-exec-1] c.g.i.e.p.c.OfficeController : <-- Rules Engine
What am I missing?
This is no good:
$test:TestRules($test.number1 == 10, $test.number2 == 20)
You can't refer to $test before you declare it. The correct syntax is:
$test: TestRules( number1 == 10, number2 == 20 )
Fix your decision table from $test.number1 == $param to instead be number1 == $param. (And do the same for number2 adjacent.)
The rest looks fine, though I would suggest using a try-with-resources instead of a try-catch in your XLSX parsing method.
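For example, here is a minimal sketch of that XLSX compilation helper rewritten with try-with-resources, assuming the same SpreadsheetCompiler API already used in the configuration above; the input stream is then closed automatically even when compilation fails:
private static void compileXlsToDrl(Resource resource) {
    // try-with-resources closes the spreadsheet InputStream automatically
    try (InputStream is = resource.getInputStream()) {
        SpreadsheetCompiler compiler = new SpreadsheetCompiler();
        String drl = compiler.compile(is, InputType.XLS);
        System.out.println(drl);
    } catch (Exception e) {
        e.printStackTrace();
    }
}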

How to export huge result set from database into several csv files and zip them on the fly?

I need to create a REST controller which extracts data from a database and write it into CSV files that will ultimately be zipped together. Each CSV file should contain exactly 10 lines. Eventually all CSV files should be zipped into a one zip file. I want everything to happen on the fly, meaning - saving files to a temporary location on the disk is not an option. Can someone provide me with an example?
I found a very nice piece of code for exporting a huge number of rows from a database into several CSV files and zipping them.
I think this code can assist a lot of developers.
I have tested the solution and you can find the entire example at: https://github.com/idaamit/stream-from-db/tree/master
The controller is:
@GetMapping(value = "/employees/{employeeId}/cars")
@ResponseStatus(HttpStatus.OK)
public ResponseEntity<StreamingResponseBody> getEmployeeCars(@PathVariable int employeeId) {
log.info("Going to export cars for employee {}", employeeId);
String zipFileName = "Cars Of Employee - " + employeeId;
return ResponseEntity.ok()
.header(HttpHeaders.CONTENT_TYPE, "application/zip")
.header(HttpHeaders.CONTENT_DISPOSITION, "attachment;filename=" + zipFileName + ".zip")
.body(
employee.getCars(dataSource, employeeId));
The Employee class first checks whether we need to prepare more than one CSV or not:
public class Employee {
public StreamingResponseBody getCars(BasicDataSource dataSource, int employeeId) {
StreamingResponseBody streamingResponseBody = new StreamingResponseBody() {
@Override
public void writeTo(OutputStream outputStream) throws IOException {
JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
String sqlQuery = "SELECT [Id], [employeeId], [type], [text1] " +
"FROM Cars " +
"WHERE EmployeeID=? ";
PreparedStatementSetter preparedStatementSetter = new PreparedStatementSetter() {
public void setValues(PreparedStatement preparedStatement) throws SQLException {
preparedStatement.setInt(1, employeeId);
}
};
StreamingZipResultSetExtractor zipExtractor = new StreamingZipResultSetExtractor(outputStream, employeeId, isMoreThanOneFile(jdbcTemplate, employeeId));
Integer numberOfInteractionsSent = jdbcTemplate.query(sqlQuery, preparedStatementSetter, zipExtractor);
}
};
return streamingResponseBody;
}
private boolean isMoreThanOneFile(JdbcTemplate jdbcTemplate, int employeeId) {
Integer numberOfCars = getCount(jdbcTemplate, employeeId);
return numberOfCars >= StreamingZipResultSetExtractor.MAX_ROWS_IN_CSV;
}
private Integer getCount(JdbcTemplate jdbcTemplate, int employeeId) {
String sqlQuery = "SELECT count([Id]) " +
"FROM Cars " +
"WHERE EmployeeID=? ";
return jdbcTemplate.queryForObject(sqlQuery, new Object[] { employeeId }, Integer.class);
}
}
The StreamingZipResultSetExtractor class is responsible for splitting the streamed CSV data into several files and zipping them.
@Slf4j
public class StreamingZipResultSetExtractor implements ResultSetExtractor<Integer> {
private final static int CHUNK_SIZE = 100000;
public final static int MAX_ROWS_IN_CSV = 10;
private OutputStream outputStream;
private int employeeId;
private StreamingCsvResultSetExtractor streamingCsvResultSetExtractor;
private boolean isInteractionCountExceedsLimit;
private int fileCount = 0;
public StreamingZipResultSetExtractor(OutputStream outputStream, int employeeId, boolean isInteractionCountExceedsLimit) {
this.outputStream = outputStream;
this.employeeId = employeeId;
this.streamingCsvResultSetExtractor = new StreamingCsvResultSetExtractor(employeeId);
this.isInteractionCountExceedsLimit = isInteractionCountExceedsLimit;
}
@Override
@SneakyThrows
public Integer extractData(ResultSet resultSet) throws DataAccessException {
log.info("Creating thread to extract data as zip file for employeeId {}", employeeId);
int lineCount = 1; //+1 for header row
try (PipedOutputStream internalOutputStream = streamingCsvResultSetExtractor.extractData(resultSet);
PipedInputStream inputStream = new PipedInputStream(internalOutputStream);
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream))) {
String currentLine;
String header = bufferedReader.readLine() + "\n";
try (ZipOutputStream zipOutputStream = new ZipOutputStream(outputStream)) {
createFile(employeeId, zipOutputStream, header);
while ((currentLine = bufferedReader.readLine()) != null) {
if (lineCount % MAX_ROWS_IN_CSV == 0) {
zipOutputStream.closeEntry();
createFile(employeeId, zipOutputStream, header);
lineCount++;
}
lineCount++;
currentLine += "\n";
zipOutputStream.write(currentLine.getBytes());
if (lineCount % CHUNK_SIZE == 0) {
zipOutputStream.flush();
}
}
}
} catch (IOException e) {
log.error("Task {} could not zip search results", employeeId, e);
}
log.info("Finished zipping all lines to {} file\\s - total of {} lines of data for task {}", fileCount, lineCount - fileCount, employeeId);
return lineCount;
}
private void createFile(int employeeId, ZipOutputStream zipOutputStream, String header) {
String fileName = "Cars for Employee - " + employeeId;
if (isInteractionCountExceedsLimit) {
fileCount++;
fileName += " Part " + fileCount;
}
try {
zipOutputStream.putNextEntry(new ZipEntry(fileName + ".csv"));
zipOutputStream.write(header.getBytes());
} catch (IOException e) {
log.error("Could not create new zip entry for task {} ", employeeId, e);
}
}
}
The StreamingCsvResultSetExtractor class is responsible for transferring the data from the ResultSet into the CSV files. More work is needed to handle special characters that are problematic in CSV cells.
@Slf4j
public class StreamingCsvResultSetExtractor implements ResultSetExtractor<PipedOutputStream> {
private final static int CHUNK_SIZE = 100000;
private PipedOutputStream pipedOutputStream;
private final int employeeId;
public StreamingCsvResultSetExtractor(int employeeId) {
this.employeeId = employeeId;
}
@SneakyThrows
@Override
public PipedOutputStream extractData(ResultSet resultSet) throws DataAccessException {
log.info("Creating thread to extract data as csv and save to file for task {}", employeeId);
this.pipedOutputStream = new PipedOutputStream();
ExecutorService executor = Executors.newSingleThreadExecutor();
executor.submit(() -> {
prepareCsv(resultSet);
});
return pipedOutputStream;
}
@SneakyThrows
private Integer prepareCsv(ResultSet resultSet) {
int interactionsSent = 1;
log.info("starting to extract data to csv lines");
streamHeaders(resultSet.getMetaData());
StringBuilder csvRowBuilder = new StringBuilder();
try {
int columnCount = resultSet.getMetaData().getColumnCount();
while (resultSet.next()) {
for (int i = 1; i < columnCount + 1; i++) {
if(resultSet.getString(i) != null && resultSet.getString(i).contains(",")){
String strToAppend = "\"" + resultSet.getString(i) + "\"";
csvRowBuilder.append(strToAppend);
} else {
csvRowBuilder.append(resultSet.getString(i));
}
csvRowBuilder.append(",");
}
int rowLength = csvRowBuilder.length();
csvRowBuilder.replace(rowLength - 1, rowLength, "\n");
pipedOutputStream.write(csvRowBuilder.toString().getBytes());
interactionsSent++;
csvRowBuilder.setLength(0);
if (interactionsSent % CHUNK_SIZE == 0) {
pipedOutputStream.flush();
}
}
} finally {
pipedOutputStream.flush();
pipedOutputStream.close();
}
log.debug("Created all csv lines for Task {} - total of {} rows", employeeId, interactionsSent);
return interactionsSent;
}
@SneakyThrows
private void streamHeaders(ResultSetMetaData resultSetMetaData) {
StringBuilder headersCsvBuilder = new StringBuilder();
for (int i = 1; i < resultSetMetaData.getColumnCount() + 1; i++) {
headersCsvBuilder.append(resultSetMetaData.getColumnLabel(i)).append(",");
}
int rowLength = headersCsvBuilder.length();
headersCsvBuilder.replace(rowLength - 1, rowLength, "\n");
pipedOutputStream.write(headersCsvBuilder.toString().getBytes());
}
}
In order to test this, you need to execute http://localhost:8080/stream-demo/employees/3/cars
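As a rough usage sketch, you could also stream the zipped response straight to disk with the JDK 11+ HTTP client; the URL matches the test call above, while the class name and output file name are just illustrative choices:
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.file.Path;

public class DownloadCarsZip {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/stream-demo/employees/3/cars"))
                .build();
        // Stream the zipped CSVs directly to a file instead of buffering the whole body in memory
        HttpResponse<Path> response = client.send(request,
                HttpResponse.BodyHandlers.ofFile(Path.of("cars-of-employee-3.zip")));
        System.out.println("Status: " + response.statusCode() + ", saved to " + response.body());
    }
}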

Performance issue on JavaFX TableView with complex TableColumn

I'm trying to create a JavaFX TableView with "tower-shape" columns. But the performance is very bad when there are more than 1000 columns in the TableView.
Here is my code:
public class PaneDemo extends Application {
private TableView<String> table = new TableView<>();
private Long timeStart;
private Long timeEnd;
public static void main(String[] args) {
launch(args);
}
private static int getTotal(int layer) {
int total = 0;
int start = 1;
for (int i = 0; i < layer - 1; i++) {
start *= 5;
total += start;
}
System.out.println("Total Columns: " + (total + start * 5));
return total;
}
@Override
public void start(Stage stage) {
Scene scene = new Scene(new Group());
AnchorPane anchorPane = new AnchorPane();
anchorPane.setPrefWidth(900);
anchorPane.setPrefHeight(600);
table.setColumnResizePolicy(TableView.UNCONSTRAINED_RESIZE_POLICY);
new Thread(() -> {
timeStart = System.currentTimeMillis();
//init first layer
TableColumn<String, String> Test1 = new TableColumn<>("Test1");
TableColumn<String, String> Test2 = new TableColumn<>("Test2");
TableColumn<String, String> Test3 = new TableColumn<>("Test3");
TableColumn<String, String> Test4 = new TableColumn<>("Test4");
TableColumn<String, String> Test5 = new TableColumn<>("Test5");
Queue<TableColumn<String, ?>> queue = new LinkedList<>();
table.getColumns().addAll(Test1, Test2, Test3, Test4, Test5);
table.getItems().add("test");
queue.addAll(table.getColumns());
int index = 0;
// set the layer of the column tower
int temp = getTotal(4);
while (index < temp) {
TableColumn<String, ?> root = queue.poll();
TableColumn<String, String> test1 = new TableColumn<>("test1");
TableColumn<String, String> test2 = new TableColumn<>("test2");
TableColumn<String, String> test3 = new TableColumn<>("test3");
TableColumn<String, String> test4 = new TableColumn<>("test4");
TableColumn<String, String> test5 = new TableColumn<>("test5");
root.getColumns().addAll(test1, test2, test3, test4, test5);
queue.addAll(root.getColumns());
index++;
}
while (!queue.isEmpty()) {
generateCellFactory((TableColumn<String, String>) queue.poll());
}
table.prefHeightProperty().bind(anchorPane.heightProperty());
table.prefWidthProperty().bind(anchorPane.widthProperty());
anchorPane.getChildren().add(table);
((Group) scene.getRoot()).getChildren().addAll(anchorPane);
stage.setScene(scene);
stage.show();
Platform.runLater(() -> {
timeEnd = System.currentTimeMillis();
System.out.println("Layout Time: " + (timeEnd - timeStart) + "ms");
});
}).run();
}
private <T> void generateCellFactory(TableColumn<T, String> column) {
column.setCellFactory(cell -> {
return new TableCell<T, String>() {
@Override
protected void updateItem(String item, boolean empty) {
super.updateItem(item, empty);
setText("Test");
}
};
});
}
}
On my PC the performance is like this:
Total Columns: 780
Layout Time: 9810ms
Total Columns: 3905
Layout Time: 43920ms
Is there any way I can improve the performance? Or can some kind of pagination be used on TableColumn?

Reduce doesn't run but job is successfully completed

Firstly, I am a newbie at Hadoop MapReduce. My reducer does not run, but the job shows as successfully completed. Below is my console output:
INFO mapreduce.Job: Running job: job_1418240815217_0015
INFO mapreduce.Job: Job job_1418240815217_0015 running in uber mode : false
INFO mapreduce.Job: map 0% reduce 0%
INFO mapreduce.Job: map 100% reduce 0%
INFO mapreduce.Job: Job job_1418240815217_0015 completed successfully
INFO mapreduce.Job: Counters: 30
The main class is :
public static void main(String[] args) throws Exception {
Configuration conf = new Configuration();
@SuppressWarnings("deprecation")
Job job = new Job(conf,"NPhase2");
job.setJarByClass(NPhase2.class);
job.setMapOutputKeyClass(IntWritable.class);
job.setMapOutputValueClass(NPhase2Value.class);
job.setOutputKeyClass(NullWritable.class);
job.setOutputValueClass(Text.class);
job.setMapperClass(MapClass.class);
job.setReducerClass(Reduce.class);
int numberOfPartition = 0;
List<String> other_args = new ArrayList<String>();
for(int i = 0; i < args.length; ++i)
{
try {
if ("-m".equals(args[i])) {
//conf.setNumMapTasks(Integer.parseInt(args[++i]));
++i;
} else if ("-r".equals(args[i])) {
job.setNumReduceTasks(Integer.parseInt(args[++i]));
} else if ("-k".equals(args[i])) {
int knn = Integer.parseInt(args[++i]);
conf.setInt("knn", knn);
System.out.println(knn);
} else {
other_args.add(args[i]);
}
job.setNumReduceTasks(numberOfPartition * numberOfPartition);
//conf.setNumReduceTasks(1);
} catch (NumberFormatException except) {
System.out.println("ERROR: Integer expected instead of " + args[i]);
} catch (ArrayIndexOutOfBoundsException except) {
System.out.println("ERROR: Required parameter missing from " + args[i-1]);
}
}
// Make sure there are exactly 2 parameters left.
if (other_args.size() != 2) {
System.out.println("ERROR: Wrong number of parameters: " +
other_args.size() + " instead of 2.");
}
FileInputFormat.setInputPaths(job, other_args.get(0));
FileOutputFormat.setOutputPath(job, new Path(other_args.get(1)));
System.exit(job.waitForCompletion(true) ? 0 : 1);
}
My mapper is :
public static class MapClass extends Mapper
{
public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException
{
String line = value.toString();
String[] parts = line.split("\\s+");
// key format <rid1>
IntWritable mapKey = new IntWritable(Integer.valueOf(parts[0]));
// value format <rid2, dist>
NPhase2Value np2v = new NPhase2Value(Integer.valueOf(parts[1]), Float.valueOf(parts[2]));
context.write(mapKey, np2v);
}
}
My reducer class is :
public static class Reduce extends Reducer<IntWritable, NPhase2Value, NullWritable, Text>
{
int numberOfPartition;
int knn;
class Record
{
public int id2;
public float dist;
Record(int id2, float dist)
{
this.id2 = id2;
this.dist = dist;
}
public String toString()
{
return Integer.toString(id2) + " " + Float.toString(dist);
}
}
class RecordComparator implements Comparator<Record>
{
public int compare(Record o1, Record o2)
{
int ret = 0;
float dist = o1.dist - o2.dist;
if (Math.abs(dist) < 1E-6)
ret = o1.id2 - o2.id2;
else if (dist > 0)
ret = 1;
else
ret = -1;
return -ret;
}
}
public void setup(Context context)
{
Configuration conf = new Configuration();
conf = context.getConfiguration();
numberOfPartition = conf.getInt("numberOfPartition", 2);
knn = conf.getInt("knn", 3);
}
public void reduce(IntWritable key, Iterator<NPhase2Value> values, Context context) throws IOException, InterruptedException
{
//initialize the pq
RecordComparator rc = new RecordComparator();
PriorityQueue<Record> pq = new PriorityQueue<Record>(knn + 1, rc);
// For each record we have a reduce task
// value format <rid1, rid2, dist>
while (values.hasNext())
{
NPhase2Value np2v = values.next();
int id2 = np2v.getFirst().get();
float dist = np2v.getSecond().get();
Record record = new Record(id2, dist);
pq.add(record);
if (pq.size() > knn)
pq.poll();
}
while(pq.size() > 0)
{
context.write(NullWritable.get(), new Text(key.toString() + " " + pq.poll().toString()));
//break; // only ouput the first record
}
} // reduce
}
This is my helper class :
public class NPhase2Value implements WritableComparable {
private IntWritable first;
private FloatWritable second;
public NPhase2Value() {
set(new IntWritable(), new FloatWritable());
}
public NPhase2Value(int first, float second) {
set(new IntWritable(first), new FloatWritable(second));
}
public void set(IntWritable first, FloatWritable second) {
this.first = first;
this.second = second;
}
public IntWritable getFirst() {
return first;
}
public FloatWritable getSecond() {
return second;
}
@Override
public void write(DataOutput out) throws IOException {
first.write(out);
second.write(out);
}
@Override
public void readFields(DataInput in) throws IOException {
first.readFields(in);
second.readFields(in);
}
@Override
public boolean equals(Object o) {
if (o instanceof NPhase2Value) {
NPhase2Value np2v = (NPhase2Value) o;
return first.equals(np2v.first) && second.equals(np2v.second);
}
return false;
}
@Override
public String toString() {
return first.toString() + " " + second.toString();
}
@Override
public int compareTo(NPhase2Value np2v) {
return 1;
}
}
The command line command I use is :
hadoop jar knn.jar NPhase2 -m 1 -r 3 -k 4 phase1out phase2out
I am trying hard to figure out the error but am still not able to come up with a solution. Please help me with this, as I am running on a tight schedule.
This happens because you have set the number of reduce tasks to 0. See this:
int numberOfPartition = 0;
//.......
job.setNumReduceTasks(numberOfPartition * numberOfPartition);
I don't see numberOfPartition being reset anywhere in your code. I think you should either set it where you parse the -r option, or remove the setNumReduceTasks call shown above entirely, since you already set the number of reduce tasks while parsing -r; a sketch of that second option follows.
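As a minimal sketch of that second option, reusing the job, conf and other_args variables from the main method in the question, the argument-parsing loop would simply stop overwriting the reduce task count:
for (int i = 0; i < args.length; ++i) {
    try {
        if ("-m".equals(args[i])) {
            ++i; // number of map tasks is ignored here
        } else if ("-r".equals(args[i])) {
            // the only place the reduce task count is set
            job.setNumReduceTasks(Integer.parseInt(args[++i]));
        } else if ("-k".equals(args[i])) {
            int knn = Integer.parseInt(args[++i]);
            conf.setInt("knn", knn);
        } else {
            other_args.add(args[i]);
        }
        // note: no job.setNumReduceTasks(numberOfPartition * numberOfPartition) here,
        // so the value set while parsing -r is no longer overwritten with 0
    } catch (NumberFormatException except) {
        System.out.println("ERROR: Integer expected instead of " + args[i]);
    } catch (ArrayIndexOutOfBoundsException except) {
        System.out.println("ERROR: Required parameter missing from " + args[i - 1]);
    }
}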
