Commit 1e95bce

Added documentation to chunk-optional-processor project.
1 parent 197ebb2 commit 1e95bce

2 files changed: +38 −3 lines changed

batch/chunk-optional-processor/pom.xml (+2 −2)

@@ -10,8 +10,8 @@
 
     <artifactId>chunk-optional-processor</artifactId>
     <packaging>war</packaging>
-
-    <name>${project.artifactId}</name>
+    <name>Batch Chunk Optional Processor</name>
+    <description>Chunk Processing - Read and Write</description>
 
     <dependencies>
         <dependency>

batch/chunk-optional-processor/src/test/java/org/javaee7/batch/chunk/optional/processor/BatchChunkOptionalProcessorTest.java (+36 −1)

@@ -19,10 +19,30 @@
 import static org.junit.Assert.assertEquals;
 
 /**
+ * The Batch specification provides a Chunk Oriented processing style. This style is defined by enclosing into a
+ * transaction a set of read, process and write operations via +javax.batch.api.chunk.ItemReader+,
+ * +javax.batch.api.chunk.ItemProcessor+ and +javax.batch.api.chunk.ItemWriter+. Items are read one at a time,
+ * processed and aggregated. The transaction is then committed when the defined +checkpoint-policy+ is triggered.
+ *
+ * include::myJob.xml[]
+ *
+ * A very simple job is defined in the +myJob.xml+ file. The processor is optional, so this is just a single step
+ * with a reader and a writer.
+ *
  * @author Roberto Cortez
  */
 @RunWith(Arquillian.class)
 public class BatchChunkOptionalProcessorTest {
+    /**
+     * We're just going to deploy the application as a +web archive+. Note the inclusion of the following file:
+     *
+     * [source,file]
+     * ----
+     * /META-INF/batch-jobs/myJob.xml
+     * ----
+     *
+     * The +myJob.xml+ file is needed for running the batch definition.
+     */
     @Deployment
     public static WebArchive createDeployment() {
         WebArchive war = ShrinkWrap.create(WebArchive.class)
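The job definition pulled in by the include::myJob.xml[] directive is not part of this diff. Based on the description above (a single chunk step with a reader and a writer, no processor, and a commit every 3 items), a minimal sketch of that file would look roughly like the following; the reader and writer ref names are placeholders and may differ from the actual project:

<?xml version="1.0" encoding="UTF-8"?>
<!-- Minimal JSR 352 job sketch: one chunk step with reader and writer only, commit every 3 items. -->
<job id="myJob" xmlns="http://xmlns.jcp.org/xml/ns/javaee" version="1.0">
    <step id="myStep">
        <chunk item-count="3">
            <reader ref="myItemReader"/>
            <writer ref="myItemWriter"/>
        </chunk>
    </step>
</job>

The step id myStep matches the name the test looks up in the step executions, and item-count="3" is what produces the commit count asserted further down.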
@@ -34,6 +54,15 @@ public static WebArchive createDeployment() {
         return war;
     }
 
+    /**
+     * In the test, we're just going to invoke the batch execution and wait for completion. To validate the expected
+     * behaviour of the test we need to query the +Metric[]+ object available in the step execution.
+     *
+     * The batch process itself will read 10 elements from numbers 1 to 10, and write the same elements. Commits are
+     * executed after 3 elements are read.
+     *
+     * @throws Exception an exception if the batch could not complete successfully.
+     */
     @Test
     public void testBatchChunkOptionalProcessor() throws Exception {
         JobOperator jobOperator = BatchRuntime.getJobOperator();
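The MyItemReader and MyItemWriter classes referenced by the job are also not shown in this commit. Given the behaviour described in the javadoc (read the numbers 1 to 10 and write them unchanged, with no processor in between), they could be sketched roughly as follows; the class names echo the comments below, but the bodies are an assumption, not the project's actual implementation:

// MyItemReader.java (sketch): produces the numbers 1 to 10, then returns null to signal end of input.
import javax.batch.api.chunk.AbstractItemReader;
import javax.inject.Named;

@Named
public class MyItemReader extends AbstractItemReader {
    private int current = 0;

    @Override
    public Object readItem() {
        return current < 10 ? ++current : null;
    }
}

// MyItemWriter.java (sketch): receives each committed chunk (up to 3 items) and writes it out.
import java.util.List;
import javax.batch.api.chunk.AbstractItemWriter;
import javax.inject.Named;

@Named
public class MyItemWriter extends AbstractItemWriter {
    @Override
    public void writeItems(List<Object> items) {
        System.out.println("Writing chunk: " + items);
    }
}

With this pairing the runtime reports a read count of 10 and a write count of 10, which is what the assertions in the next hunk check.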
@@ -46,12 +75,18 @@ public void testBatchChunkOptionalProcessor() throws Exception {
         for (StepExecution stepExecution : stepExecutions) {
             if (stepExecution.getStepName().equals("myStep")) {
                 Map<Metric.MetricType, Long> metricsMap = BatchTestHelper.getMetricsMap(stepExecution.getMetrics());
+
+                // <1> The read count should be 10 elements. Check +MyItemReader+.
                 assertEquals(10L, metricsMap.get(Metric.MetricType.READ_COUNT).longValue());
+                // <2> The write count should be 10. No processor is defined, so every element read is also written.
                 assertEquals(10L, metricsMap.get(Metric.MetricType.WRITE_COUNT).longValue());
-                assertEquals(10L / 3 + (10L % 3 > 0 ? 1 : 0), metricsMap.get(Metric.MetricType.COMMIT_COUNT).longValue());
+                // <3> The commit count should be 4. The checkpoint is on every 3rd read, so 4 commits for 10 read elements.
+                assertEquals(10L / 3 + (10L % 3 > 0 ? 1 : 0),
+                    metricsMap.get(Metric.MetricType.COMMIT_COUNT).longValue());
             }
         }
 
+        // <4> Job should be completed.
         assertEquals(BatchStatus.COMPLETED, jobExecution.getBatchStatus());
     }
 }
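BatchTestHelper is a small utility shared by the batch samples and is not part of this commit. Its getMetricsMap method, used in the assertions above, presumably just indexes the Metric[] array by metric type, roughly along these lines (a sketch, not the exact helper from the repository):

import java.util.HashMap;
import java.util.Map;
import javax.batch.runtime.Metric;

// Sketch of the helper used above: map each Metric to its type so the test can
// look up READ_COUNT, WRITE_COUNT and COMMIT_COUNT directly.
public final class BatchTestHelper {

    private BatchTestHelper() {
    }

    public static Map<Metric.MetricType, Long> getMetricsMap(Metric[] metrics) {
        Map<Metric.MetricType, Long> metricsMap = new HashMap<>();
        for (Metric metric : metrics) {
            metricsMap.put(metric.getType(), metric.getValue());
        }
        return metricsMap;
    }
}

As for the commit count: with 10 items read and a checkpoint every 3 items, 10 / 3 + (10 % 3 > 0 ? 1 : 0) works out to 3 + 1 = 4 commits, which is the value the test expects.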
