
I am using Spring Batch to process an uploaded file. When I try to read a file that has 30K records, only 18K rows are read and processed; the remaining records are never considered. These are the beans declared in my config:

<job id="JobSeq1" restartable="true" parent="ParentJob">
		<step id="DelegateTask" next = "FileParser" >
			<tasklet ref = "JobTask">
				<listeners>
						<listener ref="PromotionListener" />
				</listeners>
			</tasklet>	
		</step>
		
		<step id="FileParser" next = "FileStaggingProcessor" >
			<tasklet ref = "FileParserTask">
				<listeners>
						<listener ref="PromotionListener" />
				</listeners>
			</tasklet>	
		</step>
		
		<step id="FileStaggingProcessor">
			<tasklet>
				<chunk reader="FileReader" writer="FileWriter"
					commit-interval="100" skip-limit="100">
						<skippable-exception-classes>
							<include class="org.springframework.batch.item.file.FlatFileParseException" />
						</skippable-exception-classes>	
				</chunk>
			</tasklet>
		</step>
		</job>
		
		<beans:bean id="FileReader"
		class="org.com.myProj.FileReader">	
			<beans:property name="linesToSkip" value="1"/> 
			<beans:property name="lineMapper">
			<beans:bean
				class="org.springframework.batch.item.file.mapping.DefaultLineMapper">
			
				<beans:property name="lineTokenizer">
					<beans:bean
						class="org.springframework.batch.item.file.transform.DelimitedLineTokenizer">
						<beans:property name="delimiter" value="," />	
					</beans:bean>
				</beans:property>
				<beans:property name="fieldSetMapper" ref="myMapper">
				</beans:property>
			</beans:bean>
		</beans:property>
		<beans:property name="saveState" value="true"></beans:property>				
	</beans:bean>
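
For context, the PromotionListener referenced by both tasklet steps is presumably Spring Batch's ExecutionContextPromotionListener, which copies keys from the step's ExecutionContext into the job's. A minimal sketch of equivalent wiring, shown in Java config for brevity; the promoted key name inputFilePath is an assumption:

import org.springframework.batch.core.listener.ExecutionContextPromotionListener;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class PromotionListenerConfig {

    // Sketch only: promotes the (assumed) "inputFilePath" key from the step's
    // ExecutionContext to the job's, so later steps can resolve it.
    @Bean
    public ExecutionContextPromotionListener promotionListener() {
        ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener();
        listener.setKeys(new String[] { "inputFilePath" });
        return listener;
    }
}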
		

And the reader looks like this:

import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.StepExecutionListener;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.core.io.FileSystemResource;

public class FileReader extends FlatFileItemReader<CombMappingVO> implements StepExecutionListener {

    private String inputFilePath; // injected elsewhere (not shown in the original snippet)

    @Override
    public void beforeStep(StepExecution stepExecution) {
        // Null-check first: the original called isEmpty() before the null check,
        // which would throw a NullPointerException when the path is absent.
        // ObjectValidator and Helper are project classes.
        if (!ObjectValidator.isObjectNull(inputFilePath) && !inputFilePath.isEmpty()) {
            String csvInputFilePath = new Helper().convert2CSV(inputFilePath);
            super.setResource(new FileSystemResource(csvInputFilePath));
        }
    }

    @Override
    public ExitStatus afterStep(StepExecution stepExecution) {
        return null; // StepExecutionListener requires afterStep; no-op here
    }
}

I use a mapper to load the file data into a temp table, and I can see exactly 18K records there. Is there a way to identify where the problem is?
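
One way to narrow this down is to dump the step's counters once FileStaggingProcessor finishes: if the skip counters account for the missing rows, records are being silently dropped by the skip policy; if readCount itself stops at 18K, the reader never sees the rest of the file. A minimal diagnostic sketch (registering the listener on the step is omitted):

import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.StepExecutionListener;

// Diagnostic sketch: logs how many items were read, skipped, and written,
// which shows whether the missing rows were skipped or never read at all.
public class CountLoggingListener implements StepExecutionListener {

    @Override
    public void beforeStep(StepExecution stepExecution) {
        // nothing to do before the step
    }

    @Override
    public ExitStatus afterStep(StepExecution stepExecution) {
        System.out.println("read=" + stepExecution.getReadCount()
                + ", readSkips=" + stepExecution.getReadSkipCount()
                + ", processSkips=" + stepExecution.getProcessSkipCount()
                + ", writeSkips=" + stepExecution.getWriteSkipCount()
                + ", written=" + stepExecution.getWriteCount());
        return stepExecution.getExitStatus();
    }
}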

  • Have you tried ItemReadListener yet? It supports beforeRead, afterRead, and onReadError (see the first sketch below). Commented Jan 25, 2018 at 13:31
  • I wouldn't recommend extending FlatFileItemReader the way you have. I'd recommend using a FactoryBean for the resource, as I describe here: stackoverflow.com/questions/48407408/… (see the second sketch below). After the 18K records, does the reader return null, or is there an exception? Commented Jan 25, 2018 at 15:18
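
A minimal sketch of the ItemReadListener suggested in the first comment; the logging is illustrative only, and the listener would still need to be registered on the chunk step:

import org.springframework.batch.core.ItemReadListener;

// Sketch only: counts successful reads and reports read errors, which helps
// pinpoint whether the reader stops cleanly at 18K or hits an exception.
public class CountingReadListener implements ItemReadListener<CombMappingVO> {

    private int readCount = 0;

    @Override
    public void beforeRead() {
        // called before each read attempt
    }

    @Override
    public void afterRead(CombMappingVO item) {
        readCount++;
    }

    @Override
    public void onReadError(Exception ex) {
        // with the skip config above, a FlatFileParseException here is swallowed
        System.err.println("Read error after " + readCount + " items: " + ex);
    }
}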
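And a minimal sketch of the FactoryBean approach from the second comment, assuming the file path has been promoted to the job's ExecutionContext under a hypothetical inputFilePath key; the reader would then be a plain step-scoped FlatFileItemReader whose resource property points at this bean:

import org.springframework.beans.factory.FactoryBean;
import org.springframework.core.io.FileSystemResource;
import org.springframework.core.io.Resource;

// Sketch only: builds the reader's Resource from a late-bound path, e.g.
// injected with #{jobExecutionContext['inputFilePath']} on a step-scoped bean.
public class InputResourceFactoryBean implements FactoryBean<Resource> {

    private String inputFilePath; // hypothetical property set via late binding

    public void setInputFilePath(String inputFilePath) {
        this.inputFilePath = inputFilePath;
    }

    @Override
    public Resource getObject() {
        return new FileSystemResource(inputFilePath);
    }

    @Override
    public Class<?> getObjectType() {
        return Resource.class;
    }

    @Override
    public boolean isSingleton() {
        return false;
    }
}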
