package com.lh.digital.integration.claimdatahandler.utility;
import com.lh.digital.integration.claimdatahandler.model.Claim;
import com.mongodb.MongoClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecutionListener;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.core.step.skip.SkipPolicy;
import org.springframework.batch.item.data.MongoItemWriter;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.mapping.FieldSetMapper;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.core.io.ClassPathResource;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
/**
*
* @author RAVI VARMA YARAkARAJU
*/
/*
* BatchConfiguration is designed to handle the points below.
*
* Conversion File: For each type of file supplied by or generated for an external system, a conversion application
* needs to be created to convert the supplied transaction records into the standard format required for processing.
*
* Validation of File: File validation ensures that all input/output records are correct and consistent.
* Validation is typically based on file headers and trailers, checksums and validation algorithms, as well as
* record-level cross-checks. Any records that fail validation are written to the error log file.
*
* Extract File: An application that reads a set of records from an input file, selects records based
* on predefined rules, and writes the successfully validated records to an output file.
*
* Output/Format File: An application that reads an input file, restructures each record according to a
* standard format, validates the data, and produces output that is stored in MongoDB. Errors are logged to
* the error log file.
*
*
*/
@Configuration
@EnableBatchProcessing
@PropertySource("classpath:config.properties")
public class BatchConfiguration {
private static final Logger log = LoggerFactory.getLogger(BatchConfiguration.class);
/*
* Convenient factory, provided by Spring, for a JobBuilder which sets the JobRepository automatically.
* Creates a job builder and initializes its job repository.
* JobBuilderFactory(JobRepository jobRepository)
*
* */
@Autowired
public JobBuilderFactory jobBuilderFactory;
/*
*
* Convenient factory for a StepBuilder which sets the JobRepository and PlatformTransactionManager automatically.
* StepBuilderFactory(JobRepository jobRepository,
* org.springframework.transaction.PlatformTransactionManager transactionManager)
* Creates a step builder and initializes its job repository and transaction manager. Note that if the builder
* is used to create a @Bean definition then the name of the step and the bean name might be different.
* */
@Autowired
public StepBuilderFactory stepBuilderFactory;
/*
* The properties below are read from the config.properties file, where they can be adjusted as needed:
* {@code}
* datafile.delimiter = ,
* datafile.setname = "firstName","lastName"
* datafile.dbname = claimdata
* datafile.flatFile = sample-data.csv
* datafile.dbcollectionname = claimfiledata
* datafile.errorFile= ./errorLog.txt
* datafile.skipLines= 0
* */
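/*
* Illustrative only: with the sample values above (delimiter "," and setname "firstName","lastName"),
* a matching data row in sample-data.csv would look like the placeholder line below (not data taken
* from this project):
*
* John,Doe
* */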
@Value("${datafile.dbcollectionname}")
private String dbCollectionName;
@Value("${datafile.dbname}")
private String dbName;
@Value("${datafile.flatFile}")
private String flatFile;
@Value("${datafile.setname}")
private String[] setNames;
@Value("${datafile.delimiter}")
private String delimiter;
@Value("${datafile.skipLines}")
private int skipLines;
/*
*
* Primary implementation of MongoOperations
*
* MongoTemplate(MongoDbFactory mongoDbFactory) Constructor used for a basic template configuration.
* */
MongoTemplate mongoTemplate;
// tag::readerwriterprocessor[]
/*
*
* method : reader()
*
* @param args Unused.
* @return FlatFileItemReader.
* @exception FlatFileParseException
* @see FlatFileParseException
*
*
* Description:
* Restartable ItemReader that reads lines from the input set via setResource(Resource) --> the input is the flat file name.
*
* The linesToSkip attribute indicates the number of lines the reader should ignore; it is 0 here because the input
* file has no header row.
*
* To map each line, reader() uses the Spring Batch DefaultLineMapper implementation, which requires a
* line tokenizer component to split the line contents into individual fields.
*
* The tokenizer used is DelimitedLineTokenizer, which is configured with a list of field names.
*
* The DelimitedLineTokenizer splits the line into tokens that are later referenced by the token names defined.
*
* The delimiter attribute specifies the delimiter used to tokenize each line of the input file. In this instance our
* input file is comma delimited.
*
* The fieldSetMapper attribute refers to the custom ClaimsFieldSetMapper, which takes a FieldSet and maps
* the fields to instance variables on the Claim POJO.
*
*
* A line is defined by setRecordSeparatorPolicy(RecordSeparatorPolicy) and mapped to an
* item using setLineMapper(LineMapper). If an exception is thrown during line mapping it is rethrown as a
* FlatFileParseException, adding information about the problematic line and its line number.
* */
@Bean
public FlatFileItemReader<Claim> reader() {
FlatFileItemReader<Claim> fileReader = new FlatFileItemReader<Claim>();
fileReader.setResource(new ClassPathResource(flatFile));
fileReader.setLinesToSkip(skipLines);
DefaultLineMapper<Claim> lineMapper = new DefaultLineMapper<Claim>();
DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
tokenizer.setDelimiter(delimiter);
tokenizer.setNames(setNames);
FieldSetMapper<Claim> fieldSetMapper = new ClaimsFieldSetMapper();
lineMapper.setLineTokenizer(tokenizer);
lineMapper.setFieldSetMapper(fieldSetMapper);
fileReader.setLineMapper(lineMapper);
return fileReader;
}
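/*
* Illustrative only: a minimal sketch of what a FieldSetMapper along the lines of ClaimsFieldSetMapper
* could look like. It assumes Claim exposes setters matching the datafile.setname tokens
* (firstName, lastName); the project's actual ClaimsFieldSetMapper may map more fields or add validation.
* Assumed imports: org.springframework.batch.item.file.transform.FieldSet and
* org.springframework.validation.BindException.
*
* public class ClaimsFieldSetMapper implements FieldSetMapper<Claim> {
*     @Override
*     public Claim mapFieldSet(FieldSet fieldSet) throws BindException {
*         Claim claim = new Claim();
*         // read each token by the name configured on the DelimitedLineTokenizer
*         claim.setFirstName(fieldSet.readString("firstName"));
*         claim.setLastName(fieldSet.readString("lastName"));
*         return claim;
*     }
* }
* */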
/*
* method : processor()
*
* Description:
* This method returns the ClaimsItemProcessor that processes each item as part of the execution of step1().
*
*
* @param args for processor () none.
* @return ClaimsItemProcessor.
*
* */
@Bean
public ClaimsItemProcessor processor() {
return new ClaimsItemProcessor();
}
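/*
* Illustrative only: a minimal sketch of an ItemProcessor along the lines of ClaimsItemProcessor,
* assuming it simply validates and passes each Claim through. The project's actual processor logic
* (field checks, transformations) may differ. Assumed import: org.springframework.batch.item.ItemProcessor.
*
* public class ClaimsItemProcessor implements ItemProcessor<Claim, Claim> {
*     @Override
*     public Claim process(Claim claim) throws Exception {
*         // returning null here would filter the record out of the chunk; this sketch passes it through
*         return claim;
*     }
* }
* */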
/*
* method : mongoDbFactory()
*
* Description:
* This method will create a db factory based on dbname provided.
*
*
* @param args for mongoDbFactory () none.
* @return SimpleMongoDbFactory.
*
* */
@Bean
public MongoDbFactory mongoDbFactory() throws Exception {
return new SimpleMongoDbFactory(new MongoClient(), dbName);
}
/*
* method : mongoTemplate()
*
* Description:
* This method will create a mongo template based on mongoDbFactory() and that can be used to connect to mongo db.
*
* @param args for mongoTemplate () none.
* @return MongoTemplate.
*
* */
@Bean
public MongoTemplate mongoTemplate() throws Exception {
MongoTemplate mongoTemplate = new MongoTemplate(mongoDbFactory());
this.mongoTemplate = mongoTemplate;
return mongoTemplate;
}
/*
* method : mongoRecordWriter()
*
* Description:
* This method creates the writer that stores validated records in MongoDB and returns it to step1() to complete the job.
*
* @param args for mongoRecordWriter () none.
* @return MongoItemWriter which implements ItemWriter.
*
* */
@Bean
public MongoItemWriter<Claim> mongoRecordWriter() {
MongoItemWriter<Claim> writer = new MongoItemWriter<Claim>();
try {
writer.setTemplate(mongoTemplate());
} catch (Exception e) {
log.error("Unable to obtain MongoTemplate for the item writer", e);
}
writer.setCollection(dbCollectionName);
return writer;
}
// end::readerwriterprocessor[]
/*
* method : fileVerificationSkipper()
*
* Description:
* If an error occurs while step1() is running, the skip policy configured via
* faultTolerant().skipPolicy(fileVerificationSkipper()) is consulted; based on the skip count it decides whether to
* skip the record, and once the limit is exceeded an exception is thrown.
*
* @param args for fileVerificationSkipper () none.
* @return SkipPolicy object.
* @throws SkipLimitExceededException
*
* */
@Bean
public SkipPolicy fileVerificationSkipper() {
return new FileVerificationSkipper();
}
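/*
* Illustrative only: a minimal sketch of a SkipPolicy along the lines of FileVerificationSkipper,
* assuming it skips a limited number of FlatFileParseExceptions (the limit of 5 is an assumption)
* and logs the offending line. Assumed imports: org.springframework.batch.item.file.FlatFileParseException
* and org.springframework.batch.core.step.skip.SkipLimitExceededException.
*
* public class FileVerificationSkipper implements SkipPolicy {
*     private static final Logger log = LoggerFactory.getLogger(FileVerificationSkipper.class);
*
*     @Override
*     public boolean shouldSkip(Throwable exception, int skipCount) throws SkipLimitExceededException {
*         // skip a limited number of unparsable lines; anything else fails the step
*         if (exception instanceof FlatFileParseException && skipCount < 5) {
*             FlatFileParseException parseError = (FlatFileParseException) exception;
*             log.error("Skipping line {}: {}", parseError.getLineNumber(), parseError.getInput());
*             return true;
*         }
*         return false;
*     }
* }
* */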
// tag::listener[]
/*
* method : listener()
*
* Description:
* This method registers the listener that records the status of the batch execution.
* The JobCompletionNotificationListener class overrides the afterJob() method.
*
* @param args for listener() none.
* @return JobExecutionListener object.
*
* */
@Bean
public JobExecutionListener listener() {
return new JobCompletionNotificationListener();
}
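/*
* Illustrative only: a minimal sketch of a listener along the lines of JobCompletionNotificationListener,
* assuming it extends JobExecutionListenerSupport and logs the final status in afterJob(). The project's
* actual listener may also inspect the written records or send notifications. Assumed imports:
* org.springframework.batch.core.BatchStatus, org.springframework.batch.core.JobExecution and
* org.springframework.batch.core.listener.JobExecutionListenerSupport.
*
* public class JobCompletionNotificationListener extends JobExecutionListenerSupport {
*     private static final Logger log = LoggerFactory.getLogger(JobCompletionNotificationListener.class);
*
*     @Override
*     public void afterJob(JobExecution jobExecution) {
*         // record the final status of the run
*         if (jobExecution.getStatus() == BatchStatus.COMPLETED) {
*             log.info("Batch job completed; validated claims have been written to MongoDB.");
*         } else {
*             log.warn("Batch job finished with status {}", jobExecution.getStatus());
*         }
*     }
* }
* */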
// end::listener[]
// tag::jobstep[]
/*
* method : importUserJob()
*
* Description:
*
* Creates a job builder and initializes its job repository. Note that if the builder is used to create a @Bean
* definition then the name of the job and the bean name might be different.
*
* jobBuilderFactory.get("jobName") i.e., importUserJob
* Parameters:
* name - the name of the job
* Returns:
* job builder
* Run the JobExecution and update the meta information like status and statistics as necessary. This method should
* not throw any exceptions for failed execution. Clients should be careful to inspect the JobExecution status
* to determine success or failure.
*
* @param args for importUserJob () none.
* @return Job.
*
* */
@Bean
public Job importUserJob() throws Exception {
return jobBuilderFactory.get("importUserJob")
.incrementer(new RunIdIncrementer())
.listener(listener())
.flow(step1())
.end()
.build();
}
/*
* method : step1()
*
* Description:
* Spring Batch uses a 'Chunk Oriented' processing style within its most common implementation. Chunk oriented processing
* refers to reading the data one at a time, and creating 'chunks' that will be written out, within a transaction
* boundary. One item is read in from an ItemReader, handed to an ItemProcessor, and aggregated. Once the number of
* items read equals the commit interval, the entire chunk is written out via the ItemWriter, and then the transaction
* is committed.
*
* public StepBuilder get(java.lang.String name)
* Creates a step builder and initializes its job repository and transaction manager. Note that if the builder is
* used to create a @Bean definition then the name of the step and the bean name might be different.
* Parameters:
* name - the name of the step (i.e., step1)
* Returns:
* a step builder (Step)
*
* @param args Unused.
* @return Step.
* @exception FlatFileParseException on input error, handled by the skip policy from fileVerificationSkipper().
* @see FlatFileParseException
*
* */
@Bean
public Step step1() {
return stepBuilderFactory.get("step1")
.<Claim, Claim> chunk(1)
.reader(reader())
.faultTolerant().skipPolicy(fileVerificationSkipper())
.processor(processor())
.writer(mongoRecordWriter())
.build();
}
// end::jobstep[]
}