Tags: out-of-memory, spring-batch, large-data

Spring Batch OutOfMemoryError when reading a CSV file with 1M rows (900 MB)


I am trying to read a very large CSV file containing more than 1M rows using a FlatFileItemReader, but when I launch my batch job I get an OutOfMemoryError after about 10 minutes.

Here is my code:

@Slf4j
@Configuration
@EnableBatchProcessing
@ComponentScan({
        "f.p.f.batch",
        "f.p.f.batch.tasklet"
})
public class BatchConfig {

    @Autowired
    private StepBuilderFactory steps;

    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private DemoTasklet demoTasklet;

    @Bean
    public ResourcelessTransactionManager transactionManager() {
        return new ResourcelessTransactionManager();
    }

    @Bean
    public JobRepository jobRepository(ResourcelessTransactionManager transactionManager) {
        MapJobRepositoryFactoryBean mapJobRepositoryFactoryBean = new MapJobRepositoryFactoryBean(transactionManager);
        try {
            return mapJobRepositoryFactoryBean.getObject();
        } catch (Exception ex) {
            log.error("Exception : {}", ex.getMessage(), ex);
            throw new IllegalStateException("Failed to create the job repository", ex);
        }
    }

    @Bean
    //@StepScope
    public FlatFileItemReader<Balance> csvAnimeReader() {
        FlatFileItemReader<Balance> reader = new FlatFileItemReader<>();
        DefaultLineMapper<Balance> lineMapper = new DefaultLineMapper<>();
        FieldSetMapper<Balance> fieldSetMapper = new BalanceFieldSetMapper();
        DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
        tokenizer.setNames(new String[]{
                "EXER",
                "IDENT",
                "NDEPT",
                "LBUDG",
                "INSEE",
                "SIREN",
                "CREGI",
                "NOMEN",
                "CTYPE",
                "CSTYP",
                "CACTI",
                "FINESS",
                "SECTEUR",
                "CBUDG",
                "CODBUD1",
                "COMPTE ",
                "BEDEB",
                "BECRE",
                "OBNETDEB",
                "OBNETCRE",
                "ONBDEB",
                "ONBCRE",
                "OOBDEB",
                "OOBCRE",
                "SD",
                "SC"
        });
        tokenizer.setDelimiter(";");

        lineMapper.setLineTokenizer(tokenizer);
        lineMapper.setFieldSetMapper(fieldSetMapper);
        reader.setLineMapper(lineMapper);
        reader.setResource(new ClassPathResource("Balance_Exemple_2016.csv"));
        reader.setLinesToSkip(1);
        return reader;
    }

    @Bean
    public ItemProcessor<Balance, Balance> csvFileProcessor() {
        return new BalanceProcessor();
    }

    @Bean
    public BalanceWriter balanceWriter() {
        return new BalanceWriter();
    }

    @Bean
    public SimpleJobLauncher jobLauncher(JobRepository jobRepository) {
        SimpleJobLauncher simpleJobLauncher = new SimpleJobLauncher();
        simpleJobLauncher.setJobRepository(jobRepository);
        return simpleJobLauncher;
    }

    @Bean
    public Step step1() {
        return steps.get("step1")
                .<Balance, Balance>chunk(1)
                .reader(csvAnimeReader())
                .writer(balanceWriter())
                .build();
    }

    @Bean
    public Step step2() {
        return steps.get("step2")
                .tasklet(demoTasklet)
                .build();
    }

    @Bean
    public Job readCsvJob() {
        return jobBuilderFactory.get("readCsvJob")
                .incrementer(new RunIdIncrementer())
                .flow(step1())
                .next(step2())
                .end()
                .build();
    }
}


Solution

  • I suggest you use streaming, since you never want to read the whole file at once; loading it entirely into memory is the major problem here. A minimal sketch of the idea follows below.

    Here is a nice article on how to read a file more efficiently without holding the entire contents in memory.
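
    As a minimal sketch of that streaming idea (this is not the article's code; the class name, file path, and row handling are placeholders), reading the CSV line by line with java.nio keeps only one row in memory at a time:

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.stream.Stream;

    public class CsvStreamingExample {

        public static void main(String[] args) throws IOException {
            // Hypothetical path; adjust to wherever the CSV actually lives.
            Path csv = Paths.get("Balance_Exemple_2016.csv");

            // Files.lines returns a lazy Stream<String>: lines are read on demand,
            // so the 900 MB file is never held in memory as a whole.
            try (Stream<String> lines = Files.lines(csv)) {
                lines.skip(1)                      // skip the header line
                     .map(line -> line.split(";")) // same delimiter as the tokenizer above
                     .forEach(fields -> {
                         // handle one row at a time (e.g. map it to a Balance
                         // and write it out); nothing is accumulated here
                     });
            }
        }
    }

    The same principle applies inside the batch job: process and write each row as it is read instead of collecting rows into a list.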