package com.ycl.dataListener;

import com.alibaba.excel.context.AnalysisContext;
import com.alibaba.excel.metadata.data.ReadCellData;
import com.alibaba.excel.read.listener.ReadListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;

/**
 * Generic EasyExcel read listener that buffers parsed rows and flushes them
 * to the supplied {@link Consumer} in batches of {@link #BATCH_COUNT}, so a
 * large sheet never accumulates fully in memory.
 *
 * @param <T> the row-model type produced by EasyExcel for each data row
 * @author:xp
 * @date:2024/8/6 15:04
 */
public class CurrencyDataListener<T> implements ReadListener<T> {

    private final static Logger log = LoggerFactory.getLogger(CurrencyDataListener.class);

    /**
     * Flush to the consumer every BATCH_COUNT rows, then clear the buffer
     * so the rows become eligible for garbage collection.
     */
    private static final int BATCH_COUNT = 500;

    /** Callback that persists one batch of parsed rows (e.g. a DB insert). */
    private final Consumer<List<T>> consumer;

    /** Rows buffered since the last flush. */
    private List<T> cachedDataList = new ArrayList<>(BATCH_COUNT);

    /**
     * @param consumer callback invoked with each batch of parsed rows; must not be null
     */
    public CurrencyDataListener(Consumer<List<T>> consumer) {
        this.consumer = consumer;
    }

    /**
     * Called by EasyExcel when parsing a row throws. Logs the failure and
     * rethrows so the error is not silently swallowed (an empty body here
     * would make EasyExcel skip the bad row and continue without any trace).
     *
     * @param e               the parsing exception
     * @param analysisContext current read context
     * @throws Exception always rethrows {@code e}
     */
    @Override
    public void onException(Exception e, AnalysisContext analysisContext) throws Exception {
        log.error("excel读取异常", e);
        throw e;
    }

    /**
     * Header-row callback. No header processing is required for this listener.
     *
     * @param map             header cells keyed by column index
     * @param analysisContext current read context
     */
    @Override
    public void invokeHead(Map<Integer, ReadCellData<?>> map, AnalysisContext analysisContext) {
    }

    /**
     * Called once per parsed data row; buffers the row and flushes when the
     * buffer reaches {@link #BATCH_COUNT}.
     *
     * @param data            the parsed row
     * @param analysisContext current read context
     */
    @Override
    public void invoke(T data, AnalysisContext analysisContext) {
        cachedDataList.add(data);
        // 达到BATCH_COUNT了,需要去存储一次数据库,防止数据几万条数据在内存,容易OOM
        if (cachedDataList.size() >= BATCH_COUNT) {
            try {
                saveData();
            } catch (Exception e) {
                // 这里需要捕获异常,否则list无法清空,导致每读一条数据就会执行saveData方法
                log.error("批量存储数据失败", e);
            }
            // 存储完成清理 list — replace rather than clear() so the consumer may
            // safely keep a reference to the batch it received
            cachedDataList = new ArrayList<>(BATCH_COUNT);
        }
    }

    /**
     * Called once after the last row has been parsed; flushes any remaining
     * buffered rows (skipped when the buffer is empty, e.g. the row count was
     * an exact multiple of BATCH_COUNT).
     *
     * @param analysisContext current read context
     */
    @Override
    public void doAfterAllAnalysed(AnalysisContext analysisContext) {
        if (!cachedDataList.isEmpty()) {
            saveData();
        }
        log.info("所有数据解析完成!");
    }

    /**
     * @param analysisContext current read context
     * @return {@code true} — always continue reading the next row
     */
    @Override
    public boolean hasNext(AnalysisContext analysisContext) {
        return true;
    }

    /** Hands the current buffer to the consumer for persistence. */
    private void saveData() {
        log.info("{}条数据,开始存储数据库!", cachedDataList.size());
        consumer.accept(cachedDataList);
        log.info("存储数据库成功!");
    }
}