使用Spring batch进行批处理
来源:互联网 发布:淘宝店铺的图片尺寸 编辑:程序博客网 时间:2024/06/10 01:38
今日公司 Boss 提出了新需求:使用 Spring-batch 对数据进行汇总。我在网上找了好多配置方法,都没有配置成功,最后终于还是搞了个基本版出来,贴出来以免自己忘记。Spring-batch 是一种批量的批处理方式,其工作流程为:开启一个 Job —> 开启一个 step —>(调用 reader —> 调用 processor)(循环直到读取到最后一条数据)—> 调用 writer —> 完成一个 step —> 查看是否有下一个 step(next="xxx")—> 没有则完成一个 Job,有则进入下一个 step。总结起来就是:开启 Job 后重复执行 reader —> processor,最后把所有的数据一次性交给 writer 操作。
<!-- Spring Batch infrastructure beans: a synchronous SimpleJobLauncher backed by an
     in-memory (Map-based) job repository. validateTransactionState is set to false
     because an outer Spring-managed transaction is already active in this application;
     leaving validation on raises an "existing transaction" exception at step start.
     NOTE(review): this fragment's closing </beans> tag is outside the quoted snippet. -->
<beans xmlns="http://www.springframework.org/schema/beans" xmlns:batch="http://www.springframework.org/schema/batch" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch-2.1.xsd http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd "><bean id="jobLauncher" class="org.springframework.batch.core.launch.support.SimpleJobLauncher"> <property name="jobRepository" ref="jobRepository" /> </bean> <bean id="jobRepository" class="org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean"> <property name="validateTransactionState" value="false" /> </bean>
Job 由于我在Spring 中已经开启了事务,所以便把Spring-batch中的事务关掉了,否则会报已经拥有事务异常
<!-- Job definition: a single chunk-oriented step wiring reader -> processor -> writer. -->
<batch:job id="writerclassJob"> <batch:step id="stepwriter1" > <batch:tasklet> <batch:chunk reader="jdbcItemReader" writer="messagesItemWriter" processor="itemProcessor" commit-interval="10"> <!-- commit-interval="10": chunk size - number of items read/processed before
           the accumulated list is handed to the writer in one call (not a parallelism setting) --> </batch:chunk> </batch:tasklet> </batch:step> </batch:job>
jdbcItemReader 读:${detail_startime} 和 ${detail_endtime} 是从配置文件中读取的数据,作为查询的参数
<!-- Step-scoped cursor reader: streams (examineeclassid, classcount) pairs from the
     work table, restricted to a date window.
     NOTE(review): ${detail_startime}/${detail_endtime} are property placeholders
     substituted at configuration time from connectionConfig.properties - trusted
     config values, not user input; still, prepared-statement parameters would be
     the safer idiom here. -->
<bean id="jdbcItemReader" class="org.springframework.batch.item.database.JdbcCursorItemReader" scope="step"> <property name="dataSource" ref="dataSource" /> <property name="sql" value="select distinct examineeclassid,classcount from work where checkdate >'${detail_startime}' and checkdate < '${detail_endtime}' " /> <property name="rowMapper" ref="ledgerRowMapper"> </property> </bean>
ledgerRowMapper 从数据库读取的数据传过来 ResultSet
package com.yc.batch;

import java.sql.ResultSet;
import java.sql.SQLException;

import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Component;

import com.yc.vo.Workdetail;

/**
 * Maps one row of the aggregation query ({@code examineeclassid},
 * {@code classcount}) to a {@link Workdetail} value object for the
 * {@code jdbcItemReader} cursor reader.
 */
@Component("ledgerRowMapper")
public class LedgerRowMapper implements RowMapper<Workdetail> {

    /**
     * @param rs     result set positioned on the current row
     * @param rowNum zero-based row index (unused)
     * @return a {@code Workdetail} carrying the class id and class size
     * @throws SQLException if a column cannot be read
     */
    @Override
    public Workdetail mapRow(ResultSet rs, int rowNum) throws SQLException {
        Workdetail detail = new Workdetail();
        detail.setClasscount(rs.getInt("classcount"));
        detail.setExamineeclassid(rs.getInt("examineeclassid"));
        return detail;
    }
}
itemProcessor 操作
<!-- Step-scoped processor bean; the backing class is a pass-through that returns
     each item unchanged (a hook point for future per-item business logic). -->
<bean id="itemProcessor" class="com.yc.batch.MessagesItemProcessor" scope="step"> </bean>
package com.yc.batch;import org.hibernate.engine.transaction.jta.platform.internal.SynchronizationRegistryBasedSynchronizationStrategy;import org.springframework.batch.item.ItemProcessor;import org.springframework.stereotype.Component;import org.springframework.stereotype.Service;import com.yc.vo.Workdetail;//业务层@Component("messagesItemProcessor")public class MessagesItemProcessor implements ItemProcessor<Workdetail, Workdetail> {//在这类对获取的数据进行操作 public Workdetail process(Workdetail workdetail) throws Exception { return workdetail; }}
messagesItemWriter 写:我是将数据库中的数据存入 csv 文件中
package com.yc.batch;

import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

import javax.annotation.Resource;

import org.springframework.batch.item.ItemWriter;
import org.springframework.stereotype.Component;

import com.yc.biz.ExamineeClassBiz;
import com.yc.biz.WorkBiz;
import com.yc.utils.CsvUtils;
import com.yc.vo.Workdetail;

/**
 * Chunk writer: for every class handed over by the step, aggregates its
 * homework statistics (work count, check count, completion rate) via the
 * business services and appends the summary rows to a CSV file under
 * {@code <deploy-root>/csv/class_<detail_time>.csv}.
 */
@Component("messagesItemWriter")
public class MessagesItemWriter implements ItemWriter<Workdetail> {

    @Resource(name = "workBiz")
    private WorkBiz workBiz;

    @Resource(name = "examineeClassBiz")
    private ExamineeClassBiz examineeClassBiz;

    public ExamineeClassBiz getExamineeClassBiz() {
        return examineeClassBiz;
    }

    public void setExamineeClassBiz(ExamineeClassBiz examineeClassBiz) {
        this.examineeClassBiz = examineeClassBiz;
    }

    public WorkBiz getWorkBiz() {
        return workBiz;
    }

    public void setWorkBiz(WorkBiz workBiz) {
        this.workBiz = workBiz;
    }

    /**
     * Writes one chunk of class summaries to the CSV file.
     *
     * @param messages items accumulated by the step (one per class)
     * @throws Exception if the properties file cannot be read or the CSV write fails
     */
    @Override
    public void write(List<? extends Workdetail> messages) throws Exception {
        // Date window and file-name suffix come from the shared properties file.
        Properties props = new Properties();
        InputStream in = MessagesItemWriter.class.getClassLoader()
                .getResourceAsStream("connectionConfig.properties");
        try {
            props.load(in);
        } finally {
            // BUG FIX: the stream was previously never closed (resource leak).
            if (in != null) {
                in.close();
            }
        }
        String startime = props.getProperty("detail_startime");
        String endtime = props.getProperty("detail_endtime");
        String time = props.getProperty("detail_time");

        List<Object> works = new ArrayList<Object>();
        for (Workdetail work : messages) {
            Workdetail workdetail = new Workdetail();
            Integer classid = work.getExamineeclassid();
            int num = this.workBiz.getClassWorkNum(classid, startime, endtime);
            String classname =
                    this.examineeClassBiz.findExamineeClassById(classid).getClassName();
            int checkcount = this.workBiz.getWorkCheckcount(classid, startime, endtime);

            // BUG FIX: the original computed checkcount/(num*classcount)*100 in integer
            // arithmetic, so the rate was truncated to 0 (or a multiple of 100) before
            // the conversion to double. Compute in floating point, guarding the
            // denominator against zero (no works, or empty class).
            int expected = num * work.getClasscount();
            double p = expected == 0 ? 0.0 : checkcount * 100.0 / expected;

            workdetail.setExamineeclassid(classid);
            workdetail.setWorkcount(num);
            workdetail.setClasscount(work.getClasscount());
            workdetail.setClassName(classname);
            workdetail.setCheckcount(checkcount);
            workdetail.setCompletionrate(p);
            works.add(workdetail);
        }

        // Walk four path segments up from the classes directory to reach the
        // deployment root, then write csv/class_<time>.csv there.
        // NOTE(review): assumes the classpath root is 4 levels below the intended
        // output root - confirm against the actual deployment layout.
        String path = this.getClass().getResource("/").getPath();
        path = path.substring(0, path.lastIndexOf("/"));
        path = path.substring(0, path.lastIndexOf("/"));
        path = path.substring(0, path.lastIndexOf("/"));
        path = path.substring(0, path.lastIndexOf("/"));
        new CsvUtils().writeCsv(path + "/csv/class_" + time + ".csv", works);
    }
}
CsvUtils 类 使用jar包为javacsv2.0
package com.yc.utils;

import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;

import com.csvreader.CsvReader;
import com.csvreader.CsvWriter;

/**
 * Thin wrapper around javacsv 2.0 for reading and writing GBK-encoded CSV files.
 */
public class CsvUtils {

    /** CSV files handled by this project are GBK encoded. */
    private static final Charset GBK = Charset.forName("GBK");

    /**
     * Reads every record of a CSV file, header row included.
     *
     * @param path path of the CSV file to read
     * @return one String[] per record, or {@code null} if the file could not be
     *         read (the original error contract, preserved for existing callers)
     */
    public List<String[]> readeCsv(String path) {
        CsvReader reader = null;
        try {
            List<String[]> csvList = new ArrayList<String[]>();
            reader = new CsvReader(path, ',', GBK);
            // reader.readHeaders() would skip the header row; kept off so the
            // header is returned like any other record.
            while (reader.readRecord()) {
                csvList.add(reader.getValues());
            }
            return csvList;
        } catch (Exception ex) {
            // Kept: callers rely on the null-on-error contract.
            System.out.println(ex);
            return null;
        } finally {
            // BUG FIX: the reader was leaked when readRecord() threw.
            if (reader != null) {
                reader.close();
            }
        }
    }

    /**
     * Writes each element of {@code t} as one CSV record; the record's fields
     * are obtained by splitting the element's {@code toString()} on commas.
     * Creates the target directory and file if they do not exist.
     *
     * @param path destination file path (forward-slash separated)
     * @param t    objects to serialize, one per row
     * @throws IOException if the file cannot be created or written
     */
    public void writeCsv(String path, List<Object> t) throws IOException {
        File dir = new File(path.substring(0, path.lastIndexOf("/")));
        if (!dir.exists()) {
            dir.mkdirs();
        }
        File file = new File(path);
        if (!file.exists()) {
            file.createNewFile();
        }
        CsvWriter wr = new CsvWriter(path, ',', GBK);
        try {
            for (Object obj : t) {
                // NOTE(review): fields containing commas in toString() will be
                // split into extra columns - acceptable for the current VO format.
                wr.writeRecord(obj.toString().split(","));
            }
        } finally {
            // BUG FIX: writer was leaked (and the file left unflushed) if
            // writeRecord() threw; the IOException now propagates to the caller
            // as the signature already declares, instead of being swallowed.
            wr.close();
        }
    }
}
阅读全文
0 0
- 使用Spring batch进行批处理
- 使用JDBC进行批处理Batch
- 使用 Spring Batch 构建企业级批处理应用
- Spring Batch 批处理框架
- Spring Batch 批处理框架
- Spring batch批处理框架
- 9.2 批处理Spring Batch
- Spring Batch 批处理框架
- Spring Batch批处理框架
- 使用 Spring Batch 构建企业级批处理应用: 第 1 部分
- 使用 Spring Batch 构建企业级批处理应用: 第 2 部分
- 使用 Spring Batch 构建企业级批处理应用: 第 1 部分
- 使用 Spring Batch 构建企业级批处理应用: 第 2 部分
- Spring batch 第 1 部分:使用 Spring Batch 构建企业级批处理应用
- spring data jpa batch批处理
- Spring Batch批处理框架初探
- Spring Batch 批处理框架介绍
- Spring Batch批处理框架了解
- 蓝桥杯训练——除去次方数
- zero pad模块 simulink在OFDM FFT中
- dojo和Jquery混用 chart
- 2017杭电多校联赛team4 Questionnaire 水
- 论C++11 中vector的N种遍历方法
- 使用Spring batch进行批处理
- 用FontCreator创键iconFont
- 文章标题
- react redux 4 学习
- python数据类型详解
- 算法学习记录六(C++)--->获取斐波那契数列第n项
- 马海方、汪国新、薛林兴、王阔海、马硕山等出席“翰墨千秋名家书画鉴藏展”
- 虚拟机安装Ubuntu Server 16.04中文版时出现“无法安装busybox-initramfs”
- digits本地配置+Anaconda,Theano, Tensorflow, Keras的安装