When working with large volumes of data, firing off tens of thousands of INSERT or UPDATE statements in one go without committing in time keeps resources tied up and can bring the database server down. It is therefore worth processing inserts and updates in batches, committing after each batch.
import java.util.ArrayList;
import java.util.List;

import org.apache.ibatis.session.SqlSession;
import org.apache.log4j.Logger;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

public class StudentTest {

    private static Logger logger = Logger.getLogger(StudentTest.class);
    private SqlSession sqlSession = null;
    private StudentMapper studentMapper = null;

    /**
     * Runs before each test method.
     */
    @Before
    public void setUp() throws Exception {
        sqlSession = SqlSessionFactoryUtil.openSession();
        studentMapper = sqlSession.getMapper(StudentMapper.class);
    }

    /**
     * Runs after each test method.
     */
    @After
    public void tearDown() throws Exception {
        sqlSession.close();
    }

    @Test
    public void batchInsertStudentPage() {
        List<Student> list = new ArrayList<Student>();
        for (int i = 0; i < 10; i++) {
            Student student = new Student();
            student.setId(i);
            student.setName("test" + i);
            list.add(student);
        }
        try {
            save(list);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Insert the list in batches, committing after each batch
    private void save(List<Student> uidCodeList) throws Exception {
        SqlSession batchSqlSession = null;
        try {
            batchSqlSession = SqlSessionFactoryUtil.openSession(); // session used for the batched inserts
            int batchCount = 1000;                // number of rows committed per batch
            int batchLastIndex = batchCount - 1;  // index of the last row of the current batch
            for (int index = 0; index < uidCodeList.size();) {
                if (batchLastIndex > uidCodeList.size() - 1) {
                    batchLastIndex = uidCodeList.size() - 1;
                    batchSqlSession.insert("com.mybatis.mappers.StudentMapper.batchInsertStudent",
                            uidCodeList.subList(index, batchLastIndex + 1));
                    batchSqlSession.commit();
                    System.out.println("index:" + index + " batchLastIndex:" + batchLastIndex);
                    break; // all rows inserted, leave the loop
                } else {
                    batchSqlSession.insert("com.mybatis.mappers.StudentMapper.batchInsertStudent",
                            uidCodeList.subList(index, batchLastIndex + 1));
                    batchSqlSession.commit();
                    System.out.println("index:" + index + " batchLastIndex:" + batchLastIndex);
                    index = batchLastIndex + 1;            // start index of the next batch
                    batchLastIndex = index + (batchCount - 1);
                }
            }
        } finally {
            if (batchSqlSession != null) {
                batchSqlSession.close();
            }
        }
    }
}

The mapper interface:
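A note on the session used in save(): the code comment calls batchSqlSession a batch-style session, but SqlSessionFactoryUtil.openSession() (listed at the end of the post) opens a session with MyBatis's default SIMPLE executor, so each "batch" here is really one multi-row INSERT followed by a commit. If you additionally want MyBatis to buffer statements through a JDBC batch executor, a minimal sketch of an extra overload for the utility could look like this (openBatchSession is a hypothetical name, not part of the original code):

import org.apache.ibatis.session.ExecutorType;
import org.apache.ibatis.session.SqlSession;

// Hypothetical addition to SqlSessionFactoryUtil: open a session whose
// executor buffers statements as a JDBC batch. autoCommit is left off so
// the caller still decides when each batch is committed.
public static SqlSession openBatchSession() {
    return getSqlSessionFactory().openSession(ExecutorType.BATCH, false);
}

With such a session you would still commit (or flushStatements()) every batchCount rows, exactly as save() does.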
public interface StudentMapper {

    public int batchInsertStudent(List<Student> list);
}
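For completeness, the same statement can also be invoked through the mapper interface obtained in setUp(), rather than by statement id as save() does. A small sketch (the row values are made up for illustration):

@Test
public void insertViaMapperInterface() {
    List<Student> rows = new ArrayList<Student>();
    rows.add(new Student(100, "alice")); // illustrative values, not from the original post
    rows.add(new Student(101, "bob"));
    int inserted = studentMapper.batchInsertStudent(rows); // same mapped statement, called via the interface
    sqlSession.commit(); // the session opened in setUp() is not auto-commit
    System.out.println("inserted rows: " + inserted);
}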
<mapper namespace="com.mybatis.mappers.StudentMapper">

    <!--
        1. size: the maximum number of elements the cache can hold; the default is 1024.
        2. flushInterval: the cache flush interval, in milliseconds.
        3. eviction: the eviction policy; the default is LRU (least recently used), FIFO (first in, first out) is also available.
        4. readOnly: defaults to false; if set to true, the cache is read-only.
    -->
    <cache size="1024" flushInterval="60000" eviction="LRU" readOnly="false" />

    <resultMap type="Student" id="StudentResult">
        <id property="id" column="id" />
        <result property="name" column="name" />
    </resultMap>

    <!-- Batch insert -->
    <insert id="batchInsertStudent" parameterType="List">
        insert /*+ append_values */ into t_student(id, name)
        <foreach collection="list" item="item" index="index" separator="union all">
            select #{item.id}, #{item.name} from dual
        </foreach>
    </insert>

</mapper>
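Oracle has no multi-row VALUES syntax, so each element of the list becomes a select ... from dual and the rows are stitched together with union all. For illustration only, with a two-element list the foreach above renders roughly:

insert /*+ append_values */ into t_student(id, name)
select ?, ? from dual
union all
select ?, ? from dual

Keep in mind that Oracle only recognizes a hint placed immediately after the INSERT keyword, and the documented direct-path hint for an INSERT ... SELECT is APPEND rather than APPEND_VALUES, so the hint here may simply be ignored; verify the execution plan on your Oracle version before relying on it.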
public class Student implements Serializable {

    private Integer id;
    private String name;
    private String remark; // clob column

    public Student() {
        super();
    }

    public Student(Integer id, String name) {
        super();
        this.id = id;
        this.name = name;
    }

    public Student(String name) {
        super();
        this.name = name;
    }

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getRemark() {
        return remark;
    }

    public void setRemark(String remark) {
        this.remark = remark;
    }

    @Override
    public String toString() {
        return "Student [id=" + id + ", name=" + name + ", remark=" + remark + "]";
    }
}
public class SqlSessionFactoryUtil {

    private static SqlSessionFactory sqlSessionFactory;

    public static SqlSessionFactory getSqlSessionFactory() {
        if (sqlSessionFactory == null) {
            InputStream inputStream = null;
            try {
                inputStream = Resources.getResourceAsStream("mybatis-config.xml");
                sqlSessionFactory = new SqlSessionFactoryBuilder().build(inputStream);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        return sqlSessionFactory;
    }

    public static SqlSession openSession() {
        return getSqlSessionFactory().openSession();
    }
}

Note: I originally published this article on ITEYE in 2014 and have since moved it to CSDN.