Exporting CSV Files in Java, in Detail
Step 1: add the Maven dependency:
<dependency>
    <groupId>org.apache.commons</groupId>
    <artifactId>commons-csv</artifactId>
    <version>1.6</version>
</dependency>
Step 2
Add the utility class below. One note: since this utility class works with List collections, I'll also cover converting between entity classes and Maps along the way.
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.csv.CSVRecord;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;

import java.io.*;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * Utility for exporting to CSV files.
 */
public class CsvUtil {

    /** CSV record (line) separator */
    private final static String NEW_LINE_SEPARATOR = "\n";

    /** CSV column separator */
    private static final String CSV_COLUMN_SEPARATOR = ",";

    /** CSV row separator (CRLF) */
    private static final String CSV_RN = "\r\n";

    /**
     * Write a CSV file with a header row.
     * @param headers  column headers
     * @param data     row data
     * @param filePath path of the CSV file to create
     */
    public static void writeCsvWithHeader(String[] headers, List<Object[]> data, String filePath) {
        // Initialize the CSVFormat with the given header
        CSVFormat format = CSVFormat.DEFAULT.withHeader(headers);
        try {
            // Create the file at the given path and set the encoding
            FileOutputStream fos = new FileOutputStream(filePath);
            OutputStreamWriter osw = new OutputStreamWriter(fos, "GBK");
            // Create the CSVPrinter
            CSVPrinter printer = new CSVPrinter(osw, format);
            if (null != data) {
                // Write the rows one by one
                for (Object[] lineData : data) {
                    printer.printRecord(lineData);
                }
            }
            printer.flush();
            printer.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Write a CSV file, printing the header row explicitly.
     * @param headers  column headers
     * @param data     row data
     * @param filePath path of the CSV file to create
     */
    public static void writeCsvWithRecordSeparator(Object[] headers, List<Object[]> data, String filePath) {
        // Initialize the CSVFormat with a custom record separator
        CSVFormat format = CSVFormat.DEFAULT.withRecordSeparator(NEW_LINE_SEPARATOR);
        try {
            FileOutputStream fos = new FileOutputStream(filePath);
            OutputStreamWriter osw = new OutputStreamWriter(fos, "GBK");
            CSVPrinter printer = new CSVPrinter(osw, format);
            // Write the header row first
            printer.printRecord(headers);
            if (null != data) {
                for (Object[] lineData : data) {
                    printer.printRecord(lineData);
                }
            }
            printer.flush();
            printer.close();
            System.out.println("CSV file created, path: " + filePath);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Read a CSV file.
     * @param filePath file path
     */
    public static List<CSVRecord> readCsvParse(String filePath) {
        List<CSVRecord> records = new ArrayList<>();
        try {
            FileInputStream in = new FileInputStream(filePath);
            BufferedReader reader = new BufferedReader(new InputStreamReader(in, "GBK"));
            CSVParser parser = CSVFormat.EXCEL.parse(reader);
            records = parser.getRecords();
            parser.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return records;
    }

    /**
     * Read a CSV file with custom column names.
     * @param filePath file path
     */
    public static List<CSVRecord> readCsvParseWithHeader(String filePath, String[] headers) {
        List<CSVRecord> records = new ArrayList<>();
        try {
            FileInputStream in = new FileInputStream(filePath);
            BufferedReader reader = new BufferedReader(new InputStreamReader(in, "GBK"));
            CSVParser parser = CSVFormat.EXCEL.withHeader(headers).parse(reader);
            records = parser.getRecords();
            /*for (CSVRecord record : records) {
                System.out.println(record.get("id") + "," + record.get("name") + "," + record.get("code"));
            }*/
            parser.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return records;
    }

    /**
     * Export to several CSV files concurrently.
     * Note: CsvExportThread is not shown in the original article; a sketch follows below this class.
     */
    public void writeMuti() throws InterruptedException {
        ExecutorService executorService = Executors.newFixedThreadPool(3);
        CountDownLatch doneSignal = new CountDownLatch(2);
        List<Map<String, String>> recordList = new ArrayList<>();
        executorService.submit(new CsvExportThread("E:/0.csv", recordList, doneSignal));
        executorService.submit(new CsvExportThread("E:/1.csv", recordList, doneSignal));
        doneSignal.await();
        System.out.println("Finish!!!");
    }

    /**
     * Build CSV content in memory.
     * @param colNames header row
     * @param mapKeys  keys used to look values up in each row map
     * @param dataList row data
     */
    public static ByteArrayOutputStream doExport(String[] colNames, String[] mapKeys, List<Map> dataList) {
        try {
            StringBuffer buf = new StringBuffer();
            // Assemble the CSV content: header row first
            for (int i = 0; i < colNames.length; i++) {
                buf.append(colNames[i]).append(CSV_COLUMN_SEPARATOR);
            }
            buf.append(CSV_RN);
            if (null != dataList) {
                // Then the data rows
                for (int i = 0; i < dataList.size(); i++) {
                    for (int j = 0; j < mapKeys.length; j++) {
                        buf.append(dataList.get(i).get(mapKeys[j])).append(CSV_COLUMN_SEPARATOR);
                    }
                    buf.append(CSV_RN);
                }
            }
            // Write the content into an in-memory stream
            ByteArrayOutputStream os = new ByteArrayOutputStream();
            os.write(buf.toString().getBytes("GBK"));
            os.flush();
            os.close();
            return os;
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }

    public static HttpHeaders setCsvHeader(String fileName) {
        HttpHeaders headers = new HttpHeaders();
        try {
            // Append a timestamp suffix; re-encoding the name avoids a garbled download file name
            SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
            String filename = new String(fileName.getBytes("gbk"), "iso8859-1") + sdf.format(new Date()) + ".csv";
            headers.add("Pragma", "public");
            headers.add("Cache-Control", "max-age=30");
            headers.add("Content-Disposition", "attachment;filename=" + filename);
            headers.setContentType(MediaType.valueOf("application/vnd.ms-excel;charset=UTF-8"));
        } catch (Exception e) {
            e.printStackTrace();
        }
        return headers;
    }
}
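The writeMuti() method above submits CsvExportThread tasks, but that class never appears in the original article. Below is a minimal, hypothetical sketch of what such a worker could look like: the class name and constructor shape are inferred from the call site, and the column names are assumptions.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;

/**
 * Hypothetical worker used by CsvUtil.writeMuti(); not part of the original article.
 * It writes one CSV file and releases the latch when done.
 */
public class CsvExportThread implements Runnable {

    private final String filePath;
    private final List<Map<String, String>> recordList;
    private final CountDownLatch doneSignal;

    public CsvExportThread(String filePath, List<Map<String, String>> recordList, CountDownLatch doneSignal) {
        this.filePath = filePath;
        this.recordList = recordList;
        this.doneSignal = doneSignal;
    }

    @Override
    public void run() {
        try {
            // Flatten each row map into an Object[] in a fixed column order (columns assumed)
            Object[] headers = {"name", "age", "sex"};
            List<Object[]> data = new ArrayList<>();
            for (Map<String, String> record : recordList) {
                data.add(new Object[]{record.get("name"), record.get("age"), record.get("sex")});
            }
            CsvUtil.writeCsvWithRecordSeparator(headers, data, filePath);
        } finally {
            // Always count down, so writeMuti() cannot hang on a failed export
            doneSignal.countDown();
        }
    }
}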
Step 3
The controller layer:
package com.example.demo.controller;

import com.example.demo.service.DemoService;
import com.example.demo.util.CsvUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;

@Controller
@RequestMapping("/demo")
public class DemoController {

    @Autowired
    private DemoService demoService;

    @RequestMapping("/exportCsv")
    public ResponseEntity<byte[]> exportCsv() {
        // Name of the exported CSV file
        String fileName = "用户表";
        // Build the HttpHeaders; encoding the file name avoids a garbled download name
        HttpHeaders headers = CsvUtil.setCsvHeader(fileName);
        byte[] value = null;
        try {
            // Fetch the data to export
            value = this.demoService.exportCsv();
        } catch (Exception e) {
            e.printStackTrace();
        }
        return new ResponseEntity<byte[]>(value, headers, HttpStatus.OK);
    }
}
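The controller assumes a running Spring Boot application; the article does not show the entry point, so here is a minimal, assumed one for completeness:

package com.example.demo;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

// Minimal Spring Boot entry point (assumed; not shown in the original article)
@SpringBootApplication
public class DemoApplication {
    public static void main(String[] args) {
        SpringApplication.run(DemoApplication.class, args);
    }
}

With the application running, opening /demo/exportCsv in a browser should trigger the CSV download.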
Step 4:
The service interface:
package com.example.demo.service;

public interface DemoService {

    /* Export a CSV file */
    byte[] exportCsv();
}
Step 5:
The service implementation:
package com.example.demo.service.impl;

import com.example.demo.pojo.User;
import com.example.demo.service.DemoService;
import com.example.demo.util.CsvUtil;
import org.apache.commons.beanutils.BeanUtils;
import org.springframework.stereotype.Service;

import java.io.ByteArrayOutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

@Service
public class DemoServiceImpl implements DemoService {

    @Override
    public byte[] exportCsv() {
        byte[] content = null;
        try {
            String[] sTitles = new String[]{"名称", "年龄", "性别"};
            String[] mapKeys = new String[]{"name", "age", "sex"};
            List<Map> dataList = new ArrayList<>();
            // Build some sample data
            for (int i = 0; i < 10; i++) {
                User user = new User("小明" + i, i, "男" + i);
                // Convert the entity to a Map so doExport can look fields up by key
                Map map = BeanUtils.describe(user);
                dataList.add(map);
            }
            ByteArrayOutputStream os = CsvUtil.doExport(sTitles, mapKeys, dataList);
            // doExport returns null on failure, so guard against an NPE here
            if (os != null) {
                content = os.toByteArray();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return content;
    }
}
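The User entity in com.example.demo.pojo is referenced but never shown. Since BeanUtils.describe reads JavaBean getters, a minimal sketch compatible with the code above might look like this; the fields are inferred from the map keys, so treat it as an assumption:

package com.example.demo.pojo;

// Minimal User entity (assumed; the original article does not show it).
// BeanUtils.describe relies on the getters, so they must match the map keys.
public class User {

    private String name;
    private Integer age;
    private String sex;

    public User(String name, Integer age, String sex) {
        this.name = name;
        this.age = age;
        this.sex = sex;
    }

    public String getName() { return name; }
    public void setName(String name) { this.name = name; }

    public Integer getAge() { return age; }
    public void setAge(Integer age) { this.age = age; }

    public String getSex() { return sex; }
    public void setSex(String sex) { this.sex = sex; }
}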
Appendix:
Converting between objects and Maps
Dependency:
<dependency>
    <groupId>commons-beanutils</groupId>
    <artifactId>commons-beanutils</artifactId>
    <version>1.9.3</version>
</dependency>
Utility class used:
BeanUtils
1. Bean (entity class) to Map
Example:
Person person = new Person();
person.setName("张三");
person.setSex("不男不女");
// Note: BeanUtils.describe actually returns a Map<String, String>
Map<String, String> map = BeanUtils.describe(person);
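For reference, here is a runnable sketch of the conversion, using a hypothetical Person bean (the article never defines it). Note that describe() also adds a synthetic "class" entry and converts every property value to a String:

import java.util.Map;
import org.apache.commons.beanutils.BeanUtils;

// Minimal Person bean (assumed; not shown in the original article)
public class Person {
    private String name;
    private String sex;

    public String getName() { return name; }
    public void setName(String name) { this.name = name; }
    public String getSex() { return sex; }
    public void setSex(String sex) { this.sex = sex; }

    public static void main(String[] args) throws Exception {
        Person person = new Person();
        person.setName("张三");
        person.setSex("不男不女");
        Map<String, String> map = BeanUtils.describe(person);
        // Prints something like {class=class Person, name=张三, sex=不男不女};
        // note the extra "class" entry that describe() always adds
        System.out.println(map);
    }
}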
2. Map to bean (entity class)
Example (honestly, I suspect few people ever need Map-to-bean conversion, but maybe that's just my limited experience):

/**
 * Convert a Map into a bean; the generic type saves the caller a cast.
 * @param <T>
 * @param map
 * @param class1
 * @return
 */
public static <T> T map2Bean(Map<String, String> map, Class<T> class1) {
    T bean = null;
    try {
        // Requires a public no-arg constructor; avoids the deprecated Class.newInstance()
        bean = class1.getDeclaredConstructor().newInstance();
        BeanUtils.populate(bean, map);
    } catch (Exception e) {
        e.printStackTrace();
    }
    return bean;
}
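And a quick usage sketch, assuming the hypothetical Person bean from the earlier example and the map2Bean helper above are in scope:

Map<String, String> map = new HashMap<>();
map.put("name", "张三");
map.put("sex", "不男不女");
// Populate a new Person from the map; keys must match the bean's property names
Person person = map2Bean(map, Person.class);
System.out.println(person.getName()); // prints 张三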
That wraps up this walkthrough of exporting CSV files in Java.