@
目錄
- java實現下載hdfs檔案及資料夾
- 說明:java實現從HDFS上下載檔案及資料夾的功能,以流形式輸出,便於使用者自定義儲存任何路徑下
- 1.下載xxx檔案
- 2.下載xx資料夾
java實現下載hdfs檔案及資料夾
說明:java實現從HDFS上下載檔案及資料夾的功能,以流形式輸出,便於使用者自定義儲存任何路徑下
<!--Hadoop 相關依賴(hadoop-common / hadoop-hdfs / hadoop-client)-->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>3.1.1</version>
<exclusions>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>3.1.1</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>3.1.1</version>
</dependency>
相關類引入jar包,程式碼上方檢視對照即可
1.下載xxx檔案
“下載檔案” 執行流程說明:
1.構建hdfs連線,初始化Configuration
2.獲取檔案輸入流FSDataInputStream,呼叫downloadFile()
3.方法內部先設定header請求頭,格式以檔名(convertFileName(fileName))輸出檔案,然後輸出流內部資訊以流的形式輸出
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.InputStreamResource;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import util.ExportUtil;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
/**
 * Downloads a single file from HDFS and streams it back to the HTTP caller.
 *
 * @return response entity that streams the HDFS file content as an attachment
 * @throws URISyntaxException if the hard-coded HDFS URI is malformed
 * @throws IOException        if the HDFS connection or file open fails
 * @author liudz
 * @date 2020/6/9
 **/
@RequestMapping(value = "/down", method = RequestMethod.GET)
public ResponseEntity<InputStreamResource> Test01() throws URISyntaxException, IOException {
    // Build the HDFS connection; an empty Configuration falls back to defaults.
    Configuration hdfsConf = new Configuration();
    FileSystem fileSystem = FileSystem.get(new URI("hdfs://172.16.1.9:8020"), hdfsConf);
    // Open the target file as a stream; ExportUtil wires up the download headers
    // and hands the stream to Spring for the response body.
    FSDataInputStream hdfsIn = fileSystem.open(new Path("hdfs://172.16.1.9:8020/spark/testLog.txt"));
    return ExportUtil.downloadFile(hdfsIn, "testLog.txt");
}
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import lombok.extern.slf4j.Slf4j;
import org.springframework.core.io.FileSystemResource;
import org.springframework.core.io.InputStreamResource;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
/**
 * Streams a file to the HTTP caller as an attachment.
 *
 * @param in       source stream with the file content; not closed here — Spring
 *                 consumes it through the returned InputStreamResource
 * @param fileName display name used in the Content-Disposition header
 * @return streaming response entity, or {@code null} when the stream cannot be read
 */
public static ResponseEntity<InputStreamResource> downloadFile(InputStream in, String fileName) {
    try {
        // Fix: the original allocated "new byte[in.available()]" and never
        // filled it — the array existed only to supply its own length. Use the
        // value directly. NOTE(review): available() is only a hint for general
        // streams; kept for compatibility with the original Content-Length.
        int contentLength = in.available();
        HttpHeaders headers = new HttpHeaders();
        headers.add("Cache-Control", "no-cache, no-store, must-revalidate");
        headers.add("Content-Disposition", String.format("attachment; filename=\"%s\"", convertFileName(fileName)));
        headers.add("Pragma", "no-cache");
        headers.add("Expires", "0");
        headers.add("Content-Language", "UTF-8");
        // Body is streamed, not buffered: the client reads straight from "in".
        return ResponseEntity.ok().headers(headers).contentLength(contentLength)
            .contentType(MediaType.parseMediaType("application/octet-stream")).body(new InputStreamResource(in));
    } catch (IOException e) {
        // SLF4J parameterized logging instead of string concatenation.
        log.info("downfile is error{}", e.getMessage());
    }
    log.info("file is null{}", fileName);
    return null;
}
2.下載xx資料夾
“下載資料夾及內部檔案” 執行流程說明:
1.初始化header請求頭資訊,格式以xx.zip輸出資料夾,呼叫down2()
2.構建hdfs連線,初始化Configuration
3.呼叫迭代器compress,傳入引數(資料夾整體路徑 + ZipOutputStream例項 + FileSystem例項)
4.迭代器執行思路:
遍歷對應子目錄:1)如果為資料夾,zip寫入一個檔案進入點(路徑末尾單詞 + “/”)
2)如果為檔案,zip寫入檔案(目錄檔案的整體路徑)
----------------------------------------------------------------------------------------
******注意:容易出錯2行程式碼:******
壓縮檔案:zipOutputStream.putNextEntry(new ZipEntry(name.substring(1)));
壓縮資料夾:zipOutputStream.putNextEntry(new ZipEntry(fileStatulist[i].getPath().getName() + "/"));
**name屬性用於zip建立檔案,fileStatulist[i].getPath().getName()用於zip建立資料夾**
-----------------------------------------------------------------------------------------
舉例說明:
假設資料夾spark-warehouse路徑下有2資料夾data1和data2,資料夾下各一個a.txt文字檔案
第一步:獲取路徑“C:/Users/liudz/Desktop/spark-warehouse”下的目錄,也就是(C:/Users/liudz/Desktop/spark-warehouse/data1、C:/Users/liudz/Desktop/spark-warehouse/data2)
lastName=spark-warehouse
name=/spark-warehouse/data1
判斷“C:/Users/liudz/Desktop/spark-warehouse/data1”為目錄,zip寫入“data1/”資料夾
第二步:獲取路徑“C:/Users/liudz/Desktop/spark-warehouse/data1”下的目錄,也就是(C:/Users/liudz/Desktop/spark-warehouse/data1/a.txt)
lastName=data1
name=/data1/a.txt
判斷“C:/Users/liudz/Desktop/spark-warehouse/data1/a.txt”為檔案,zip寫入“data1/a.txt”檔案
。
。
。
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.InputStreamResource;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import util.ExportUtil;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
/**
 * Downloads an HDFS folder as a zip archive named spark-warehouse.zip.
 *
 * @param businessId business ID (not used by the hard-coded download path —
 *                   presumably intended to select the folder; TODO confirm)
 * @return zip bytes with attachment headers, or HTTP 500 when zipping fails
 * @throws IOException on stream failure
 * @author liudz
 * @date 2020/6/9
 **/
@RequestMapping(value = "/downloadFolder", method = RequestMethod.GET)
public ResponseEntity<byte[]> downloadFolder(Long businessId) throws IOException {
    HttpHeaders headers = new HttpHeaders();
    headers.add("Cache-Control", "no-cache, no-store, must-revalidate");
    headers.add("Content-Disposition", "attachment; filename=spark-warehouse.zip");
    headers.add("Pragma", "no-cache");
    headers.add("Expires", "0");
    headers.add("Content-Language", "UTF-8");
    ByteArrayOutputStream zos =
        (ByteArrayOutputStream) hdfsClientService.down2("hdfs://172.16.1.9:8020/spark/spark-warehouse");
    // Fix: down2() returns null when the HDFS connection or compression fails;
    // the original dereferenced it unconditionally and threw an NPE. Surface
    // the failure as a 500 instead.
    if (zos == null) {
        return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
    }
    byte[] out = zos.toByteArray();
    zos.close();
    return new ResponseEntity<>(out, headers, HttpStatus.OK);
}
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.springframework.stereotype.Service;
/**
 * Zips an entire HDFS directory tree into an in-memory buffer.
 *
 * @param cloudPath HDFS path of the folder to compress
 * @return a ByteArrayOutputStream holding the finished zip bytes, or
 *         {@code null} when the HDFS connection or compression fails
 * @author liudz
 * @date 2020/6/8
 **/
public OutputStream down2(String cloudPath) {
    ByteArrayOutputStream out = null;
    try {
        Configuration conf = new Configuration();
        // NOTE(review): FileSystem.get returns a cached, shared instance, so it
        // is deliberately not closed here — confirm against project usage.
        FileSystem fs = FileSystem.get(new URI("hdfs://172.16.1.9:8020"), conf);
        out = new ByteArrayOutputStream();
        // Fix: try-with-resources guarantees the zip trailer is written and the
        // stream is closed even if compress() throws; the original leaked the
        // ZipOutputStream on the exception path.
        try (ZipOutputStream zos = new ZipOutputStream(out)) {
            compress(cloudPath, zos, fs);
        }
    } catch (IOException | URISyntaxException e) {
        // Fix: parameterized SLF4J call — the original concatenated the message
        // after a "{}" placeholder that was never substituted.
        log.info("----error:{}----", e.getMessage());
    }
    return out;
}
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.springframework.stereotype.Service;
/**
 * Recursively writes the contents of an HDFS directory into a zip stream.
 *
 * <p>Entry naming: zip entry names are made relative to the last path segment
 * of {@code baseDir} — files are written via {@code name.substring(1)} (the
 * leading "/" dropped), directories as {@code getName() + "/"}.
 *
 * @param baseDir         HDFS directory whose children are zipped
 * @param zipOutputStream destination zip stream (owned by the caller)
 * @param fs              open HDFS handle used for listing and reading
 * @throws IOException declared for callers; note that the internal catch below
 *                     currently swallows and logs I/O errors instead of
 *                     rethrowing them (preserved original behavior)
 * @author liudz
 * @date 2020/6/8
 **/
public void compress(String baseDir, ZipOutputStream zipOutputStream, FileSystem fs) throws IOException {
    try {
        FileStatus[] fileStatulist = fs.listStatus(new Path(baseDir));
        log.info("basedir = {}", baseDir);
        String[] strs = baseDir.split("/");
        // Last path segment: zip entry names are rooted at it.
        String lastName = strs[strs.length - 1];
        for (FileStatus status : fileStatulist) {
            String name = status.getPath().toString();
            name = name.substring(name.indexOf("/" + lastName));
            if (status.isFile()) {
                // File entry: drop the leading "/" so the path is zip-relative.
                zipOutputStream.putNextEntry(new ZipEntry(name.substring(1)));
                // Fix: try-with-resources closes the HDFS stream even when the
                // copy throws (the original leaked it on the exception path);
                // also replaced Integer.parseInt("1024") with the literal.
                try (FSDataInputStream inputStream = fs.open(status.getPath())) {
                    IOUtils.copyBytes(inputStream, zipOutputStream, 1024);
                }
            } else {
                // Directory entry: bare name plus "/", then recurse into it.
                zipOutputStream.putNextEntry(new ZipEntry(status.getPath().getName() + "/"));
                log.info("fileStatulist[i].getPath().toString() = {}", status.getPath());
                compress(status.getPath().toString(), zipOutputStream, fs);
            }
        }
    } catch (IOException e) {
        // Parameterized SLF4J call instead of concatenation next to "{}".
        log.info("----error:{}----", e.getMessage());
    }
}
重要資訊
- 官網:https://ais.cn/u/vEbMBz