Using Java to pack images into a SequenceFile and store them in HBase
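The post walks through three pieces of code: packing images into a Hadoop SequenceFile and loading them into HBase, uploading files to HDFS from Spring controllers, and streaming the stored images and videos back to a JSP page.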
/**
 * package: com.cloudera.hbase
 * description: use Java to pack images into a SequenceFile and store them in HBase
 * create_user: Fayson
 * email: htechinfo@163.com
 * create_date: 2017/11/30
 * create_time: 12:49 AM
 * WeChat official account: Hadoop實操
 */
package com.cloudera.hbase;

import java.net.URI;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

public class SequenceFileTest {
    //HDFS input and output paths
    static String inpath = "/fayson/picHbase";
    static String outpath = "/fayson/out";
    static SequenceFile.Writer writer = null;
    static HTable htable = null;

    public static void main(String[] args) throws Exception {
        //inpath = args[0];
        //outpath = args[1];
        //String zklist = args[2];

        //HBase connection setup
        Configuration hbaseConf = HBaseConfiguration.create();
        hbaseConf.set("hbase.zookeeper.property.clientPort", "2181");
        hbaseConf.setStrings("hbase.zookeeper.quorum", "ip-172-31-5-38.ap-southeast-1.compute.internal");
        //target table
        htable = new HTable(hbaseConf, "picHbase");

        //configuration for the HDFS client
        Configuration conf = new Configuration();
        //conf.addResource(new Path("C:\\Users\\17534\\eclipse-workspace\\hbaseexmaple\\core-site.xml"));
        //conf.addResource(new Path("C:\\Users\\17534\\eclipse-workspace\\hbaseexmaple\\hdfs-site.xml"));
        URI uri = new URI(inpath);
        FileSystem fileSystem = FileSystem.get(uri, conf, "hdfs");

        //instantiate the SequenceFile writer: keys are file paths (Text), values are file contents (BytesWritable)
        writer = SequenceFile.createWriter(fileSystem, conf, new Path(outpath), Text.class, BytesWritable.class);
        //recursively walk the input directory and append every file to the SequenceFile
        listFileAndWriteToSequenceFile(fileSystem, inpath);
        //close the writer
        org.apache.hadoop.io.IOUtils.closeStream(writer);

        //read every record back and load it into HBase
        URI seqURI = new URI(outpath);
        FileSystem fileSystemSeq = FileSystem.get(seqURI, conf);
        SequenceFile.Reader reader = new SequenceFile.Reader(fileSystemSeq, new Path(outpath), conf);
        Text key = new Text();
        BytesWritable val = new BytesWritable();
        // key = (Text) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
        // val = (BytesWritable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
        while (reader.next(key, val)) {
            String temp = key.toString();
            temp = temp.substring(temp.lastIndexOf("/") + 1);
            // temp = temp.substring(temp.indexOf("Image")+6, temp.indexOf("."));
            // String[] tmp = temp.split("/");
            //rowkey design: here simply the bare file name
            String rowKey = temp;
            // String rowKey = Integer.valueOf(tmp[0])-1+"_"+Integer.valueOf(tmp[1])/2+"_"+Integer.valueOf(tmp[2])/2;
            System.out.println(rowKey);
            //build the Put for this rowkey
            Put put = new Put(Bytes.toBytes(rowKey));
            //column family, column qualifier, cell value; copyBytes() trims the
            //BytesWritable backing array to the record's actual length
            put.addColumn("picinfo".getBytes(), "content".getBytes(), val.copyBytes());
            htable.put(put);
        }
        htable.close();
        org.apache.hadoop.io.IOUtils.closeStream(reader);
    }

    /****
     * Recursively walk a directory and append each file it contains to the SequenceFile
     * @param fileSystem
     * @param path
     * @throws Exception
     */
    public static void listFileAndWriteToSequenceFile(FileSystem fileSystem, String path) throws Exception {
        final FileStatus[] listStatuses = fileSystem.listStatus(new Path(path));
        for (FileStatus fileStatus : listStatuses) {
            if (fileStatus.isFile()) {
                Text fileText = new Text(fileStatus.getPath().toString());
                System.out.println(fileText.toString());
                //read the whole file into memory; IOUtils.toByteArray (Apache Commons IO)
                //already consumes the stream, so no extra read() call is needed
                FSDataInputStream in = fileSystem.open(new Path(fileText.toString()));
                byte[] buffer = IOUtils.toByteArray(in);
                in.close();
                BytesWritable value = new BytesWritable(buffer);
                //append one <path, bytes> record to the SequenceFile
                writer.append(fileText, value);
            }
            if (fileStatus.isDirectory()) {
                listFileAndWriteToSequenceFile(fileSystem, fileStatus.getPath().toString());
            }
        }
    }
}
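The code above assumes the picHbase table with its picinfo column family already exists. The quickest way to create it is `create 'picHbase', 'picinfo'` in the HBase shell; a minimal Java sketch using the same pre-1.0-style client API as the rest of this post (the ZooKeeper settings are copied from the example above and must be replaced with your own) might look like this:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class CreatePicTable {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.property.clientPort", "2181");
        //replace with your own ZooKeeper quorum
        conf.set("hbase.zookeeper.quorum", "ip-172-31-5-38.ap-southeast-1.compute.internal");
        HBaseAdmin admin = new HBaseAdmin(conf);
        if (!admin.tableExists(TableName.valueOf("picHbase"))) {
            //one column family "picinfo"; the image bytes go into its "content" qualifier
            HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("picHbase"));
            desc.addFamily(new HColumnDescriptor("picinfo"));
            admin.createTable(desc);
        }
        admin.close();
    }
}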
----- Uploading files to HDFS
package com.bigdata.log.controller;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.multipart.MultipartFile;
@Controller
public class Local2HdfsController {
    /**
     * Upload a single file
     * @param file
     * @param request
     * @param response
     * @return
     * @throws Exception
     */
    @RequestMapping(value = "/upload2hdfs", method = {RequestMethod.POST, RequestMethod.GET})
    @ResponseBody
    public String upload(@RequestParam("file") MultipartFile file, HttpServletRequest request, HttpServletResponse response) throws Exception {
        HDFSClientUtil hdfsClient = new HDFSClientUtil();
        InputStream in = file.getInputStream();
        //upload into /input/upload/
        String tempPathString = "/input/upload/";
        //original file name
        String name = file.getOriginalFilename();
        String flag = "";
        if (!name.equals("")) {
            //full path after the upload
            String cloudPath = tempPathString + name;
            try {
                hdfsClient.uploadFile(in, cloudPath);
                flag = "ok";
            } catch (Exception e) {
                System.out.println("upload failed");
                flag = "fail";
            }
        }
        in.close();
        return flag;
    }
    /**
     * Batch upload of multiple files
     * @param file
     * @param request
     * @param response
     * @return
     * @throws Exception
     */
    @RequestMapping(value = "/upload2hdfsMore", method = {RequestMethod.POST, RequestMethod.GET})
    @ResponseBody
    public String uploads(@RequestParam("file") MultipartFile[] file, HttpServletRequest request, HttpServletResponse response) throws Exception {
        HDFSClientUtil hdfsClient = new HDFSClientUtil();
        //upload into /tmp/picHbase/
        //String tempPathString = "/input/upload/";
        String tempPathString = "/tmp/picHbase/";
        String flag = "";
        for (int i = 0; i < file.length; i++) {
            InputStream in = file[i].getInputStream();
            //original file name
            String name = file[i].getOriginalFilename();
            //full path after the upload
            String cloudPath = tempPathString + name;
            if (!name.equals("")) {
                try {
                    hdfsClient.uploadFile(in, cloudPath);
                    flag = "ok";
                } catch (Exception e) {
                    System.out.println("upload failed");
                    flag = "fail";
                }
                in.close();
            }
        }
        return flag;
    }
    /**
     * @function upload a local file to HDFS
     * @param source source file path
     * @param dest destination path
     * @throws IOException
     * @throws URISyntaxException
     *
     * Reads a local directory directly and uploads its contents to HDFS; this method is not used for now
     */
    @RequestMapping(value = "/uploadTest.do", method = RequestMethod.POST)
    @ResponseBody
    public void copyFromLocal(HttpServletRequest request, HttpServletResponse response) throws Exception {
        //String source = request.getParameter("source")==null?"":request.getParameter("source");
        String source = "D://tmp";
        //String source = "./data/weibo.txt";
        //HDFS destination path
        String dest = "hdfs://199.66.68.112:8020/manyPic/";
        //read the Hadoop file system configuration
        Configuration conf = new Configuration();
        URI uri = new URI("hdfs://199.66.68.112:8020");
        //FileSystem is the core class for working with HDFS; it resolves the URI to the HDFS file system
        FileSystem fileSystem = FileSystem.get(uri, conf, "hdfs");
        //source path
        Path srcPath = new Path(source);
        //destination path
        Path dstPath = new Path(dest);
        //create the destination directory if it does not exist
        if (!(fileSystem.exists(dstPath))) {
            fileSystem.mkdirs(dstPath);
        }
        //derive the local file name
        String filename = source.substring(source.lastIndexOf('/') + 1);
        try {
            //copy the local file to HDFS
            fileSystem.copyFromLocalFile(srcPath, dstPath);
            System.out.println("File " + filename + " copied to " + dest);
        } catch (Exception e) {
            //do not call System.exit() here: inside a web controller it would take down the whole servlet container
            System.err.println("Exception caught! :" + e);
        } finally {
            fileSystem.close();
        }
    }
}
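The HDFSClientUtil class invoked by both upload methods is not shown in the original post. A minimal sketch of what its uploadFile(InputStream, String) helper might look like, with the signature inferred from the call sites above and the NameNode address assumed to be the same 199.66.68.112:8020 used elsewhere in this post:

package com.bigdata.log.controller;

import java.io.InputStream;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class HDFSClientUtil {
    //hypothetical implementation of the helper invoked as hdfsClient.uploadFile(in, cloudPath) above
    public void uploadFile(InputStream in, String cloudPath) throws Exception {
        Configuration conf = new Configuration();
        //assumed NameNode address, taken from the copyFromLocal method in this post
        FileSystem fs = FileSystem.get(new URI("hdfs://199.66.68.112:8020"), conf, "hdfs");
        FSDataOutputStream out = fs.create(new Path(cloudPath), true);
        try {
            //stream the upload into HDFS; the caller closes the input stream itself
            IOUtils.copyBytes(in, out, 4096, false);
        } finally {
            out.close();
        }
    }
}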
--- Displaying images and video on a JSP page
package com.bigdata.log.controller;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.util.ArrayList;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IOUtils;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
@Controller
public class HdfsVideoController {
    /**
     * Video playback
     * @param request
     * @param response
     * @throws IllegalArgumentException
     * @throws IOException
     */
    @RequestMapping(value = "/playVideo.do", method = {RequestMethod.POST, RequestMethod.GET})
    @ResponseBody
    public void test(HttpServletRequest request, HttpServletResponse response) throws IllegalArgumentException, IOException {
        String fpath = request.getParameter("fpath") == null ? "" : request.getParameter("fpath");
        if (fpath.isEmpty())
            return;
        //e.g. hdfs://199.66.68.111:8020/.....
        String filename = fpath;
        //build the client configuration for the HA nameservice "ns"
        Configuration conf = new Configuration();
        //default file system URI: the nameservice ns
        conf.set("fs.defaultFS", "hdfs://ns");
        //declare the nameservice ns
        conf.set("dfs.nameservices", "ns");
        //ns has two NameNodes, nna and nns (active/standby)
        conf.set("dfs.ha.namenodes.ns", "nna,nns");
        //RPC address of nna
        conf.set("dfs.namenode.rpc-address.ns.nna", "199.66.68.111:8020");
        //RPC address of nns
        conf.set("dfs.namenode.rpc-address.ns.nns", "199.66.68.112:8020");
        //proxy provider that handles automatic NameNode failover
        conf.set("dfs.client.failover.proxy.provider.ns", "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
        FileSystem fs = FileSystem.get(URI.create(filename), conf);
        FSDataInputStream in = fs.open(new Path(filename));
        final long fileLen = fs.getFileStatus(new Path(filename)).getLen();
        String range = request.getHeader("Range");
        response.setHeader("Content-Type", "video/mp4");
        OutputStream out = response.getOutputStream();
        if (range == null) {
            //no Range header: send the whole file as a download
            filename = fpath.substring(fpath.lastIndexOf("/") + 1);
            response.setHeader("Content-Disposition", "attachment; filename=" + filename);
            response.setContentType("application/octet-stream");
            response.setContentLength((int) fileLen);
            IOUtils.copyBytes(in, out, fileLen, false);
        } else {
            //Range header present: serve the requested byte range as 206 Partial Content
            long start = Long.parseLong(range.substring(range.indexOf("=") + 1, range.indexOf("-")));
            long end;
            if (range.endsWith("-"))
                end = fileLen - 1;
            else
                end = Long.parseLong(range.substring(range.indexOf("-") + 1));
            //copy exactly the requested range, not everything after start
            long count = end - start + 1;
            String contentRange = "bytes " + start + "-" + end + "/" + fileLen;
            response.setStatus(206);
            response.setContentType("video/mp4");
            response.setHeader("Content-Range", contentRange);
            in.seek(start);
            IOUtils.copyBytes(in, out, count, false);
        }
        in.close();
        out.close();
    }
    /**
     * List all MP4 video files under /input/upload
     * @param request
     * @param response
     * @return
     * @throws Exception
     */
    @RequestMapping(value = "/getAllVideo.do", method = {RequestMethod.POST, RequestMethod.GET})
    @ResponseBody
    public ArrayList<String> getDirectoryFromHdfs(HttpServletRequest request, HttpServletResponse response) throws Exception {
        ArrayList<String> list = new ArrayList<String>();
        String direPath = "/input/upload";
        //same HA nameservice configuration as in playVideo.do above
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://ns");
        conf.set("dfs.nameservices", "ns");
        conf.set("dfs.ha.namenodes.ns", "nna,nns");
        conf.set("dfs.namenode.rpc-address.ns.nna", "199.66.68.111:8020");
        conf.set("dfs.namenode.rpc-address.ns.nns", "199.66.68.112:8020");
        conf.set("dfs.client.failover.proxy.provider.ns", "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
        FileSystem fs = FileSystem.get(URI.create(direPath), conf);
        FileStatus[] filelist = fs.listStatus(new Path(direPath));
        for (int i = 0; i < filelist.length; i++) {
            FileStatus fileStatus = filelist[i];
            //file name under /input/upload
            String nameStrings = fileStatus.getPath().getName();
            //endsWith() is safer than substring(): it also handles names shorter than four characters
            if (nameStrings.endsWith(".mp4")) {
                list.add(nameStrings);
            }
        }
        fs.close();
        return list;
    }
    /**
     * Download a video from HDFS to the local disk
     * @param request
     * @param response
     * @throws Exception
     */
    @RequestMapping(value = "/downloadVideo.do", method = {RequestMethod.POST, RequestMethod.GET})
    @ResponseBody
    public void test111(HttpServletRequest request, HttpServletResponse response) throws Exception {
        //same HA nameservice configuration as in playVideo.do above
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://ns");
        conf.set("dfs.nameservices", "ns");
        conf.set("dfs.ha.namenodes.ns", "nna,nns");
        conf.set("dfs.namenode.rpc-address.ns.nna", "199.66.68.111:8020");
        conf.set("dfs.namenode.rpc-address.ns.nns", "199.66.68.112:8020");
        conf.set("dfs.client.failover.proxy.provider.ns", "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
        FileSystem fs = FileSystem.get(conf);
        //open the HDFS input stream
        FSDataInputStream fis = fs.open(new Path("/input/upload/dispose.mp4"));
        //create the local output stream
        FileOutputStream fos = new FileOutputStream(new File("D://測試/dispose.mp4"));
        //this copyBytes overload closes both streams when the copy finishes
        IOUtils.copyBytes(fis, fos, conf);
    }
    private static Configuration hbaseConf = HBaseConfiguration.create();
    static {
        hbaseConf.set("fs.defaultFS", "hdfs://199.66.68.112:8020");
        hbaseConf.set("hbase.zookeeper.quorum", "199.66.68.111,199.66.68.112,199.66.68.113,199.66.68.114,199.66.68.115,199.66.68.116,199.66.68.117");
        hbaseConf.set("hbase.zookeeper.property.clientPort", "2181");
    }
    /**
     * Render an image stored in HBase
     * rowKey: the HBase row key
     * tablename: the HBase table name
     * @param request
     * @param response
     * @throws IOException
     */
    @RequestMapping(value = "/playImg.do", method = {RequestMethod.POST, RequestMethod.GET})
    @ResponseBody
    public void scanTable(HttpServletRequest request, HttpServletResponse response) throws IOException {
        //row key
        String rowKey = request.getParameter("rowKey") == null ? "" : request.getParameter("rowKey");
        //table name
        String tablename = request.getParameter("tablename") == null ? "" : request.getParameter("tablename");
        //column family
        String family = request.getParameter("family") == null ? "" : request.getParameter("family");
        //column qualifier
        String col = request.getParameter("col") == null ? "" : request.getParameter("col");
        Configuration configuration = HBaseConfiguration.create();
        //configuration.set("fs.defaultFS","hdfs://199.66.68.112:8020");
        configuration.set("hbase.zookeeper.quorum", "199.66.68.111,199.66.68.112,199.66.68.113,199.66.68.114,199.66.68.115,199.66.68.116,199.66.68.117");
        configuration.set("hbase.zookeeper.property.clientPort", "2181");
        HTable table = new HTable(configuration, tablename);
        Get get = new Get(rowKey.getBytes());
        get.addColumn(Bytes.toBytes(family), Bytes.toBytes(col));
        Result rs = table.get(get);
        //close the table whether or not the row was found
        table.close();
        if (!rs.isEmpty()) {
            //bs holds the image bytes returned by the Get
            byte[] bs = rs.getValue(Bytes.toBytes(family), Bytes.toBytes(col));
            OutputStream out = response.getOutputStream();
            out.write(bs);
            out.flush();
            out.close();
        }
    }
}
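On the JSP side, these controllers can be wired straight into ordinary tags: an image stored by SequenceFileTest is rendered with something like `<img src="playImg.do?tablename=picHbase&rowKey=001.jpg&family=picinfo&col=content"/>` (the rowKey value being whichever file name was used as the row key), and a video with `<video src="playVideo.do?fpath=hdfs://ns/input/upload/dispose.mp4" controls></video>`. The browser's video player then issues requests carrying a Range header such as `Range: bytes=1000-`, which playVideo.do answers with a 206 Partial Content status and a matching Content-Range header, so seeking works without downloading the whole file.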
From the ITPUB blog; link: http://blog.itpub.net/450962/viewspace-2711401/. Please credit the source when reposting.