HDFS read and write

Posted by 21ca on 2017-03-13
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.net.URL;
import java.nio.charset.StandardCharsets;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.fs.Path;

  10. public class HdfsTest {

  11.     public static void main(String[] args) throws Exception {
  12.         write();
  13.     }

  14.     public static void read1(String url) throws Exception {
  15.         URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
  16.         System.out.println(IOUtils.toString(new URL(url).openStream()));
  17.     }
  18.     
  19.     public static void read2(String url) throws Exception {
  20.         Configuration conf = new Configuration();
  21.         FileSystem fs = FileSystem.get(URI.create(url), conf);
  22.         FSDataInputStream is = fs.open(new Path(url));
  23.         System.out.println(IOUtils.toString(is));
  24.         is.close();
  25.         fs.close();
  26.     }

  27.     public static void write() throws Exception {
  28.         String hdfsFile = "hdfs://192.168.202.134:9000/test/text2.txt";
  29.         Configuration conf = new Configuration();
  30.         FileSystem fs = FileSystem.get(URI.create(hdfsFile), conf, "andy");
  31.         OutputStream out = fs.create(new Path(hdfsFile), true);
  32.         out.write("new file text2.txt".getBytes());
  33.         out.close();
  34.         fs.close();
  35.         read1(hdfsFile);
  36.         read2(hdfsFile);
  37.     }
  38. }

From the ITPUB blog, link: http://blog.itpub.net/10742815/viewspace-2135204/ — please credit the source when republishing.

Related articles