日韩性视频-久久久蜜桃-www中文字幕-在线中文字幕av-亚洲欧美一区二区三区四区-撸久久-香蕉视频一区-久久无码精品丰满人妻-国产高潮av-激情福利社-日韩av网址大全-国产精品久久999-日本五十路在线-性欧美在线-久久99精品波多结衣一区-男女午夜免费视频-黑人极品ⅴideos精品欧美棵-人人妻人人澡人人爽精品欧美一区-日韩一区在线看-欧美a级在线免费观看

歡迎訪問 生活随笔!

生活随笔

當前位置: 首頁 > 编程资源 > 编程问答 >内容正文

编程问答

MapReduce基础开发之八HDFS文件CRUD操作

發布時間:2025/4/16 编程问答 21 豆豆
生活随笔 收集整理的這篇文章主要介紹了 MapReduce基础开发之八HDFS文件CRUD操作 小編覺得挺不錯的,現在分享給大家,幫大家做個參考.

HDFS文件操作的基礎代碼。

package com.hive;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

/**
 * Basic HDFS file CRUD operations, mirroring common {@code hadoop fs}
 * shell commands (ls, cat/get, mkdir, put, mv, rm -r) through the Java API.
 *
 * <p>NOTE(review): {@link FileSystem#get(Configuration)} returns a JVM-cached
 * shared instance. This class closes it after each operation (as the original
 * code did in some methods); in a long-lived process that invalidates the
 * cached handle for other callers — confirm this matches the deployment model.
 */
public class HdfsCRUD {

    public static void main(String[] args) {
        // Intended to be submitted to the cluster via "yarn jar".
        // Uncomment to exercise the other operations:
        // String srcPath = "/tmp/fjs/dpi1/";
        // HdfsCRUD.list(srcPath);
        // String file = "/tmp/fjs/in/test.txt";
        // HdfsCRUD.readFile(file);
        // HdfsCRUD.getModificationTime(file);
        // HdfsCRUD.getBlockLocations(file);
        // HdfsCRUD.getHostnames();
        String dir = "/tmp/fjs/in/hdfs";
        HdfsCRUD.mkdir(dir);
    }

    /**
     * Equivalent of {@code hadoop fs -ls}: prints each direct child of
     * {@code srcPath}, tagged as a directory or a file.
     *
     * @param srcPath HDFS directory to list
     */
    public static void list(String srcPath) {
        Configuration conf = new Configuration();
        // try-with-resources: the original never closed fs in this method.
        try (FileSystem fs = FileSystem.get(conf)) {
            RemoteIterator<LocatedFileStatus> it = fs.listLocatedStatus(new Path(srcPath));
            while (it.hasNext()) {
                // The LocatedFileStatus already knows its type; avoid the
                // deprecated FileSystem#isDirectory/#isFile RPC round trips.
                LocatedFileStatus status = it.next();
                Path path = status.getPath();
                if (status.isDirectory()) {
                    System.out.println("-----------DirectoryName: " + path.getName());
                } else if (status.isFile()) {
                    System.out.println("-----------FileName: " + path.getName());
                }
            }
        } catch (IOException e) {
            System.out.println(e.getMessage());
        }
    }

    /**
     * Equivalent of {@code hadoop fs -get}: copies the HDFS file to the
     * local path {@code /tmp/<filename>}.
     *
     * @param file absolute HDFS path of the file to download
     */
    public static void readFile(String file) {
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(conf)) {
            Path path = new Path(file);
            if (!fs.exists(path)) {
                System.out.println("file'" + file + "' doesn't exist!");
                return;
            }
            String filename = file.substring(file.lastIndexOf('/') + 1);
            // Both streams are closed even if the copy fails part-way;
            // the original leaked them on any IOException.
            try (FSDataInputStream in = fs.open(path);
                 OutputStream out = new BufferedOutputStream(
                         new FileOutputStream(new File("/tmp/" + filename)))) {
                byte[] buf = new byte[1024];
                int numBytes;
                while ((numBytes = in.read(buf)) > 0) {
                    out.write(buf, 0, numBytes);
                }
            }
        } catch (IOException e) {
            System.out.println(e.getMessage());
        }
    }

    /**
     * Prints the last-modification time of an HDFS file.
     *
     * @param source absolute HDFS path
     */
    public static void getModificationTime(String source) {
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(conf)) {
            Path srcPath = new Path(source);
            if (!fs.exists(srcPath)) {
                System.out.println("No such destination " + srcPath);
                return;
            }
            String filename = source.substring(source.lastIndexOf('/') + 1);
            FileStatus fileStatus = fs.getFileStatus(srcPath);
            long modificationTime = fileStatus.getModificationTime();
            // The original concatenated the PrintStream returned by
            // System.out.format into another println, garbling the output.
            // Format exactly once; %t conversions accept a long epoch-millis.
            System.out.format("File %s; Modification time :%2$tI:%2$tM:%2$tS%n",
                    filename, modificationTime);
        } catch (IOException e) {
            System.out.println(e.getMessage());
        }
    }

    /**
     * Prints the datanode host names storing each block of an HDFS file.
     *
     * @param source absolute HDFS path
     */
    public static void getBlockLocations(String source) {
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(conf)) {
            Path srcPath = new Path(source);
            if (!fs.exists(srcPath)) {
                System.out.println("No such destination " + srcPath);
                return;
            }
            String filename = source.substring(source.lastIndexOf('/') + 1);
            FileStatus fileStatus = fs.getFileStatus(srcPath);
            BlockLocation[] blkLocations =
                    fs.getFileBlockLocations(fileStatus, 0, fileStatus.getLen());
            System.out.println("File :" + filename + "stored at:");
            for (int i = 0; i < blkLocations.length; i++) {
                // The original passed the raw String[] to %s, printing the
                // array's toString() ("[Ljava.lang.String;@..."); join instead.
                String[] hosts = blkLocations[i].getHosts();
                System.out.format("Host %d: %s%n", i, String.join(", ", hosts));
            }
        } catch (IOException e) {
            System.out.println(e.getMessage());
        }
    }

    /** Prints the DNS host name of every datanode in the cluster. */
    public static void getHostnames() {
        Configuration config = new Configuration();
        try (FileSystem fs = FileSystem.get(config)) {
            // getDataNodeStats is only available on the HDFS implementation;
            // this cast fails for non-HDFS default filesystems.
            DistributedFileSystem hdfs = (DistributedFileSystem) fs;
            DatanodeInfo[] dataNodeStats = hdfs.getDataNodeStats();
            for (DatanodeInfo node : dataNodeStats) {
                System.out.println("datenode hostname:" + node.getHostName());
            }
        } catch (IOException e) {
            System.out.println(e.getMessage());
        }
    }

    /**
     * Equivalent of {@code hadoop fs -mkdir}: creates {@code dir} (including
     * missing parents) unless it already exists.
     *
     * @param dir absolute HDFS directory path to create
     */
    public static void mkdir(String dir) {
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(conf)) {
            Path path = new Path(dir);
            if (!fs.exists(path)) {
                fs.mkdirs(path);
                System.out.println("create directory '" + dir + "' successfully!");
            } else {
                System.out.println("directory '" + dir + "' exits!");
            }
        } catch (IOException e) {
            System.out.println("FileSystem get configuration with anerror");
            e.printStackTrace();
        }
    }

    /**
     * Uploads a local file to HDFS, like {@code hadoop fs -put}. The
     * destination path must already exist; an existing destination file is
     * overwritten (delSrc=false, overwrite=true).
     *
     * @param source local file path
     * @param dest   existing HDFS destination path
     * @throws RuntimeException if the copy fails with an I/O error
     */
    public void copyFromLocal(String source, String dest) {
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(conf)) {
            Path srcPath = new Path(source);
            Path dstPath = new Path(dest);
            if (!fs.exists(dstPath)) {
                System.out.println("dstPathpath doesn't exist");
                System.out.println("No such destination " + dstPath);
                return;
            }
            String filename = source.substring(source.lastIndexOf('/') + 1);
            // copyFromLocalFile(delSrc, overwrite, src, dst): keep the local
            // source and replace any existing destination file.
            fs.copyFromLocalFile(false, true, srcPath, dstPath);
            System.out.println("File " + filename + "copied to " + dest);
        } catch (IOException e) {
            System.out.println(e.getMessage());
            // BUG FIX: the original wrote "new RuntimeException(e);" without
            // "throw", discarding the exception. Propagate it as intended.
            throw new RuntimeException(e);
        }
    }

    /**
     * Streams a local file into an HDFS directory, creating
     * {@code dest/<filename>}. Refuses to overwrite an existing file.
     *
     * @param source local file path
     * @param dest   HDFS directory (with or without trailing '/')
     * @throws RuntimeException if the copy fails with an I/O error
     */
    public void addFile(String source, String dest) {
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(conf)) {
            String filename = source.substring(source.lastIndexOf('/') + 1);
            // Build the destination path including the filename.
            if (dest.charAt(dest.length() - 1) != '/') {
                dest = dest + "/" + filename;
            } else {
                dest = dest + filename;
            }
            Path path = new Path(dest);
            if (fs.exists(path)) {
                System.out.println("File " + dest + " already exists");
                return;
            }
            // try-with-resources closes both streams on failure; the original
            // leaked them if read/write threw.
            try (FSDataOutputStream out = fs.create(path);
                 InputStream in = new BufferedInputStream(
                         new FileInputStream(new File(source)))) {
                byte[] b = new byte[1024];
                int numBytes;
                while ((numBytes = in.read(b)) > 0) {
                    out.write(b, 0, numBytes);
                }
            }
        } catch (IOException e) {
            System.out.println(e.getMessage());
            // BUG FIX: original constructed but never threw the exception.
            throw new RuntimeException(e);
        }
    }

    /**
     * Renames (moves) an HDFS path, like {@code hadoop fs -mv}. Does nothing
     * if the source is missing or the target already exists.
     *
     * @param fromthis existing HDFS path
     * @param tothis   new HDFS path
     * @throws RuntimeException if the rename fails with an I/O error
     */
    public void renameFile(String fromthis, String tothis) {
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(conf)) {
            Path fromPath = new Path(fromthis);
            Path toPath = new Path(tothis);
            if (!fs.exists(fromPath)) {
                System.out.println("No such destination " + fromPath);
                return;
            }
            if (fs.exists(toPath)) {
                System.out.println("Already exists! " + toPath);
                return;
            }
            if (fs.rename(fromPath, toPath)) {
                System.out.println("Renamed from " + fromthis + " to " + tothis);
            }
        } catch (IOException e) {
            System.out.println(e.getMessage());
            // BUG FIX: original constructed but never threw the exception.
            throw new RuntimeException(e);
        }
    }

    /**
     * Recursively deletes an HDFS file or directory, like
     * {@code hadoop fs -rm -r}.
     *
     * @param file absolute HDFS path to delete
     * @throws RuntimeException if the delete fails with an I/O error
     */
    public void deleteFile(String file) {
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(conf)) {
            Path path = new Path(file);
            if (!fs.exists(path)) {
                System.out.println("File " + file + " does not exists");
                return;
            }
            // recursive=true also removes directory contents. To defer
            // deletion until fs.close(), use deleteOnExit(Path) instead.
            fs.delete(path, true);
        } catch (IOException e) {
            System.out.println(e.getMessage());
            // BUG FIX: original constructed but never threw the exception.
            throw new RuntimeException(e);
        }
    }
}

總結

以上是生活随笔為你收集整理的MapReduce基础开发之八HDFS文件CRUD操作的全部內容,希望文章能夠幫你解決所遇到的問題。

如果覺得生活随笔網站內容還不錯,歡迎將生活随笔推薦給好友。

主站蜘蛛池模板: 1000部啪啪未满十八勿入 | 男人天堂网站 | 欧美a级黄色 | 欧美黑人性生活 | 欧洲精品视频在线观看 | 夫妻毛片 | 久久久香蕉 | 玖玖视频 | 欧美视频黄色 | 国产视频手机在线观看 | 国产精品二区三区 | 久久cao | 素人一区 | 中文在线а√天堂 | 成人激情四射网 | 国产无码久久精品 | 少妇高潮av久久久久久 | 亚洲成年人免费观看 | 欧美香蕉| 麻豆网 | 无码人妻aⅴ一区二区三区 国产高清一区二区三区四区 | 欧美精品免费在线 | 亚洲精品美女网站 | 爱视频福利网 | 中文字幕久久av | 日本123区| 黑人精品一区二区三区不 | 日韩在线精品视频 | 亚洲av无码一区二区三区在线播放 | 毛片视频网址 | 亚洲一区二区在线视频 | 伊人五月综合 | 性做久久久久久免费观看欧美 | 手机在线成人 | 日噜噜夜噜噜 | 亚洲精品国产99 | 日韩乱码视频 | 就爱啪啪网 | 亚洲日本网站 | 日一日射一射 | 欧美日韩99 | 国产片一区二区 | 亚洲激情电影在线 | 不卡的日韩av | 少妇丰满尤物大尺度写真 | 久久免费视频2 | 日韩欧美视频网站 | av大全在线 | 久久精品久久久久久 | 国产又粗又猛又大爽 | 中国美女黄色一级片 | 不卡中文av| 激情五月婷婷丁香 | 91国内在线视频 | 亚洲一二三四在线观看 | 亚洲熟妇色自偷自拍另类 | 美国免费高清电影在线观看 | 神马影院午夜伦理 | 亚洲福利电影网 | 青青草狠狠操 | 日本在线一区 | 日日射av| 国产真人无遮挡作爱免费视频 | 亚洲精品一二三四区 | 韩国成人在线视频 | 福利在线视频观看 | 成人永久视频 | h视频免费在线 | 校园伸入裙底揉捏1v1h | 亚洲经典三级 | 亚洲色图在线观看 | 国产女人视频 | 欧美视频xxxx | 欧美精品免费在线 | 中文字幕亚洲乱码熟女一区二区 | 欧美zzz物交 | 欧美一区二区三区成人精品 | 爆操老女人 | 免费一级黄色 | 国产青青青 | 日韩黄色片在线观看 | 国产精品丝袜一区 | 国产欧美二区 | 亚洲制服丝袜一区 | 亚洲无码久久久久久久 | 日韩午夜剧场 | 亚洲熟妇毛茸茸 | 国产精品免费一区二区三区在线观看 | 最新黄色网址在线观看 | 免费欧美一级 | 娇妻被老王脔到高潮失禁视频 | 不卡在线一区二区 | 性欧美又大又长又硬 | caoprom超碰 | 日本一级免费视频 | 青青艹视频 | 97色在线| 欧美黑人精品一区二区不卡 | 手机看片1024国产 |