A Hadoop HDFS utility class

来源:互联网 发布:装配线算法 编辑:程序博客网 时间:2024/06/06 06:38

A Hadoop HDFS utility class


package com.viburnum.util;

import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.util.Date;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

/**
 * Utility wrapper around common Hadoop HDFS operations: directory and file
 * creation/deletion, appending, reading, renaming, uploads, listing, and
 * block-location inspection.
 *
 * <p>Configuration (HDFS URL and the car-LBS directory) is loaded from the
 * properties file via {@code PropUtil} in the constructor, which also computes
 * a timestamped default file name ({@code lbs_<time><nanos>.txt}).
 *
 * <p>NOTE(review): all state is static and mutated by the constructor and
 * setters, so this class is not thread-safe; concurrent construction or
 * setter calls will race. Confirm single-threaded use with callers.
 */
public class HdfsUtil {
    // HDFS namenode URL, e.g. hdfs://host:port (loaded from properties)
    public static String hdfsUrl = "";
    // directory under which car LBS files are written
    private static String carInfoDir = "/car/lbs/";
    // full path of the current LBS file (computed in the constructor)
    private static String carInfoFile = "";
    private static String carInfoFilePrefix = "lbs_";
    // classpath location of the configuration properties file
    private static String properitesFile = "/viburnum.properties";
    private static Configuration conf = new Configuration();
    private static FileSystem hdfs;

    /**
     * Loads HDFS settings from the properties file, opens the FileSystem
     * handle, and derives a timestamped default file name under
     * {@code carInfoDir/yyyyMMdd/}.
     */
    public HdfsUtil() {
        hdfsUrl = PropUtil.getResourceValue(properitesFile, "hdfs.url");
        carInfoDir = PropUtil.getResourceValue(properitesFile, "car.lbs.dir");
        try {
            // append support is required by the append-based write methods below
            conf.setBoolean("dfs.support.append", true);
            hdfs = FileSystem.get(URI.create(hdfsUrl), conf);
        } catch (IOException e) {
            // NOTE(review): hdfs stays null on failure and later calls will NPE;
            // consider rethrowing as a runtime exception instead.
            e.printStackTrace();
        }
        // SimpleDateFormat is not thread-safe, so keep these method-local
        SimpleDateFormat sdfDate = new SimpleDateFormat("yyyyMMdd");
        SimpleDateFormat sdfTime = new SimpleDateFormat("HHmmssSSS");
        String fileDate = sdfDate.format(new Date());
        // nanoTime suffix reduces collisions when two instances start in the same ms
        String fileTime = sdfTime.format(new Date()) + System.nanoTime();
        carInfoFile = carInfoDir + "/" + fileDate + "/" + carInfoFilePrefix + fileTime + ".txt";
    }

    public static String getProperitesFile() {
        return properitesFile;
    }

    public static void setProperitesFile(String properitesFile) {
        HdfsUtil.properitesFile = properitesFile;
    }

    public static String getHdfsUrl() {
        return hdfsUrl;
    }

    public static void setHdfsUrl(String hdfsUrl) {
        HdfsUtil.hdfsUrl = hdfsUrl;
    }

    public static String getCarInfoDir() {
        return carInfoDir;
    }

    public static void setCarInfoDir(String carInfoDir) {
        HdfsUtil.carInfoDir = carInfoDir;
    }

    public static String getCarInfoFile() {
        return carInfoFile;
    }

    public static void setCarInfoFile(String carInfoFile) {
        HdfsUtil.carInfoFile = carInfoFile;
    }

    public static Configuration getConf() {
        return conf;
    }

    public static void setConf(Configuration conf) {
        HdfsUtil.conf = conf;
    }

    /**
     * Creates an HDFS folder (including missing parents).
     *
     * @param dirPath directory path to create
     * @throws Exception on HDFS failure
     */
    public static void createDir(String dirPath) throws Exception {
        hdfs.mkdirs(new Path(dirPath));
    }

    /**
     * Deletes an HDFS folder recursively.
     *
     * @param dirPath directory path to delete
     * @throws Exception on HDFS failure
     */
    public static void deleteDir(String dirPath) throws Exception {
        // delete(Path) is deprecated; it was implicitly recursive, so pass true
        hdfs.delete(new Path(dirPath), true);
    }

    /**
     * Creates (or overwrites) the default file ({@link #getCarInfoFile()})
     * with the given content plus a trailing newline, UTF-8 encoded.
     *
     * @param content text to write
     * @throws Exception on HDFS failure
     */
    public static void createFile(String content) throws Exception {
        // try-with-resources closes the stream even if write() throws
        try (FSDataOutputStream out = hdfs.create(new Path(getCarInfoFile()))) {
            out.write((content + "\n").getBytes(StandardCharsets.UTF_8));
        }
    }

    /**
     * Creates (or overwrites) the named file with the given content plus a
     * trailing newline, and remembers it as the current default file.
     *
     * @param fileName HDFS path of the file to create
     * @param content  text to write
     * @throws Exception on HDFS failure
     */
    public static void createFile(String fileName, String content) throws Exception {
        setCarInfoFile(fileName);
        try (FSDataOutputStream out = hdfs.create(new Path(fileName))) {
            out.write((content + "\n").getBytes(StandardCharsets.UTF_8));
        }
    }

    /**
     * Writes content to the default file, appending when {@code append} is
     * true (creating the file first if needed), otherwise overwriting.
     *
     * @param content text to write
     * @param append  append instead of overwrite; null is treated as false
     * @throws Exception on HDFS failure
     */
    public static void createFile(String content, Boolean append) throws Exception {
        // Boolean.TRUE.equals also guards against a null Boolean (was an NPE)
        if (Boolean.TRUE.equals(append)) {
            Path fileName = new Path(getCarInfoFile());
            if (!hdfs.exists(fileName)) {
                // the original leaked this stream; close it so the empty file
                // is committed before we reopen it for append
                hdfs.create(fileName).close();
            }
            try (InputStream in = new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8));
                 FSDataOutputStream out = hdfs.append(fileName)) {
                IOUtils.copyBytes(in, out, conf, false);
            }
        } else {
            createFile(content);
        }
    }

    /**
     * Appends content to an existing file at the given path.
     *
     * @param fileName HDFS path of the file to append to (must exist)
     * @param content  text to append
     * @throws Exception on HDFS failure
     */
    public static void writeFile(String fileName, String content) throws Exception {
        InputStream in = new BufferedInputStream(
                new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8)));
        OutputStream out = hdfs.append(new Path(fileName));
        // the final 'true' makes copyBytes close both streams
        IOUtils.copyBytes(in, out, 4096, true);
    }

    /**
     * Rewrites the default file so the new content comes first, followed by
     * the file's previous content.
     *
     * @param content text to prepend
     * @throws Exception on HDFS failure
     */
    public static void appendAll(String content) throws Exception {
        Path path = new Path(getCarInfoFile());
        // BUG FIX: read the old content BEFORE create() truncates the file;
        // the original created (truncating) first and always read 0 bytes back.
        byte[] oldContentBytes = hdfs.exists(path) ? readFile(getCarInfoFile()) : new byte[0];
        byte[] contentBytes = (content + "\n").getBytes(StandardCharsets.UTF_8);
        try (FSDataOutputStream out = hdfs.create(path)) {
            out.write(byteMerge(contentBytes, oldContentBytes));
        }
    }

    /**
     * Renames/moves a file.
     *
     * @param oldPath current HDFS path
     * @param newPath target HDFS path
     * @throws Exception on HDFS failure
     */
    public static void renameFile(String oldPath, String newPath) throws Exception {
        hdfs.rename(new Path(oldPath), new Path(newPath));
    }

    /**
     * Deletes a file (recursively, matching the deprecated delete(Path)
     * overload the original used).
     *
     * @param hadoopFile HDFS path to delete
     * @return true if the delete succeeded
     * @throws Exception on HDFS failure
     */
    public static boolean deleteFile(String hadoopFile) throws Exception {
        return hdfs.delete(new Path(hadoopFile), true);
    }

    /**
     * Uploads a local file into HDFS.
     *
     * @param localPath  source path on the local filesystem
     * @param hadoopPath destination path in HDFS
     * @throws Exception on HDFS failure
     */
    public static void uploadLocalFile(String localPath, String hadoopPath) throws Exception {
        hdfs.copyFromLocalFile(new Path(localPath), new Path(hadoopPath));
    }

    /**
     * Reads a whole HDFS file into a byte array.
     *
     * @param hadoopFile HDFS path to read
     * @return the file's bytes
     * @throws Exception if the file does not exist, exceeds 2 GiB, or HDFS fails
     */
    public static byte[] readFile(String hadoopFile) throws Exception {
        Path path = new Path(hadoopFile);
        if (!hdfs.exists(path)) {
            throw new Exception("the file is not found .");
        }
        FileStatus stat = hdfs.getFileStatus(path);
        // toIntExact throws on files >2GiB instead of silently misbehaving
        byte[] buffer = new byte[Math.toIntExact(stat.getLen())];
        try (FSDataInputStream in = hdfs.open(path)) {
            in.readFully(0, buffer);
        }
        return buffer;
    }

    /**
     * Lists the entries directly under an HDFS folder, printing each path and
     * returning them as a single space-separated string (with trailing space).
     *
     * @param hadoopPath folder to list
     * @return space-separated entry paths
     * @throws Exception on HDFS failure
     */
    public static String listFiles(String hadoopPath) throws Exception {
        Path dst = new Path(hadoopPath);
        FileStatus[] files = hdfs.listStatus(dst);
        StringBuilder fileString = new StringBuilder();
        for (FileStatus file : files) {
            System.out.println(file.getPath().toString());
            fileString.append(file.getPath().toString()).append(" ");
        }
        return fileString.toString();
    }

    /**
     * Prints and returns the datanode hosts holding each block of a file.
     *
     * @param hadoopPath HDFS path to inspect
     * @return space-separated host names (one entry per replica per block)
     * @throws Exception on HDFS failure
     */
    public static String getBlockInfo(String hadoopPath) throws Exception {
        FileStatus fileStatus = hdfs.getFileStatus(new Path(hadoopPath));
        BlockLocation[] blkloc = hdfs.getFileBlockLocations(fileStatus, 0, fileStatus.getLen());
        StringBuilder blockString = new StringBuilder();
        for (BlockLocation loc : blkloc) {
            // BUG FIX: the original printed hosts but never added them to the
            // result, so it always returned "".
            for (String host : loc.getHosts()) {
                System.out.println(host);
                blockString.append(host).append(" ");
            }
        }
        return blockString.toString();
    }

    /**
     * Concatenates two byte arrays.
     *
     * @param byte_1 first array
     * @param byte_2 second array
     * @return new array containing byte_1 followed by byte_2
     */
    public static byte[] byteMerge(byte[] byte_1, byte[] byte_2) {
        byte[] byte_3 = new byte[byte_1.length + byte_2.length];
        System.arraycopy(byte_1, 0, byte_3, 0, byte_1.length);
        System.arraycopy(byte_2, 0, byte_3, byte_1.length, byte_2.length);
        return byte_3;
    }

    /**
     * Closes the shared FileSystem handle.
     *
     * <p>NOTE(review): finalize() is deprecated and not guaranteed to run;
     * kept for interface compatibility, but callers should close explicitly.
     */
    public void finalize() throws Throwable {
        hdfs.close();
        super.finalize();
    }
}


0 0
原创粉丝点击