package com.zdjizhi.utils;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;

/**
 * Static helpers for reading and writing whole files on HDFS.
 *
 * <p>Holds a single process-wide {@link FileSystem} built from the default
 * classpath {@link Configuration}. If the connection cannot be created, the
 * class fails to load with an {@link ExceptionInInitializerError}.
 */
public final class HdfsUtils {

    private static final Logger logger = LoggerFactory.getLogger(HdfsUtils.class);

    private static final FileSystem fileSystem;

    static {
        // Connect to HDFS using whatever core-site.xml/hdfs-site.xml is on the classpath.
        Configuration configuration = new Configuration();
        try {
            fileSystem = FileSystem.get(configuration);
        } catch (IOException e) {
            throw new ExceptionInInitializerError(e);
        }
    }

    /** Utility class — not instantiable. */
    private HdfsUtils() {
    }

    /**
     * Reads the entire file at {@code filePath} into a byte array.
     *
     * @param filePath HDFS path of the file to read
     * @return the file's complete contents
     * @throws IOException if the file does not exist, is larger than 2 GiB
     *                     (cannot fit in a {@code byte[]}), or the read fails
     */
    public static byte[] getFileBytes(String filePath) throws IOException {
        Path path = new Path(filePath);
        // BUGFIX: available() is only a non-blocking hint (and an int), NOT the
        // file length — on HDFS it silently truncated reads. Ask the NameNode
        // for the real size instead.
        long length = fileSystem.getFileStatus(path).getLen();
        if (length > Integer.MAX_VALUE) {
            throw new IOException(
                    "File too large to buffer in a byte[]: " + filePath + " (" + length + " bytes)");
        }
        byte[] bytes = new byte[(int) length];
        try (FSDataInputStream in = fileSystem.open(path)) {
            // readFully loops until the buffer is filled; the old code ignored
            // the return value of a single read(), which may be a short read.
            in.readFully(0, bytes);
        }
        return bytes;
    }

    /**
     * Writes {@code bytes} to {@code filePath}, overwriting any existing file.
     *
     * @param filePath HDFS path of the destination file
     * @param bytes    content to write
     * @throws IOException if the file cannot be created or written
     */
    public static void uploadFileByBytes(String filePath, byte[] bytes) throws IOException {
        // create(..., true) overwrites an existing file; try-with-resources
        // guarantees the stream is closed (and the write flushed) on all paths.
        try (FSDataOutputStream out = fileSystem.create(new Path(filePath), true)) {
            out.write(bytes);
        }
    }
}