pom.xml:
<!-- Hadoop client: provides the HDFS FileSystem API used below -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>2.7.3</version>
</dependency>
代码:
public class UploadFileToHdfs { private static Logger LOG = LoggerFactory.getLogger(UploadFileToHdfs.class); private static FileSystem fs; public static FileSystem getFs() { if (fs == null) { synchronized (UploadFileToHdfs.class) { if (fs == null) { LOG.info("准备初始化hdfs连接"); String defaultHdfsConfDir = "/etc/hadoop/conf"; String hdfsConfEnv = System.getenv(CommonConstant.HADOOP_CONF_DIR_VAR);//环境变量 if (StringUtils.isNotBlank(hdfsConfEnv)) { defaultHdfsConfDir = hdfsConfEnv; } LOG.info("读取hdfs配置文件目录:{}", defaultHdfsConfDir); File hdfsSiteFile = new File(defaultHdfsConfDir + File.separator + "hdfs-site.xml"); File coreSiteFile = new File(defaultHdfsConfDir + File.separator + "core-site.xml"); Preconditions.checkArgument(hdfsSiteFile.exists(), "hdfs-site.xml文件缺失"); Preconditions.checkArgument(coreSiteFile.exists(), "core-site.xml文件缺失"); Configuration conf = new Configuration(); try { conf.addResource(hdfsSiteFile.toURI().toURL()); conf.addResource(coreSiteFile.toURI().toURL()); conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem"); fs = FileSystem.get(conf); } catch (Exception e) { LOG.error("连接hdfs异常", e); } LOG.info("初始化hdfs连接成功"); } } } return fs; }