Commit acead85a by zzrdark

1. add hadoop

parent 5cd8a518
```yaml
spring:
  hadoop:
    hdfs:
      # Windows configuration
      # hadoopConfigDir: D:\hadoop2.6_Win_x64\etc\hadoop
      # Linux or macOS configuration
      hadoopConfigDir: /home/fengte/hadoop/etc/hadoop/
      hdfsUrl: hdfs://192.168.2.244:9000
      defaultBlockSize: 128000000
      bufferSize: 512000
```
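`hdfsUrl` points at the NameNode RPC endpoint, and `hadoopConfigDir` at the client-side Hadoop configuration directory. As a quick connectivity check before wiring in the starter, a standalone sketch like the following can be used (the class name is made up here, and it assumes the Hadoop client jars from the pom below are on the classpath):

```java
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Hypothetical smoke test, not part of the commit: verifies the NameNode
// configured in the YAML above is reachable.
public class HdfsSmokeCheck {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.2.244:9000"), conf)) {
            System.out.println("root exists: " + fs.exists(new Path("/")));
        }
    }
}
```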
```xml
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4" />
```
```xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>cneeds-server</artifactId>
        <groupId>com.mx.cneeds</groupId>
        <version>1.0-SNAPSHOT</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>

    <artifactId>cneeds-common-hadoop-dfs</artifactId>

    <properties>
        <hadoop.version>2.7.3</hadoop.version>
    </properties>

    <dependencies>
        <!-- servlet-api excluded to avoid clashing with Spring Boot's embedded container -->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>${hadoop.version}</version>
            <exclusions>
                <exclusion>
                    <artifactId>servlet-api</artifactId>
                    <groupId>javax.servlet</groupId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>${hadoop.version}</version>
            <exclusions>
                <exclusion>
                    <artifactId>servlet-api</artifactId>
                    <groupId>javax.servlet</groupId>
                </exclusion>
                <!-- slf4j-log4j12 excluded so it does not conflict with Spring Boot's Logback binding -->
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-log4j12</artifactId>
                </exclusion>
                <!--<exclusion>
                    <groupId>com.google.guava</groupId>
                    <artifactId>guava</artifactId>
                </exclusion>-->
            </exclusions>
        </dependency>
    </dependencies>
</project>
```
```java
package com.mx.hbasefile.hadoop.hdfs.api;

import java.io.IOException;
import java.io.InputStream;

/**
 * @ClassName HdfsOperations
 * @Author zzrdark
 * @Date 2019-08-14 17:48
 * @Description Abstraction over basic HDFS file and directory operations
 **/
public interface HdfsOperations {

    /**
     * Save a file.
     * @param dir         target directory in HDFS
     * @param name        file name
     * @param input       stream providing the content; closed by the implementation
     * @param length      content length in bytes, used to pick a block size
     * @param replication HDFS replication factor
     * @throws IOException if the directory cannot be created or the write fails
     */
    void saveFile(String dir, String name,
                  InputStream input, long length, short replication) throws IOException;

    /**
     * Delete a file.
     * @param dir  directory containing the file
     * @param name file name
     * @throws IOException if the delete fails
     */
    void deleteFile(String dir, String name) throws IOException;

    /**
     * Open a file for reading.
     * @param dir  directory containing the file
     * @param name file name
     * @return an input stream over the file content
     * @throws IOException if the file cannot be opened
     */
    InputStream openFile(String dir, String name) throws IOException;

    /**
     * Create a directory.
     * @param dir directory path to create
     * @throws IOException if the directory cannot be created
     */
    void mkDir(String dir) throws IOException;

    /**
     * Delete a directory recursively.
     * @param dir directory path to delete
     * @throws IOException if the delete fails
     */
    void deleteDir(String dir) throws IOException;

    /**
     * Move a file into another directory, creating it if necessary.
     * @param src  source path
     * @param dest destination directory
     * @throws IOException if the move fails
     */
    void moveFile(String src, String dest) throws IOException;
}
```
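Consumers interact with the module through this interface. A minimal usage sketch, assuming the auto-configuration further down has registered an `HdfsTemplate` bean (`ReportStore` and the `/reports` path are hypothetical, not part of the commit):

```java
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.springframework.stereotype.Service;
import com.mx.hbasefile.hadoop.hdfs.api.HdfsOperations;

// Hypothetical consumer of the starter.
@Service
public class ReportStore {

    private final HdfsOperations hdfs;

    public ReportStore(HdfsOperations hdfs) {
        this.hdfs = hdfs;
    }

    public void store(String name, String content) throws IOException {
        byte[] bytes = content.getBytes(StandardCharsets.UTF_8);
        // replication factor of 2; saveFile closes the stream itself
        hdfs.saveFile("/reports", name, new ByteArrayInputStream(bytes), bytes.length, (short) 2);
    }
}
```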
```java
package com.mx.hbasefile.hadoop.hdfs.api;

import com.mx.hbasefile.hadoop.hdfs.boot.HdfsConfigProperties;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;

import java.io.IOException;
import java.io.InputStream;

/**
 * @ClassName HdfsTemplate
 * @Author zzrdark
 * @Date 2019-08-14 17:52
 * @Description Default HdfsOperations implementation backed by a Hadoop FileSystem
 **/
public class HdfsTemplate implements HdfsOperations {

    private FileSystem fileSystem;
    private HdfsConfigProperties hdfsConfigProperties;

    public HdfsTemplate(FileSystem fileSystem, HdfsConfigProperties hdfsConfigProperties) {
        this.fileSystem = fileSystem;
        this.hdfsConfigProperties = hdfsConfigProperties;
    }

    @Override
    public void saveFile(String dir, String name, InputStream input, long length, short replication) throws IOException {
        // Create the target directory if it does not exist yet
        Path dirPath = new Path(dir);
        if (!fileSystem.exists(dirPath)) {
            if (!fileSystem.mkdirs(dirPath, FsPermission.getDirDefault())) {
                throw new IOException("Failed to create directory " + dir);
            }
        }
        // Pick the block size from the file length: payloads of at most half the
        // configured default get a half-size block, everything else the full default
        Long defaultBlockSize = hdfsConfigProperties.getDefaultBlockSize();
        Path filePath = new Path(dir + "/" + name);
        long blockSize = length <= defaultBlockSize / 2 ? defaultBlockSize / 2 : defaultBlockSize;
        FSDataOutputStream fsdos = fileSystem.create(filePath, true,
                hdfsConfigProperties.getBufferSize(), replication, blockSize);
        try {
            fileSystem.setPermission(filePath, FsPermission.getFileDefault());
            byte[] buffer = new byte[512 * 1024];
            int len;
            while ((len = input.read(buffer)) != -1) {
                // Write only the bytes actually read, not the whole buffer
                fsdos.write(buffer, 0, len);
            }
        } finally {
            fsdos.close();
            input.close();
        }
    }

    @Override
    public void deleteFile(String dir, String name) throws IOException {
        Path path = new Path(dir + "/" + name);
        fileSystem.delete(path, false);
    }

    @Override
    public InputStream openFile(String dir, String name) throws IOException {
        Path path = new Path(dir + "/" + name);
        return fileSystem.open(path);
    }

    @Override
    public void mkDir(String dir) throws IOException {
        fileSystem.mkdirs(new Path(dir));
    }

    @Override
    public void deleteDir(String dir) throws IOException {
        fileSystem.delete(new Path(dir), true);
    }

    @Override
    public void moveFile(String src, String dest) throws IOException {
        // Make sure the destination directory exists before renaming into it
        Path dirPath = new Path(dest);
        if (!fileSystem.exists(dirPath)) {
            if (!fileSystem.mkdirs(dirPath, FsPermission.getDirDefault())) {
                throw new IOException("Failed to create directory " + dest);
            }
        }
        fileSystem.rename(new Path(src), dirPath);
    }
}
```
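With `defaultBlockSize: 128000000` from the YAML above, the block-size rule in `saveFile` gives payloads of at most 64,000,000 bytes a half-size block and everything larger the full default. A standalone sketch of the arithmetic (illustrative only, not part of the commit):

```java
// Reproduces the block-size selection used by saveFile.
public class BlockSizeRule {
    static long blockSizeFor(long length, long defaultBlockSize) {
        return length <= defaultBlockSize / 2 ? defaultBlockSize / 2 : defaultBlockSize;
    }

    public static void main(String[] args) {
        long def = 128_000_000L; // defaultBlockSize from application.yml
        System.out.println(blockSizeFor(10_000_000L, def));  // 64000000
        System.out.println(blockSizeFor(64_000_000L, def));  // 64000000 (boundary case)
        System.out.println(blockSizeFor(200_000_000L, def)); // 128000000
    }
}
```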
```java
package com.mx.hbasefile.hadoop.hdfs.boot;

import com.mx.hbasefile.hadoop.hdfs.api.HdfsTemplate;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import java.net.URI;

/**
 * @ClassName HdfsAutoConfiguration
 * @Author zzrdark
 * @Date 2019-08-14 17:24
 * @Description Auto-configures a Hadoop FileSystem and an HdfsTemplate from spring.hadoop.hdfs properties
 **/
@Configuration
@EnableConfigurationProperties(HdfsConfigProperties.class)
@ConditionalOnClass(FileSystem.class)
public class HdfsAutoConfiguration {

    @Autowired
    private HdfsConfigProperties hdfsConfigProperties;

    @Bean
    @ConditionalOnMissingBean(FileSystem.class)
    public FileSystem fileSystem() {
        try {
            org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
            // Load the cluster configuration shipped with the Hadoop installation
            conf.addResource(new Path(hdfsConfigProperties.getHadoopConfigDir() + "/core-site.xml"));
            conf.addResource(new Path(hdfsConfigProperties.getHadoopConfigDir() + "/hdfs-site.xml"));
            return FileSystem.get(new URI(hdfsConfigProperties.getHdfsUrl()), conf);
        } catch (Exception e) {
            // Fail fast instead of returning a null bean
            throw new IllegalStateException("Failed to create HDFS FileSystem", e);
        }
    }

    @Bean
    public HdfsTemplate hdfsTemplate(FileSystem fileSystem) {
        return new HdfsTemplate(fileSystem, hdfsConfigProperties);
    }
}
```
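Since `fileSystem()` is guarded by `@ConditionalOnMissingBean`, an application can supply its own `FileSystem` bean and the auto-configured one backs off. A hedged sketch of such an override (the configuration class and the property tweak are illustrative, not part of the commit):

```java
import java.net.URI;
import org.apache.hadoop.fs.FileSystem;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

// Hypothetical override: when a FileSystem bean like this exists,
// HdfsAutoConfiguration.fileSystem() is skipped.
@Configuration
public class CustomHdfsConfig {

    @Bean
    public FileSystem fileSystem() throws Exception {
        org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
        conf.set("dfs.client.use.datanode.hostname", "true"); // example client tweak
        return FileSystem.get(new URI("hdfs://192.168.2.244:9000"), conf);
    }
}
```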
```java
package com.mx.hbasefile.hadoop.hdfs.boot;

import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;

/**
 * @ClassName HdfsConfigProperties
 * @Author zzrdark
 * @Date 2019-08-14 17:17
 * @Description Binds the spring.hadoop.hdfs.* configuration properties
 **/
@Configuration
@ConfigurationProperties(prefix = "spring.hadoop.hdfs")
public class HdfsConfigProperties {

    private String hadoopConfigDir;
    private String hdfsUrl;
    private Long defaultBlockSize;
    private Integer bufferSize;

    public Integer getBufferSize() {
        return bufferSize;
    }

    public void setBufferSize(Integer bufferSize) {
        this.bufferSize = bufferSize;
    }

    public String getHadoopConfigDir() {
        return hadoopConfigDir;
    }

    public void setHadoopConfigDir(String hadoopConfigDir) {
        this.hadoopConfigDir = hadoopConfigDir;
    }

    public String getHdfsUrl() {
        return hdfsUrl;
    }

    public void setHdfsUrl(String hdfsUrl) {
        this.hdfsUrl = hdfsUrl;
    }

    public Long getDefaultBlockSize() {
        return defaultBlockSize;
    }

    public void setDefaultBlockSize(Long defaultBlockSize) {
        this.defaultBlockSize = defaultBlockSize;
    }
}
```
META-INF/spring.factories:
```properties
org.springframework.boot.autoconfigure.EnableAutoConfiguration=\
com.mx.hbasefile.hadoop.hdfs.boot.HdfsAutoConfiguration
```
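This `spring.factories` entry is what lets Spring Boot discover `HdfsAutoConfiguration` on the classpath without component scanning. An application that does not want the HDFS beans can exclude it through the standard mechanism; a sketch (the application class is hypothetical):

```java
import com.mx.hbasefile.hadoop.hdfs.boot.HdfsAutoConfiguration;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

// Hypothetical application class, not part of the commit:
// opts out of the HDFS auto-configuration.
@SpringBootApplication(exclude = HdfsAutoConfiguration.class)
public class DemoApplication {
    public static void main(String[] args) {
        SpringApplication.run(DemoApplication.class, args);
    }
}
```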
The parent `pom.xml` registers the new module:
```diff
@@ -23,6 +23,7 @@
         <module>cneeds-common-utils</module>
         <module>cneeds-server-authorization</module>
         <module>cneeds-common-pojo</module>
+        <module>cneeds-common-hadoop-dfs</module>
     </modules>
     <properties>
```