jiapeng 6 years ago
Parent commit 442b2228ed
2 changed files with 68 additions and 68 deletions
  1. pom.xml (+2 -2)
  2. src/main/java/com/ekexiu/project/hdfs/HdfsService.java (+66 -66)

+ 2 - 2
pom.xml

@@ -70,11 +70,11 @@
 			<artifactId>hadoop-hdfs</artifactId>
 			<version>3.0.3</version>
 		</dependency> -->
-		<dependency>
+<!-- 		<dependency>
 			<groupId>org.apache.hadoop</groupId>
 			<artifactId>hadoop-client</artifactId>
 			<version>2.9.2</version>
-		</dependency>
+		</dependency> -->
 <!-- 		<dependency>
 			<groupId>org.apache.hadoop</groupId>
 			<artifactId>hadoop-common</artifactId>
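
Note: with hadoop-client commented out alongside the already-commented hadoop-hdfs and hadoop-common blocks, this pom no longer puts any org.apache.hadoop classes on the classpath, which is why the matching service code below is commented out as well. If the dependency is ever restored, a quick sanity check of which Hadoop version Maven actually resolved (VersionInfo ships with hadoop-common, which hadoop-client pulls in transitively) might look like this hypothetical helper, not part of this repo:

	public class HadoopVersionCheck {
		public static void main(String[] args) {
			// Prints the Hadoop version on the classpath, e.g. "2.9.2".
			System.out.println(org.apache.hadoop.util.VersionInfo.getVersion());
		}
	}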

+ 66 - 66
src/main/java/com/ekexiu/project/hdfs/HdfsService.java

@@ -2,84 +2,84 @@ package com.ekexiu.project.hdfs;
 import java.io.File;
 import java.io.IOException;
 
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.DistributedFileSystem;
+//import org.apache.hadoop.conf.Configuration;
+//import org.apache.hadoop.fs.FileSystem;
+//import org.apache.hadoop.fs.Path;
+//import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.jfw.apt.annotation.Bean;
 @Bean
 public class HdfsService {
-	private Configuration configuration = new Configuration();
-	private DistributedFileSystem dfs;
-	private Path root = new Path("/");
-	private String remoteUrl;
-	
-
-
-	public void setRootPath(String rootPath){
-		this.root = new Path(rootPath);
-	}
-	
-	public String getRemoteUrl() {
-		return remoteUrl;
-	}
-
-
-	public void setRemoteUrl(String remoteUrl) {
-		System.setProperty("HADOOP_USER_NAME", "hadoop");
-		this.remoteUrl = remoteUrl;
-		configuration.set("fs.default.name", this.remoteUrl);
-		configuration.set("dfs.client.use.datanode.hostname", "true");
-		try {
-			dfs = (DistributedFileSystem) FileSystem.get(configuration);
-		} catch (IOException e) {
-			throw new RuntimeException(e);
-		}
-	}
+//	private Configuration configuration = new Configuration();
+//	private DistributedFileSystem dfs;
+//	private Path root = new Path("/");
+//	private String remoteUrl;
+//	
+//
+//
+//	public void setRootPath(String rootPath){
+//		this.root = new Path(rootPath);
+//	}
+//	
+//	public String getRemoteUrl() {
+//		return remoteUrl;
+//	}
+//
+//
+//	public void setRemoteUrl(String remoteUrl) {
+//		System.setProperty("HADOOP_USER_NAME", "hadoop");
+//		this.remoteUrl = remoteUrl;
+//		configuration.set("fs.default.name", this.remoteUrl);
+//		configuration.set("dfs.client.use.datanode.hostname", "true");
+//		try {
+//			dfs = (DistributedFileSystem) FileSystem.get(configuration);
+//		} catch (IOException e) {
+//			throw new RuntimeException(e);
+//		}
+//	}
 	
 	public void upload(String path,File src) throws IOException{
 //		byte[] buffer = new byte[8192];
-		Path dstPath = new Path(root,path);
-		if(src.exists() && src.isDirectory()){
-			if(!this.dfs.exists(dstPath)){
-				if(!this.dfs.mkdirs(dstPath)){
-					throw new IOException("mkdir "+configuration.get("fs.default.name")+"/"+path+" error");
-				}
-			}
-			
-			
-			for(File file:src.listFiles()){
-				Path dst = new Path(dstPath,file.getName());
-				if(this.dfs.exists(dst)){
-					if(!this.dfs.delete(dst,true)){
-						throw new IOException("delete "+configuration.get("fs.default.name")+"/"+path+"/"+file.getName()+" error");
-					}
-				}
-				this.dfs.copyFromLocalFile(new Path(file.getAbsolutePath()), dst);
-				
-//				FSDataOutputStream outStream = dfs.create(dst);
-//				try {
-//					InputStream in = new FileInputStream(file);
-//					try {
-//						IoUtil.copy(in, outStream, buffer);
-//					}finally {
-//						in.close();
+//		Path dstPath = new Path(root,path);
+//		if(src.exists() && src.isDirectory()){
+//			if(!this.dfs.exists(dstPath)){
+//				if(!this.dfs.mkdirs(dstPath)){
+//					throw new IOException("mkdir "+configuration.get("fs.default.name")+"/"+path+" error");
+//				}
+//			}
+//			
+//			
+//			for(File file:src.listFiles()){
+//				Path dst = new Path(dstPath,file.getName());
+//				if(this.dfs.exists(dst)){
+//					if(!this.dfs.delete(dst,true)){
+//						throw new IOException("delete "+configuration.get("fs.default.name")+"/"+path+"/"+file.getName()+" error");
 //					}
-//				}finally {
-//					outStream.close();
 //				}
-			}
-		}
-		
+//				this.dfs.copyFromLocalFile(new Path(file.getAbsolutePath()), dst);
+//				
+////				FSDataOutputStream outStream = dfs.create(dst);
+////				try {
+////					InputStream in = new FileInputStream(file);
+////					try {
+////						IoUtil.copy(in, outStream, buffer);
+////					}finally {
+////						in.close();
+////					}
+////				}finally {
+////					outStream.close();
+////				}
+//			}
+//		}
+//		
 	}
 	
 	
 	public static void main(String args[]) throws Exception{
-		HdfsService service = new HdfsService();
-		service.setRemoteUrl("hdfs://39.97.161.48:9000");
-		service.upload("22",new File("/home/java1/test"));
-		
-		System.out.println("11111111111111111111111111111111");
+//		HdfsService service = new HdfsService();
+//		service.setRemoteUrl("hdfs://39.97.161.48:9000");
+//		service.upload("22",new File("/home/java1/test"));
+//		
+//		System.out.println("11111111111111111111111111111111");
 	}
 
 }
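
For reference, the behavior this commit disables can be reconstructed from the removed lines. Below is a minimal standalone sketch, assuming the hadoop-client 2.9.2 dependency is restored; the namenode URL, user name, and local path are the values from the removed main method, and the original's cast to DistributedFileSystem is replaced by the plain FileSystem interface, which is all the upload logic needs:

	import java.io.File;
	import java.io.IOException;

	import org.apache.hadoop.conf.Configuration;
	import org.apache.hadoop.fs.FileSystem;
	import org.apache.hadoop.fs.Path;

	public class HdfsUploadSketch {

		public static void main(String[] args) throws IOException {
			// The removed setRemoteUrl() forced the client identity to "hadoop".
			System.setProperty("HADOOP_USER_NAME", "hadoop");

			Configuration conf = new Configuration();
			// The original used the deprecated key "fs.default.name";
			// "fs.defaultFS" is its current name.
			conf.set("fs.defaultFS", "hdfs://39.97.161.48:9000");
			// Resolve datanodes by hostname rather than (possibly internal) IP.
			conf.set("dfs.client.use.datanode.hostname", "true");

			FileSystem fs = FileSystem.get(conf);
			try {
				upload(fs, new Path("/"), "22", new File("/home/java1/test"));
			} finally {
				fs.close();
			}
		}

		// Mirrors the removed upload(String, File): copy every entry directly
		// under src into <root>/<path>, replacing entries that already exist.
		static void upload(FileSystem fs, Path root, String path, File src) throws IOException {
			Path dstPath = new Path(root, path);
			if (src.exists() && src.isDirectory()) {
				if (!fs.exists(dstPath) && !fs.mkdirs(dstPath)) {
					throw new IOException("mkdir " + dstPath + " error");
				}
				for (File file : src.listFiles()) {
					Path dst = new Path(dstPath, file.getName());
					if (fs.exists(dst) && !fs.delete(dst, true)) {
						throw new IOException("delete " + dst + " error");
					}
					fs.copyFromLocalFile(new Path(file.getAbsolutePath()), dst);
				}
			}
		}
	}

Run against a reachable namenode, this uploads the contents of the local directory into HDFS, overwriting any previous copy of each entry, exactly as the now-commented service did.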