Explorar el Código

Merge branch 'master' of http://121.42.53.174:3000/XMTT/storage.git

jiapeng hace 6 años
padre
commit
a4fd51b081

+ 1 - 1
pom.xml

@@ -73,7 +73,7 @@
73 73
		<dependency>
74 74
			<groupId>org.apache.hadoop</groupId>
75 75
			<artifactId>hadoop-client</artifactId>
76
			<version>3.0.3</version>
76
			<version>2.9.2</version>
77 77
		</dependency>
78 78
<!-- 		<dependency>
79 79
			<groupId>org.apache.hadoop</groupId>

+ 32 - 8
src/main/java/com/ekexiu/project/hdfs/HdfsService.java

@@ -26,8 +26,10 @@ public class HdfsService {
26 26

27 27

28 28
	public void setRemoteUrl(String remoteUrl) {
29
		System.setProperty("HADOOP_USER_NAME", "hadoop");
29 30
		this.remoteUrl = remoteUrl;
30 31
		configuration.set("fs.default.name", this.remoteUrl);
32
		configuration.set("dfs.client.use.datanode.hostname", "true");
31 33
		try {
32 34
			dfs = (DistributedFileSystem) FileSystem.get(configuration);
33 35
		} catch (IOException e) {
@@ -36,17 +38,37 @@ public class HdfsService {
36 38
	}
37 39
	
38 40
	public void upload(String path,File src) throws IOException{
41
//		byte[] buffer = new byte[8192];
42
		Path dstPath = new Path(root,path);
39 43
		if(src.exists() && src.isDirectory()){
40
			Path dst  =  new Path(root,path);
41
			if(this.dfs.exists(dst)){
42
				if(!this.dfs.delete(dst,true)){
43
					throw new IOException("delete "+configuration.get("fs.default.name")+path.toString()+" error");
44
			if(!this.dfs.exists(dstPath)){
45
				if(!this.dfs.mkdirs(dstPath)){
46
					throw new IOException("mkdir "+configuration.get("fs.default.name")+"/"+path+" error");
44 47
				}
45 48
			}
46
			if(!this.dfs.mkdirs(dst)){
47
				throw new IOException("mkdir "+configuration.get("fs.default.name")+path.toString()+" error");
49
			
50
			
51
			for(File file:src.listFiles()){
52
				Path dst = new Path(dstPath,file.getName());
53
				if(this.dfs.exists(dst)){
54
					if(!this.dfs.delete(dst,true)){
55
						throw new IOException("delete "+configuration.get("fs.default.name")+"/"+path+"/"+file.getName()+" error");
56
					}
57
				}
58
				this.dfs.copyFromLocalFile(new Path(file.getAbsolutePath()), dst);
59
				
60
//				FSDataOutputStream outStream = dfs.create(dst);
61
//				try {
62
//					InputStream in = new FileInputStream(file);
63
//					try {
64
//						IoUtil.copy(in, outStream, buffer);
65
//					}finally {
66
//						in.close();
67
//					}
68
//				}finally {
69
//					outStream.close();
70
//				}
48 71
			}
49
			this.dfs.copyFromLocalFile(new Path(src.getAbsolutePath()), dst);
50 72
		}
51 73
		
52 74
	}
@@ -55,7 +77,9 @@ public class HdfsService {
55 77
	public static void main(String args[]) throws Exception{
56 78
		HdfsService service = new HdfsService();
57 79
		service.setRemoteUrl("hdfs://39.97.161.48:9000");
58
		service.upload("11",new File("/ekexiu/storage_asd/dst/a"));
80
		service.upload("22",new File("/home/java1/test"));
81
		
82
		System.out.println("11111111111111111111111111111111");
59 83
	}
60 84

61 85
}

+ 1 - 1
src/main/java/com/ekexiu/project/storage/diacrisis/AiCleanTask.java

@ -32,7 +32,7 @@ public class AiCleanTask implements Runnable{
32 32
		File file = new File(aiTaskService.getDstPath(), taskId);
33 33
		file = new File(file,"data");
34 34
			try {
35
				URL u = new URL("http://127.0.0.1:5001//desensitize?pi=" + file.getAbsolutePath());
35
				URL u = new URL("http://127.0.0.1:5001/desensitize?pi=" + file.getAbsolutePath());
36 36
				HttpURLConnection httpURLConnection = (HttpURLConnection) u.openConnection();
37 37
				if (httpURLConnection.getResponseCode() == 200) {
38 38
					InputStream in = httpURLConnection.getInputStream();