@@ -29,6 +29,7 @@ public class HdfsService {
         System.setProperty("HADOOP_USER_NAME", "hadoop");
         this.remoteUrl = remoteUrl;
         configuration.set("fs.default.name", this.remoteUrl);
+        configuration.set("dfs.client.use.datanode.hostname", "true");
         try {
             dfs = (DistributedFileSystem) FileSystem.get(configuration);
         } catch (IOException e) {
@@ -37,6 +38,7 @@ public class HdfsService {
     }
 
     public void upload(String path, File src) throws IOException {
+        // byte[] buffer = new byte[8192];
         Path dstPath = new Path(root, path);
         if (src.exists() && src.isDirectory()) {
             if (!this.dfs.exists(dstPath)) {
@@ -53,7 +55,19 @@ public class HdfsService {
                         throw new IOException("delete " + configuration.get("fs.default.name") + "/" + path + "/" + file.getName() + " error");
                     }
                 }
-                this.dfs.copyFromLocalFile(new Path(src.getAbsolutePath()), dst);
+                this.dfs.copyFromLocalFile(new Path(file.getAbsolutePath()), dst);
+
+                // FSDataOutputStream outStream = dfs.create(dst);
+                // try {
+                //     InputStream in = new FileInputStream(file);
+                //     try {
+                //         IoUtil.copy(in, outStream, buffer);
+                //     } finally {
+                //         in.close();
+                //     }
+                // } finally {
+                //     outStream.close();
+                // }
             }
         }
 
@@ -63,7 +77,7 @@ public class HdfsService {
     public static void main(String args[]) throws Exception {
         HdfsService service = new HdfsService();
         service.setRemoteUrl("hdfs://39.97.161.48:9000");
-        service.upload("25", new File("/ekexiu/storage_asd/task/result/test/data_desen"));
+        service.upload("22", new File("/home/java1/test"));
 
         System.out.println("11111111111111111111111111111111");
     }