Uploading and downloading files on a Hadoop cluster with Java
Uploading files to and downloading files from HDFS are basic cluster operations. "Hadoop: The Definitive Guide" gives code examples for both, but it is not very clear about how to configure the Hadoop client itself. After a long stretch of searching and debugging, here is a summary of how to configure and use the cluster, together with a program, tested working, for operating on files stored there.

First, the corresponding environment variables need to be set: LD_LIBRARY_PATH is the path of the libraries needed at call time, and HADOOP_CLASSPATH holds the various jars of the Hadoop client (hedged sketches of this setup, and of how to compile and run, follow below).

In actual use, though, you may get "No Permission"-style errors, and even when you can guarantee the code has no problems, all sorts of odd errors can still appear at run time. Why does this happen? The book simply does:

this.conf = new Configuration();

Only once all of the configuration mentioned above has been put in place will the program really run, which makes configuration a crucial step.
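A minimal sketch of that environment setup, assuming a hypothetical client layout in which the native libraries sit under $HADOOP_HOME/lib/native and the client jars under $HADOOP_HOME and $HADOOP_HOME/lib; the article's original snippet was not preserved, so every path here is a stand-in:

export LD_LIBRARY_PATH=$HADOOP_HOME/lib/native:$LD_LIBRARY_PATH
export HADOOP_CLASSPATH=$HADOOP_HOME/hadoop-core.jar:$HADOOP_HOME/lib/*:$HADOOP_CLASSPATH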
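Compiling and running the utility would then look roughly like this; again a sketch rather than the article's original commands, assuming HDFSUtil.java and the two config XML files sit in the current directory:

javac -classpath $HADOOP_CLASSPATH HDFSUtil.java
java -classpath .:$HADOOP_CLASSPATH HDFSUtil <hdfs-node> <remote-path> <local-file> <lines>

Here is the code of the corresponding utility, for anyone interested. It works on plain file streams, which also makes it possible to pass files back and forth between FTP and HDFS: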
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;
/**
 * Utility for uploading files to and downloading files from HDFS
 * through plain streams.
 */
public class HDFSUtil {
    private String hdfs_node = "";
    private String hdfs_path = "";
    private String file_path = "";
    private String hadoop_site = "";
    private String hadoop_default = "";
    private Configuration conf = null;

    public HDFSUtil(String hdfs_node) {
        this.hdfs_node = hdfs_node;
    }

    public String getHdfsNode() {
        return this.hdfs_node;
    }

    public void setHdfsPath(String hdfs_path) {
        this.hdfs_path = hdfs_path;
    }

    public String getHdfsPath() {
        return this.hdfs_path;
    }

    public void setFilePath(String file_path) {
        this.file_path = file_path;
    }

    public String getFilePath() {
        return this.file_path;
    }

    public void setHadoopSite(String hadoop_site) {
        this.hadoop_site = hadoop_site;
    }

    public String getHadoopSite() {
        return this.hadoop_site;
    }

    public void setHadoopDefault(String hadoop_default) {
        this.hadoop_default = hadoop_default;
    }

    public String getHadoopDefault() {
        return this.hadoop_default;
    }
    public int setConfigure(boolean flag) {
        if (!flag) {
            // Build the configuration from the client's own XML files
            // instead of whatever happens to be on the classpath.
            if (this.getHadoopSite().isEmpty() || this.getHadoopDefault().isEmpty()) {
                return -1;
            }
            this.conf = new Configuration(false);
            conf.addResource(new Path(this.getHadoopDefault()));
            conf.addResource(new Path(this.getHadoopSite()));
            // Pin the FileSystem implementations so the client does not rely
            // on service discovery to resolve the hdfs:// and file:// schemes.
            conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
            conf.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
            return 0;
        }
        this.conf = new Configuration();
        return 0;
    }

    public Configuration getConfigure() {
        return this.conf;
    }
    public int upLoad(String localName, String remoteName) throws FileNotFoundException, IOException {
        InputStream inStream = null;
        FileSystem fs = null;
        try {
            inStream = new BufferedInputStream(new FileInputStream(localName));
            fs = FileSystem.get(URI.create(this.hdfs_node), this.conf);
            OutputStream outStream = fs.create(new Path(remoteName), new Progressable() {
                public void progress() {
                    // Print a dot for each chunk of data written.
                    System.out.print('.');
                }
            });
            // copyBytes with close=true closes both streams when it finishes.
            IOUtils.copyBytes(inStream, outStream, 4096, true);
            return 0;
        } catch (IOException e) {
            if (inStream != null) {
                inStream.close();
            }
            e.printStackTrace();
            return -1;
        }
    }
    public int upLoad(InputStream inStream, String remoteName) throws FileNotFoundException, IOException {
        FileSystem fs = null;
        try {
            fs = FileSystem.get(URI.create(this.hdfs_node), this.conf);
            OutputStream outStream = fs.create(new Path(remoteName), new Progressable() {
                public void progress() {
                    System.out.print('.');
                }
            });
            // copyBytes with close=true closes both streams when it finishes.
            IOUtils.copyBytes(inStream, outStream, 4096, true);
            return 0;
        } catch (IOException e) {
            inStream.close();
            e.printStackTrace();
            return -1;
        }
    }
    public int downLoad(String remoteName, String localName, int lines) throws FileNotFoundException, IOException {
        FileSystem fs = null;
        InputStream inStream = null;
        BufferedReader br = null;
        PrintWriter pw = null;
        try {
            fs = FileSystem.get(URI.create(this.hdfs_node + remoteName), this.conf);
            inStream = fs.open(new Path(this.hdfs_node + remoteName));
            // Wrap the local target file and the HDFS stream as UTF-8 text.
            pw = new PrintWriter(new BufferedWriter(
                    new OutputStreamWriter(new FileOutputStream(localName), "UTF-8")));
            br = new BufferedReader(new InputStreamReader(inStream, "UTF-8"));
            String str = null;
            // Copy at most 'lines' lines from HDFS to the local file.
            while (lines > 0 && (str = br.readLine()) != null) {
                lines--;
                pw.println(str);
            }
        } catch (IOException e) {
            throw new IOException("Couldn't write.", e);
        } finally {
            // The writers and readers wrap each other, so closing the
            // outermost object of each chain is enough.
            if (pw != null) {
                pw.close();
            }
            if (br != null) {
                br.close();
            }
        }
        return 0;
    }
    // main to test
    public static void main(String[] args) {
        String hdfspath = null;
        String localname = null;
        String hdfsnode = null;
        int lines = 0;
        if (args.length == 4) {
            hdfsnode = args[0];
            hdfspath = args[1];
            localname = args[2];
            lines = Integer.parseInt(args[3]);
        } else {
            hdfsnode = "hdfs://nj01-nanling-hdfs.dmop.baidu.com:54310";
            hdfspath = "/app/ps/spider/wdmqa/wangweilong/test/HDFSUtil.java";
            localname = "/home/work/workspace/project/dhc2-0/dhc/base/ftp/papapa";
            lines = 5;
        }
        HDFSUtil hdfsutil = new HDFSUtil(hdfsnode);
        hdfsutil.setFilePath(hdfsutil.getHdfsNode() + hdfspath);
        hdfsutil.setHadoopSite("./hadoop-site.xml");
        hdfsutil.setHadoopDefault("./hadoop-default.xml");
        // Build the configuration from the local XML files before any I/O.
        hdfsutil.setConfigure(false);
        try {
            hdfsutil.downLoad(hdfspath, localname, lines);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
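The article mentions that working on streams makes it possible to bridge FTP and HDFS. As a hedged illustration of that idea, not part of the original article: the FTP URL, credentials, and paths below are hypothetical stand-ins, and the JDK's built-in ftp: URL handler stands in for the FTP tool the article links to. A file could then be piped from FTP straight into HDFS without touching the local disk:

import java.io.InputStream;
import java.net.URL;

public class FtpToHdfs {
    public static void main(String[] args) throws Exception {
        // Hypothetical cluster address and config files; substitute your own.
        HDFSUtil hdfs = new HDFSUtil("hdfs://namenode.example.com:54310");
        hdfs.setHadoopSite("./hadoop-site.xml");
        hdfs.setHadoopDefault("./hadoop-default.xml");
        hdfs.setConfigure(false);
        // The JDK's ftp: URL handler yields an InputStream, and the
        // stream-based upLoad overload copies it straight into HDFS.
        InputStream ftpStream = new URL("ftp://user:pass@ftp.example.com/data.txt").openStream();
        hdfs.upLoad(ftpStream, "/user/work/data.txt");
    }
}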
If you want to see how file download from FTP works, refer to this article: ftp下载工具 (FTP download tool). To pass files between FTP and HDFS, simply create a class that calls the interfaces of the utilities in these two articles; the code is self-written and has been tested to work. That is all for this article; hopefully it helps you get more comfortable with Java.
