In Eclipse, add the Hadoop core jar to the project; I used hadoop-core-1.0.3.jar.
Then write a simple test class that uses the FileSystem class, which does much the same thing as the hadoop shell.
Note: Hadoop I/O is stream-based, so data is transferred as byte streams. Uploading a local file to HDFS likewise means reading the file into a byte stream and then writing that stream out (see the sketch after the test class below).
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class test {

    public static void main(String[] args) throws Exception {
        // The bytes we want to write to HDFS
        byte[] buff = "hello hadoop world!\n".getBytes();

        String hadoop_path = "hdfs://master:9099/mr/test1234";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hadoop_path), conf);
        Path path = new Path(hadoop_path);

        // Create the file on HDFS and get an output stream to it
        FSDataOutputStream out = fs.create(path);
        // Control the replication factor of the file
        fs.setReplication(path, (short) 1);

        out.write(buff);
        out.close();
    }
}
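As the note above says, uploading an actual local file works the same way: open the local file as a byte stream and copy it into an HDFS output stream. Below is a minimal sketch of that, assuming the same hdfs://master:9099 namenode; the class name UploadTest and the paths /home/user/local.txt and /mr/local.txt are placeholders for illustration only.

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.InputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class UploadTest {

    public static void main(String[] args) throws Exception {
        // Local source file and HDFS destination (placeholder paths)
        String localPath = "/home/user/local.txt";
        String hdfsPath = "hdfs://master:9099/mr/local.txt";

        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);

        // Read the local file as a byte stream
        InputStream in = new BufferedInputStream(new FileInputStream(localPath));
        // Create the target file on HDFS and get an output stream
        FSDataOutputStream out = fs.create(new Path(hdfsPath));

        // Copy bytes from the local stream to the HDFS stream, closing both when done
        IOUtils.copyBytes(in, out, 4096, true);
    }
}

For comparison, the hadoop shell does the same job with hadoop fs -put, and either result can be checked afterwards with hadoop fs -cat.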
Original post: http://blog.csdn.net/smile0198/article/details/18967641