全部程序如下:
import java.io.IOException;
- import java.net.URI;
- import java.net.URISyntaxException;
-
- import org.apache.hadoop.conf.Configuration;
- import org.apache.hadoop.fs.FSDataInputStream;
- import org.apache.hadoop.fs.FSDataOutputStream;
- import org.apache.hadoop.fs.FileStatus;
- import org.apache.hadoop.fs.FileSystem;
- import org.apache.hadoop.fs.FileUtil;
- import org.apache.hadoop.fs.Path;
- import org.apache.hadoop.io.IOUtils;
-
-
- public class HDFSTest {
-
-
- public static void WriteToHDFS(String file, String words) throws IOException, URISyntaxException
- {
- Configuration conf = new Configuration();
- FileSystem fs = FileSystem.get(URI.create(file), conf);
- Path path = new Path(file);
- FSDataOutputStream out = fs.create(path);
-
-
- out.writeBytes(words);
- out.write(words.getBytes("UTF-8"));
-
- out.close();
-
-
-
-
- }
-
- public static void ReadFromHDFS(String file) throws IOException
- {
- Configuration conf = new Configuration();
- FileSystem fs = FileSystem.get(URI.create(file), conf);
- Path path = new Path(file);
- FSDataInputStream in = fs.open(path);
-
- IOUtils.copyBytes(in, System.out, 4096, true);
-
-
-
-
-
-
-
-
-
-
- }
-
- public static void DeleteHDFSFile(String file) throws IOException
- {
- Configuration conf = new Configuration();
- FileSystem fs = FileSystem.get(URI.create(file), conf);
- Path path = new Path(file);
-
- fs.delete(path,true);
- fs.close();
- }
-
- public static void UploadLocalFileHDFS(String src, String dst) throws IOException
- {
- Configuration conf = new Configuration();
- FileSystem fs = FileSystem.get(URI.create(dst), conf);
- Path pathDst = new Path(dst);
- Path pathSrc = new Path(src);
-
- fs.copyFromLocalFile(pathSrc, pathDst);
- fs.close();
- }
-
- public static void ListDirAll(String DirFile) throws IOException
- {
- Configuration conf = new Configuration();
- FileSystem fs = FileSystem.get(URI.create(DirFile), conf);
- Path path = new Path(DirFile);
-
- FileStatus[] status = fs.listStatus(path);
-
- for(FileStatus f: status)
- {
- System.out.println(f.getPath().toString());
- }
-
- Path[] listedPaths = FileUtil.stat2Paths(status);
- for (Path p : listedPaths){
- System.out.println(p.toString());
- }
- }
-
- public static void main(String [] args) throws IOException, URISyntaxException
- {
-
- ListDirAll("hdfs://ubuntu:9000/user/kqiao");
-
- String fileWrite = "hdfs://ubuntu:9000/user/kqiao/test/FileWrite";
- String words = "This words is to write into file!\n";
- WriteToHDFS(fileWrite, words);
-
- ReadFromHDFS(fileWrite);
-
- DeleteHDFSFile(fileWrite);
-
-
-
- }
- }
FSDataOutputStream os = hdfs.create(new Path(args[0]));
注意:可以调用 os.flush() 刷新数据流;有时写入的文件不能立即被其他读者看见——只有写入的数据超过一个块时,其他读者才能看见第一个块,但仍看不见正在写入的当前块。可以使用 os.sync() 强制将所有缓冲与数据节点同步。其实每一个 os.close() 中都隐含了一次 sync() 调用。
|