
Hadoop Basics Tutorial - Chapter 4: The HDFS Java API (4.6 Java API Applications)


4.6 Java API Applications

4.6.1 Uploading a File

First, create a file on the local client. For example, create a document named word2.txt on the D drive; its contents can be anything.

Write the following Java program in Eclipse:

package cn.hadron.hdfsDemo;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class PutFile {
    public static void main(String[] args) throws Exception {
        // Local source file and HDFS destination path
        String local = "D:\\word2.txt";
        String dest = "hdfs://192.168.80.131:9000/user/root/input/word2.txt";
        Configuration cfg = new Configuration();
        // Connect to HDFS as user "root"
        FileSystem fs = FileSystem.get(URI.create(dest), cfg, "root");
        // Copy the local file to HDFS
        fs.copyFromLocalFile(new Path(local), new Path(dest));
        fs.close();
    }
}

Note that the destination URI

String dest = "hdfs://192.168.80.131:9000/user/root/input/word2.txt";

must point at the same NameNode as the fs.defaultFS setting in core-site.xml, whose value is hdfs://node1:9000. Because the hosts file on the local Windows machine has no entry for node1, the IP address is used here instead of the host name.

After the program runs, word2.txt appears in the HDFS /user/root/input directory.

Additional note:

The statement

FileSystem fs = FileSystem.get(URI.create(dest), cfg, "root");

specifies the root user explicitly, because the default user on Windows is Administrator. If the root user were not specified, the program could throw an exception such as: Permission denied: user=Administrator
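As an alternative to passing the user name into FileSystem.get(), the client user can be supplied through the HADOOP_USER_NAME property, which the Hadoop 2.x client's UserGroupInformation consults when determining the login user. A minimal sketch, assuming that mechanism and reusing the paths from the example above (the class name PutFileAsRoot is illustrative):

package cn.hadron.hdfsDemo;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class PutFileAsRoot {
    public static void main(String[] args) throws Exception {
        // Assumption: the client reads HADOOP_USER_NAME when no user is
        // passed to FileSystem.get(); it must be set before the first call
        System.setProperty("HADOOP_USER_NAME", "root");
        String dest = "hdfs://192.168.80.131:9000/user/root/input/word2.txt";
        Configuration cfg = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(dest), cfg);
        fs.copyFromLocalFile(new Path("D:\\word2.txt"), new Path(dest));
        fs.close();
    }
}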

4.6.2 Downloading a File

package cn.hadron.hdfsDemo;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.IOException;
import java.net.URI;
public class GetFile {
    public static void main(String[] args) throws IOException {
        // HDFS source file and local destination path
        String hdfsPath = "hdfs://192.168.80.131:9000/user/root/input/words.txt";
        String localPath = "D:/copy_words.txt";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        Path hdfs_path = new Path(hdfsPath);
        Path local_path = new Path(localPath);
        // Copy the HDFS file to the local file system
        fs.copyToLocalFile(hdfs_path, local_path);
        fs.close();
    }
}
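If only the file contents are needed, the download can also be done as a stream: fs.open() returns an input stream on the HDFS file, and org.apache.hadoop.io.IOUtils copies it to any java.io output stream without going through the local Hadoop file system. A minimal sketch (same HDFS path as above; the local file name D:/stream_words.txt is illustrative):

package cn.hadron.hdfsDemo;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
public class ReadFile {
    public static void main(String[] args) throws Exception {
        String hdfsPath = "hdfs://192.168.80.131:9000/user/root/input/words.txt";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        InputStream in = fs.open(new Path(hdfsPath));
        OutputStream out = new FileOutputStream("D:/stream_words.txt");
        // Copy with a 4096-byte buffer; the final argument closes both streams
        IOUtils.copyBytes(in, out, 4096, true);
        fs.close();
    }
}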

4.6.3 Creating an HDFS Directory

package cn.hadron.hdfsDemo;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class CreateDir {
    public static void main(String[] args) throws Exception {
        String url = "hdfs://192.168.80.131:9000/tmp/";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(url), conf, "root");
        // mkdirs creates the directory (and any missing parents)
        // and returns true on success
        boolean b = fs.mkdirs(new Path(url));
        System.out.println(b);
        fs.close();
    }
}
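FileSystem.mkdirs() behaves like mkdir -p: it creates any missing parent directories and normally returns true even when the directory already exists. A caller that needs to distinguish the two cases can test with fs.exists() first; a minimal sketch (the class name CreateDirIfAbsent is illustrative):

package cn.hadron.hdfsDemo;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class CreateDirIfAbsent {
    public static void main(String[] args) throws Exception {
        String url = "hdfs://192.168.80.131:9000/tmp/";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(url), conf, "root");
        Path dir = new Path(url);
        // Create the directory only if it does not exist yet
        if (fs.exists(dir)) {
            System.out.println(dir + " already exists");
        } else {
            System.out.println(fs.mkdirs(dir));
        }
        fs.close();
    }
}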

4.6.4 Deleting an HDFS File or Directory

First, upload a file to the /tmp directory on HDFS:

[root@node1 ~]# hdfs dfs -put /root/words.txt /tmp
[root@node1 ~]# hdfs dfs -ls /tmp
Found 1 items
-rw-r--r--   3 root supergroup         55 2017-05-21 04:57 /tmp/words.txt

The Java code to delete a file or directory:

package cn.hadron.hdfsDemo;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class DeleteFile {
    public static void main(String[] args) throws Exception {
        String uri = "hdfs://192.168.80.131:9000/tmp";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf, "root");
        // The second argument enables recursive deletion, so the
        // /tmp directory is removed together with its contents
        boolean b = fs.delete(new Path(uri), true);
        System.out.println(b);
        fs.close();
    }
}

Checking the HDFS directory from the command line confirms that the /tmp directory has been deleted:

[root@node1 ~]# hdfs dfs -ls /
Found 2 items
drwxr-xr-x   - root supergroup          0 2017-05-14 09:40 /abc
drwxr-xr-x   - root supergroup          0 2017-05-14 09:37 /user
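The second argument of fs.delete() controls recursion. For a single file it can be false, and deleting a non-empty directory with recursive=false is expected to fail with an IOException instead of silently removing the contents. A minimal sketch that removes only the uploaded file and leaves /tmp in place (the class name DeleteSingleFile is illustrative):

package cn.hadron.hdfsDemo;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class DeleteSingleFile {
    public static void main(String[] args) throws Exception {
        String uri = "hdfs://192.168.80.131:9000/tmp/words.txt";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf, "root");
        // recursive=false is sufficient for a single file
        boolean b = fs.delete(new Path(uri), false);
        System.out.println(b);
        fs.close();
    }
}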

4.6.5 Downloading an HDFS Directory

package cn.hadron.hdfsDemo;
import java.io.IOException;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class CopyToLocalFile {
    public static void main(String[] args) throws IOException {
        // HDFS source directory and local destination directory
        String hdfsPath = "hdfs://192.168.80.131:9000/user/root/input";
        String localPath = "D:\\input";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        Path hdfs_path = new Path(hdfsPath);
        Path local_path = new Path(localPath);
        // copyToLocalFile also works on directories, copying them recursively
        fs.copyToLocalFile(hdfs_path, local_path);
        fs.close();
    }
}

The downloaded input directory and its files can now be seen on the D drive.

The .crc files are checksum files: to guarantee data integrity, Hadoop generates a checksum file for each data file and verifies it on reads and writes to ensure the data is accurate.
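If the local .crc files are unwanted, copyToLocalFile() has a four-argument overload whose last parameter, useRawLocalFileSystem, writes through the raw local file system and skips the checksum wrapper, so no .crc files are produced. A minimal sketch based on the download code above (the local path D:\input_nocrc is illustrative):

package cn.hadron.hdfsDemo;
import java.io.IOException;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class CopyToLocalNoCrc {
    public static void main(String[] args) throws IOException {
        String hdfsPath = "hdfs://192.168.80.131:9000/user/root/input";
        String localPath = "D:\\input_nocrc";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        // delSrc=false keeps the HDFS source; useRawLocalFileSystem=true
        // bypasses the local checksum layer, so no .crc files appear
        fs.copyToLocalFile(false, new Path(hdfsPath), new Path(localPath), true);
        fs.close();
    }
}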

4.6.6 Uploading a Local Directory (Folder)

First, prepare a local directory to upload. Here, the input directory downloaded earlier is renamed to words, and the .crc checksum files are deleted.

The Java code to upload a local directory:

package cn.hadron.hdfsDemo;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class CopyFromLocalFile {
    public static void main(String[] args) throws Exception {
        // HDFS destination directory and local source directory
        String hdfsPath = "hdfs://192.168.80.131:9000/user/root/";
        String localPath = "D:\\words";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf, "root");
        Path hdfs_path = new Path(hdfsPath);
        Path local_path = new Path(localPath);
        // copyFromLocalFile uploads the whole directory recursively
        fs.copyFromLocalFile(local_path, hdfs_path);
        fs.close();
    }
}

Check the HDFS directory from the command line:

[root@node1 ~]# hdfs dfs -ls /user/root
Found 2 items
drwxr-xr-x   - root supergroup          0 2017-05-21 03:48 /user/root/input
drwxr-xr-x   - root supergroup          0 2017-05-21 05:21 /user/root/words
[root@node1 ~]# hdfs dfs -ls /user/root/words
Found 2 items
-rw-r--r--   3 root supergroup         30 2017-05-21 05:21 /user/root/words/word2.txt
-rw-r--r--   3 root supergroup         55 2017-05-21 05:21 /user/root/words/words.txt
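The same check can be made from the Java API: fs.listStatus() returns one FileStatus per entry in a directory, from which the path and length shown by hdfs dfs -ls can be printed. A minimal sketch listing the words directory uploaded above (the class name ListDir is illustrative):

package cn.hadron.hdfsDemo;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class ListDir {
    public static void main(String[] args) throws Exception {
        String uri = "hdfs://192.168.80.131:9000/user/root/words";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf, "root");
        // One FileStatus per file or subdirectory
        for (FileStatus status : fs.listStatus(new Path(uri))) {
            System.out.println(status.getPath() + "\t" + status.getLen());
        }
        fs.close();
    }
}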