Hadoop: HDFS Operation Examples


      Please credit the source when reposting: http://blog.csdn.net/l1028386804/article/details/45921443

      This article demonstrates two ways of operating on the HDFS file system in Hadoop: first from the command line, and second through Java code.

      一、The command-line approach: hadoop fs xxx

         hadoop fs -ls /                                      List the contents of the HDFS root directory
         hadoop fs -lsr /                                     Recursively list the contents of the HDFS root directory
         hadoop fs -mkdir /d1                                 Create the directory d1 in HDFS
         hadoop fs -put <linux source> <hdfs destination>     Upload data from Linux to a specific path in HDFS
         hadoop fs -get <hdfs source> <linux destination>     Download data from HDFS to a specific path on Linux
         hadoop fs -text <hdfs file>                          View a file stored in HDFS
         hadoop fs -rm <hdfs file>                            Delete a file in HDFS
         hadoop fs -rmr <hdfs directory>                      Recursively delete a directory in HDFS
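
      Putting these commands together, a typical round trip might look like the following (the local paths /home/hadoop/log.txt and /home/hadoop/log_copy.txt are only illustrative):

         hadoop fs -mkdir /d1
         hadoop fs -put /home/hadoop/log.txt /d1/log.txt
         hadoop fs -text /d1/log.txt
         hadoop fs -get /d1/log.txt /home/hadoop/log_copy.txt
         hadoop fs -rmr /d1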

   二、The Java code approach

        1、Reading data from HDFS via java.net.URL

package com.lyz.hadoop.hdfs;

import java.io.InputStream;
import java.net.URL;

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.io.IOUtils;

/**
 * Read a file stored in Hadoop (HDFS) via a URL
 * @author liuyazhuang
 *
 */
public class AppDemo1 {
	// Path of the file in HDFS
	private static final String HDFS_PATH = "hdfs://liuyazhuang:9000/d100/d1000";
	public static void main(String[] args) throws Exception {
		// Register a stream handler factory so that URL understands the hdfs:// scheme
		URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
		URL url = new URL(HDFS_PATH);
		InputStream in = url.openStream();
		// Copy the file contents to the console; the final argument closes the stream
		IOUtils.copyBytes(in, System.out, 1024, true);
	}
}
The result: the contents of the HDFS file /d100/d1000 are printed to the console.
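
One caveat: URL.setURLStreamHandlerFactory may be called at most once per JVM (a second call throws an Error), so the registration is often placed in a static initializer. A minimal sketch of that variant (the class name AppDemo1StaticInit is just illustrative):

package com.lyz.hadoop.hdfs;

import java.io.InputStream;
import java.net.URL;

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.io.IOUtils;

/**
 * Variant of AppDemo1 that registers the hdfs:// URL handler in a static
 * initializer, since URL.setURLStreamHandlerFactory may be called only once per JVM.
 */
public class AppDemo1StaticInit {
	private static final String HDFS_PATH = "hdfs://liuyazhuang:9000/d100/d1000";

	static {
		// Runs once when the class is loaded; a second call anywhere would throw an Error
		URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
	}

	public static void main(String[] args) throws Exception {
		InputStream in = new URL(HDFS_PATH).openStream();
		// Copy the file contents to the console; the final argument closes the stream
		IOUtils.copyBytes(in, System.out, 1024, true);
	}
}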

        2、Operating on HDFS with Hadoop's FileSystem class

        (1) Creating a directory

            Before the program runs, the directory does not yet exist in HDFS:

          

        Run the program code:

package com.lyz.hadoop.hdfs;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

/**
 * Operate on HDFS with the FileSystem API
 * @author liuyazhuang
 *
 */
public class AppDemo2 {
	private static final String HDFS_PATH = "hdfs://liuyazhuang:9000";
	private static final String DIR_PATH = "/d100";
	private static final String FILE_PATH = "/d100/d1000";
	public static void main(String[] args) throws Exception {
		FileSystem fileSystem = FileSystem.get(new URI(HDFS_PATH), new Configuration());
		// Create the directory
		makeDir(fileSystem);
		// Upload a file
		//uploadData(fileSystem);
		// Download the file
//		downloadData(fileSystem);
		// Delete the file
//		deleteData(fileSystem);
	}

	/**
	 * Delete the file
	 * @param fileSystem
	 * @throws IOException
	 */
	private static void deleteData(FileSystem fileSystem) throws IOException {
		// true means delete recursively
		fileSystem.delete(new Path(FILE_PATH), true);
	}

	/**
	 * Download the file (print its contents to the console)
	 * @param fileSystem
	 * @throws IOException
	 */
	private static void downloadData(FileSystem fileSystem) throws IOException {
		FSDataInputStream in = fileSystem.open(new Path(FILE_PATH));
		IOUtils.copyBytes(in, System.out, 1024, true);
	}

	/**
	 * Create the directory
	 * @param fileSystem
	 * @throws IOException
	 */
	private static void makeDir(FileSystem fileSystem) throws IOException {
		fileSystem.mkdirs(new Path(DIR_PATH));
	}

	/**
	 * Upload a file
	 * @param fileSystem
	 * @throws IOException
	 * @throws FileNotFoundException
	 */
	private static void uploadData(FileSystem fileSystem) throws IOException,
			FileNotFoundException {
		FSDataOutputStream out = fileSystem.create(new Path(FILE_PATH));
		InputStream in = new FileInputStream(new File("d:/log.txt"));
		IOUtils.copyBytes(in, out, 1024, true);
	}
}

          The result:

               

         The directory has been created successfully in the HDFS file system.
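
         If the program may be run more than once, FileSystem.exists can be used to skip creation when the directory is already there. A minimal sketch of such a helper for AppDemo2 (makeDirIfAbsent is a hypothetical addition; it relies on the imports already present in the class):

	/**
	 * Create the directory only if it does not already exist
	 */
	private static void makeDirIfAbsent(FileSystem fileSystem) throws IOException {
		Path dir = new Path(DIR_PATH);
		if (!fileSystem.exists(dir)) {
			fileSystem.mkdirs(dir);
		}
	}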

         (2) Uploading a file

         Before the upload, the file does not yet exist in HDFS:

    

      Run the code. The class is the same AppDemo2 listed above; only main() changes so that uploadData is the active call:

	public static void main(String[] args) throws Exception {
		FileSystem fileSystem = FileSystem.get(new URI(HDFS_PATH), new Configuration());
		// Create the directory
//		makeDir(fileSystem);
		// Upload a file
		uploadData(fileSystem);
		// Download the file
//		downloadData(fileSystem);
		// Delete the file
//		deleteData(fileSystem);
	}

       After the upload, the file appears in HDFS at /d100/d1000.
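
       As an aside, FileSystem can also copy a local file directly, without opening the streams by hand. A sketch of an alternative upload helper for AppDemo2 (uploadDataByCopy is a hypothetical addition; the source path d:/log.txt is the same one used above):

	/**
	 * Upload d:/log.txt by letting FileSystem copy the local file itself
	 */
	private static void uploadDataByCopy(FileSystem fileSystem) throws IOException {
		// First argument is the local source, second is the HDFS destination
		fileSystem.copyFromLocalFile(new Path("d:/log.txt"), new Path(FILE_PATH));
	}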

      

     

       (3) Downloading a file

       Here, "downloading" means printing the contents of the HDFS file to the console.

     Run the code. Again the class is the same AppDemo2; main() now calls downloadData:

	public static void main(String[] args) throws Exception {
		FileSystem fileSystem = FileSystem.get(new URI(HDFS_PATH), new Configuration());
		// Create the directory
//		makeDir(fileSystem);
		// Upload a file
//		uploadData(fileSystem);
		// Download the file
		downloadData(fileSystem);
		// Delete the file
//		deleteData(fileSystem);
	}

       The result: the contents of /d100/d1000 are printed to the console.
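
       To save the file to the local file system instead of printing it, FileSystem.copyToLocalFile can be used. A sketch of such a variant for AppDemo2 (downloadDataToLocal and the destination path d:/log_from_hdfs.txt are hypothetical):

	/**
	 * Download the HDFS file to a local path instead of printing it
	 */
	private static void downloadDataToLocal(FileSystem fileSystem) throws IOException {
		// First argument is the HDFS source, second is the local destination
		fileSystem.copyToLocalFile(new Path(FILE_PATH), new Path("d:/log_from_hdfs.txt"));
	}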

   

      (4) Deleting a file

      This deletes the file that was uploaded to HDFS.

     Run the code. The class is still AppDemo2; main() now calls deleteData:

	public static void main(String[] args) throws Exception {
		FileSystem fileSystem = FileSystem.get(new URI(HDFS_PATH), new Configuration());
		// Create the directory
//		makeDir(fileSystem);
		// Upload a file
//		uploadData(fileSystem);
		// Download the file
//		downloadData(fileSystem);
		// Delete the file
		deleteData(fileSystem);
	}

        The result: the file /d100/d1000 has been removed from HDFS.
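
        FileSystem.delete returns a boolean indicating whether anything was removed, so the result can be checked and the parent directory listed afterwards. A sketch of such a variant for AppDemo2 (deleteAndVerify is hypothetical and needs one extra import, org.apache.hadoop.fs.FileStatus):

	/**
	 * Delete the file and list what remains under the parent directory
	 */
	private static void deleteAndVerify(FileSystem fileSystem) throws IOException {
		boolean deleted = fileSystem.delete(new Path(FILE_PATH), true);
		System.out.println("deleted: " + deleted);
		// List whatever is left under /d100
		for (FileStatus status : fileSystem.listStatus(new Path(DIR_PATH))) {
			System.out.println(status.getPath());
		}
	}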

     
