Saturday, 28 October 2017

Read & Write Data to HDFS Using the Java API — Programs, Part 2

PROGRAM 6 :  Copy a file from the local filesystem to HDFS...


import java.io.IOException;
import java.io.InputStream;
import java.io.FileInputStream;
import java.io.BufferedInputStream;
import java.net.URI;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.conf.Configuration;

/**
 * Copies a file from the local filesystem into HDFS.
 *
 * <p>Usage: {@code hadoop FileSystemCopyToHdfs <localSrc> <hdfsDst>}
 * A dot is printed for each progress callback Hadoop makes while writing.
 */
public class FileSystemCopyToHdfs
{
 public static void main(String args[]) throws Exception
 {
  // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
  if (args.length < 2)
  {
   System.err.println("Usage: FileSystemCopyToHdfs <localSrc> <hdfsDst>");
   System.exit(1);
  }
  String localSrc = args[0];
  String dst = args[1];

  InputStream in = new BufferedInputStream(new FileInputStream(localSrc));
  try
  {
   Configuration conf = new Configuration();
   FileSystem fs = FileSystem.get(URI.create(dst), conf);
   FSDataOutputStream out = fs.create(new Path(dst), new Progressable()
   {
    public void progress()
    {
     System.out.print(".");
    }
   });
   // copyBytes with close=true closes both streams once the copy finishes.
   IOUtils.copyBytes(in, out, 4096, true);
  }
  catch (IOException e)
  {
   // If we failed before copyBytes ran, the local stream would otherwise leak.
   IOUtils.closeStream(in);
   throw e;
  }
 }
}

Output:

hadoop FileSystemCopyToHdfs /home/administrator/demo.txt /copyiedata

hadoop fs -cat /copyiedata
17/08/11 11:39:33 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
HELLO WORLD...




PROGRAM 7 :   List the status of files/directories in HDFS...


import java.io.IOException;
import java.io.BufferedInputStream;
import java.net.URI;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.Configuration;

/**
 * Lists the contents of the HDFS paths given on the command line.
 *
 * <p>Usage: {@code hadoop FileListStatus <path> [<path> ...]}
 * Every argument is treated as a path; the listing of each is printed,
 * one entry per line.
 */
public class FileListStatus
{
 public static void main(String[] args) throws Exception
 {
  Configuration config = new Configuration();
  // The first argument's scheme/authority selects the target filesystem.
  FileSystem fileSystem = FileSystem.get(URI.create(args[0]), config);

  Path[] targets = new Path[args.length];
  int idx = 0;
  for (String arg : args)
  {
   targets[idx++] = new Path(arg);
  }

  FileStatus[] statuses = fileSystem.listStatus(targets);
  for (Path listed : FileUtil.stat2Paths(statuses))
  {
   System.out.println(listed);
  }
 }
}


Output:

hadoop FileListStatus hdfs://localhost:9000/Employee/student.txt
17/08/11 11:43:25 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
hdfs://localhost:9000/Employee/student.txt




PROGRAM 8 :  Create a Directory in hdfs...



import java.io.IOException;
import java.io.InputStream;
import java.net.URI;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.Configuration;

/**
 * Creates a directory (including any missing parents) in HDFS.
 *
 * <p>Usage: {@code hadoop FileSystemMkdir <hdfsDirUri>}
 */
public class FileSystemMkdir
{
 public static void main(String args[]) throws Exception
 {
  String uri = args[0];
  Configuration conf = new Configuration();
  FileSystem fs = FileSystem.get(URI.create(uri), conf);

  try
  {
   boolean created = fs.mkdirs(new Path(uri));
   if (created)
   {
    System.out.println("Directory Created...");
   }
   else
   {
    // mkdirs() returning false was previously silent; report it.
    System.out.println("Directory was NOT created.");
   }
  }
  catch (IOException e)
  {
   // Narrow catch: mkdirs declares IOException; report to stderr.
   System.err.println("mkdirs failed: " + e);
  }
 }
}


Output:

hadoop FileSystemMkdir hdfs://localhost:9000/SampleDir
17/08/11 11:55:04 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Directory Created...




PROGRAM 9 :  Show the Status of the File...


import java.io.IOException;
import java.io.InputStream;
import java.net.URI;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.Configuration;

/**
 * Prints the metadata (status) of an HDFS file or directory.
 *
 * <p>Usage: {@code hadoop FileSystemStatus <hdfsUri>}
 * Shows path, type, length, modification time (epoch millis),
 * replication, block size, owner, group, and permissions.
 */
public class FileSystemStatus
{
 public static void main(String args[]) throws Exception
 {
  String uri = args[0];
  Configuration conf = new Configuration();
  FileSystem fs = FileSystem.get(URI.create(uri), conf);
  Path file = new Path(uri);
  FileStatus stat = fs.getFileStatus(file);
  System.out.println("Path: " + stat.getPath().toString());
  System.out.println("isDir: " + stat.isDirectory());
  System.out.println("Length: " + stat.getLen());
  // Fixed label typo ("Modificaton" -> "Modification"); value is epoch millis.
  System.out.println("Modification Time: " + stat.getModificationTime());
  // Replication and block size are 0 for directories.
  System.out.println("Replication: " + stat.getReplication());
  System.out.println("Block Size: " + stat.getBlockSize());
  System.out.println("Owner: " + stat.getOwner());
  System.out.println("Group: " + stat.getGroup());
  System.out.println("Permissions: " + stat.getPermission().toString());
 }
}

Output:

hadoop FileSystemStatus hdfs://localhost:9000/Employee
17/08/11 11:52:02 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Path: hdfs://localhost:9000/Employee
isDir: true
Length: 0
Modificaton Time: 1502429644769
Replication: 0
Block Size: 0
Owner: administrator
Group: supergroup
Permissions: rwxr-xr-x




PROGRAM 10 :  Delete the Directory 


import java.io.IOException;
import java.io.BufferedInputStream;
import java.net.URI;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.Configuration;

/**
 * Deletes a file or directory from HDFS.
 *
 * <p>Usage: {@code hadoop FileSystemDelete <hdfsUri>}
 *
 * <p>Bug fix: the original version never deleted directories (it only
 * printed "File is Deleted" without calling delete) and deleted files
 * while printing "File is not Deleted". It also used the deprecated
 * single-argument {@code FileSystem.delete(Path)}.
 */
public class FileSystemDelete
{
 public static void main(String[] args) throws Exception
 {
  String uri = args[0];
  Configuration conf = new Configuration();
  FileSystem fs = FileSystem.get(URI.create(uri), conf);
  Path target = new Path(uri);

  // Throws FileNotFoundException if the path does not exist.
  FileStatus stat = fs.getFileStatus(target);

  // Recursive deletion only when the target is a directory.
  boolean deleted = fs.delete(target, stat.isDirectory());
  if (deleted)
  {
   System.out.println("Deleted: " + target);
  }
  else
  {
   System.out.println("Could NOT delete: " + target);
  }
 }
}


Output:



hadoop FileSystemDelete hdfs://localhost:9000/Employee/
17/08/11 11:56:59 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
File is Deletedfalse

0 comments:

Post a Comment