Member since: 05-02-2018
Posts: 7
Kudos Received: 0
Solutions: 0
06-20-2018
07:07 AM
Thanks for the reply. After adding the above code, the previous exception is resolved, but it now throws a new one. I have added hadoop-hdfs-2.9.0.jar, yet it still fails with the following exception:
Exception in thread "main" java.lang.NoSuchMethodError: org.apache.hadoop.hdfs.server.namenode.NameNode.getAddress(Ljava/lang/String;)Ljava/net/InetSocketAddress;
at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:99)
at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3242)
at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:121)
at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:3291)
at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:3259)
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:470)
at Opts.main(Opts.java:14)
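A NoSuchMethodError on a Hadoop class usually means two different Hadoop releases are mixed on the classpath (for example, hadoop-common from one version and hadoop-hdfs from another), rather than a missing jar. A minimal diagnostic sketch (the class name WhichJar is illustrative, not from the original post) that prints which jar each HDFS client class is actually loaded from, run with the same classpath as the failing program:

public class WhichJar
{
    public static void main(String[] args) throws ClassNotFoundException
    {
        // Print the jar each class resolves to; if the two locations belong to
        // different Hadoop versions, that mismatch explains the NoSuchMethodError.
        for (String name : new String[] {
                "org.apache.hadoop.fs.FileSystem",
                "org.apache.hadoop.hdfs.DistributedFileSystem" }) {
            Class<?> c = Class.forName(name);
            System.out.println(name + " -> "
                    + c.getProtectionDomain().getCodeSource().getLocation());
        }
    }
}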
06-19-2018
01:26 PM
18/06/19 18:40:43 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Exception in thread "main" org.apache.hadoop.fs.UnsupportedFileSystemException: No FileSystem for scheme "hdfs"
at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:3332)
at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3352)
at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:124)
at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:3403)
at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:3371)
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:477)
at com.Jambo.App.main(App.java:21)
My code is:
import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class App
{
    public static void main(String[] args) throws IOException
    {
        System.out.println("Hello World!");
        System.out.println("---143---");
        String localPath = "/home/user1/Documents/hdfspract.txt";
        String uri = "hdfs://172.16.32.139:9000";
        String hdfsDir = "hdfs://172.16.32.139:9000/fifo_tbl";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        fs.copyFromLocalFile(new Path(localPath), new Path(hdfsDir));
    }
}
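For reference, "No FileSystem for scheme "hdfs"" usually means the hadoop-hdfs(-client) jar, or its META-INF/services FileSystem registration, is missing from the classpath (a common symptom with shaded/fat jars). A minimal sketch, assuming the same NameNode URI as above (the class name HdfsSchemeCheck is illustrative), that maps the hdfs scheme to DistributedFileSystem explicitly; the hadoop-hdfs classes still have to be present on the classpath:

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class HdfsSchemeCheck
{
    public static void main(String[] args) throws Exception
    {
        Configuration conf = new Configuration();
        // Explicit scheme-to-class mapping; this bypasses the ServiceLoader lookup that
        // fails when META-INF/services/org.apache.hadoop.fs.FileSystem is not packaged.
        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        FileSystem fs = FileSystem.get(URI.create("hdfs://172.16.32.139:9000"), conf);
        System.out.println("Connected to: " + fs.getUri());
    }
}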
Labels: Apache Hadoop
06-18-2018
01:59 PM
import org.apache.hadoop.conf.*;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.net.URI;

public class HdfsCon
{
    public static void main(String[] args) throws Exception
    {
        String HdfsUrl = "hdfs://172.16.32.139:9000";
        Configuration conf = new Configuration();
        URI uri = new URI(HdfsUrl);
        FileSystem fs = FileSystem.get(URI.create(HdfsUrl), conf);
        String fp = "/home/user1/Documents/hive-site.xml";
        String tp = "/fifo_tbl";
        fs.copyFromLocalFile(new Path(fp), new Path(tp));
    }
}
It gives me the following exception:
Exception in thread "main" org.apache.hadoop.ipc.RemoteException: Server IPC version 9 cannot communicate with client version 4
at org.apache.hadoop.ipc.Client.call(Client.java:1113)
at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:229)
at com.sun.proxy.$Proxy1.getProtocolVersion(Unknown Source)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:85)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:62)
at com.sun.proxy.$Proxy1.getProtocolVersion(Unknown Source)
at org.apache.hadoop.ipc.RPC.checkVersion(RPC.java:422)
at org.apache.hadoop.hdfs.DFSClient.createNamenode(DFSClient.java:183)
at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:281)
at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:245)
at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:100)
at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:1446)
at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:67)
at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:1464)
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:263)
at HdfsCon.main(HdfsCon.java:20)
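"Server IPC version 9 cannot communicate with client version 4" indicates the program is running against Hadoop 1.x client jars while the cluster at 172.16.32.139 runs Hadoop 2.x; the fix is to put client jars matching the cluster release on the classpath rather than changing the code. A minimal sketch (the class name ClientVersionCheck is illustrative) to confirm which Hadoop version the classpath actually provides:

import org.apache.hadoop.util.VersionInfo;

public class ClientVersionCheck
{
    public static void main(String[] args)
    {
        // Prints the Hadoop version of the client jars on the classpath; it should
        // match the cluster (2.x here) for the RPC protocol versions to be compatible.
        System.out.println("Hadoop client version: " + VersionInfo.getVersion());
    }
}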
Labels: Apache Hadoop
05-30-2018
01:18 PM
18/05/30 17:12:15 INFO jdbc.Utils: Supplied authorities: localhost:10000
18/05/30 17:12:15 INFO jdbc.Utils: Resolved authority: localhost:10000
Exception in thread "main" java.lang.NoSuchMethodError: org.apache.hadoop.hive.common.auth.HiveAuthUtils.getSocketTransport(Ljava/lang/String;II)Lorg/apache/thrift/transport/TTransport;
at org.apache.hive.jdbc.HiveConnection.createUnderlyingTransport(HiveConnection.java:519)
at org.apache.hive.jdbc.HiveConnection.createBinaryTransport(HiveConnection.java:539)
at org.apache.hive.jdbc.HiveConnection.openTransport(HiveConnection.java:309)
at org.apache.hive.jdbc.HiveConnection.<init>(HiveConnection.java:196)
at org.apache.hive.jdbc.HiveDriver.connect(HiveDriver.java:107)
at java.sql.DriverManager.getConnection(DriverManager.java:664)
at java.sql.DriverManager.getConnection(DriverManager.java:247)
at Hive_java.main(Hive_java.java:15)
The driver name is org.apache.hive.jdbc.HiveDriver and the connection URL is "jdbc:hive2://localhost:10000/default", "", "".
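The NoSuchMethodError on HiveAuthUtils.getSocketTransport typically points at mismatched Hive jars (hive-jdbc from one release combined with hive-common or hive-exec from another) rather than a problem with the connection code itself. For context, a minimal connection sketch assuming a single consistent hive-jdbc standalone jar on the classpath (the class name HiveJdbcSketch and the SHOW TABLES query are illustrative):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveJdbcSketch
{
    public static void main(String[] args) throws Exception
    {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        try (Connection con = DriverManager.getConnection(
                "jdbc:hive2://localhost:10000/default", "", "");
             Statement stmt = con.createStatement();
             ResultSet rs = stmt.executeQuery("SHOW TABLES")) {
            // Each row of the result set holds one table name from the default database.
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        }
    }
}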
Labels: Apache Hive