我的 HDFS 集群没有配置过任何认证(Kerberos 未启用),但客户端连接时一直报如下错误:
java.lang.IllegalArgumentException: Can't get Kerberos realm
at org.apache.hadoop.security.HadoopKerberosName.setConfiguration(HadoopKerberosName.java:71)
at org.apache.hadoop.security.UserGroupInformation.initialize(UserGroupInformation.java:319)
at org.apache.hadoop.security.UserGroupInformation.ensureInitialized(UserGroupInformation.java:304)
at org.apache.hadoop.security.UserGroupInformation.isAuthenticationMethodEnabled(UserGroupInformation.java:392)
at org.apache.hadoop.security.UserGroupInformation.isSecurityEnabled(UserGroupInformation.java:386)
at org.apache.hadoop.ipc.RPC.getProtocolProxy(RPC.java:621)
at org.apache.hadoop.hdfs.NameNodeProxiesClient.createProxyWithAlignmentContext(NameNodeProxiesClient.java:370)
at org.apache.hadoop.hdfs.NameNodeProxiesClient.createNonHAProxyWithClientProtocol(NameNodeProxiesClient.java:348)
at org.apache.hadoop.hdfs.server.namenode.ha.ClientHAProxyFactory.createProxy(ClientHAProxyFactory.java:46)
at org.apache.hadoop.hdfs.server.namenode.ha.AbstractNNFailoverProxyProvider.createProxyIfNeeded(AbstractNNFailoverProxyProvider.java:152)
at org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider.getProxy(ConfiguredFailoverProxyProvider.java:60)
at org.apache.hadoop.io.retry.RetryInvocationHandler$ProxyDescriptor.<init>(RetryInvocationHandler.java:197)
at org.apache.hadoop.io.retry.RetryInvocationHandler.<init>(RetryInvocationHandler.java:328)
at org.apache.hadoop.io.retry.RetryInvocationHandler.<init>(RetryInvocationHandler.java:322)
at org.apache.hadoop.io.retry.RetryProxy.create(RetryProxy.java:59)
at org.apache.hadoop.hdfs.NameNodeProxiesClient.createHAProxy(NameNodeProxiesClient.java:326)
at org.apache.hadoop.hdfs.NameNodeProxiesClient.createProxyWithClientProtocol(NameNodeProxiesClient.java:144)
at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:356)
at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:290)
at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:171)
at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3303)
at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:124)
at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:3352)
at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:3320)
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:479)
at org.apache.hadoop.fs.FileSystem$1.run(FileSystem.java:217)
at org.apache.hadoop.fs.FileSystem$1.run(FileSystem.java:214)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730)
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:214)
at com.saicmotor.worker.HdfsTest.main(HdfsTest.java:70)
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.security.authentication.util.KerberosUtil.getDefaultRealm(KerberosUtil.java:110)
at org.apache.hadoop.security.HadoopKerberosName.setConfiguration(HadoopKerberosName.java:69)
... 31 more
Caused by: KrbException: Cannot locate default realm
at sun.security.krb5.Config.getDefaultRealm(Config.java:1029)
... 37 more
连接代码如下
/**
 * 依次尝试四种方式连接 HDFS 并递归打印根目录内容,任意一种成功即返回。
 *
 * 报错根因:集群本身未启用 Kerberos,但 Hadoop 客户端在
 * {@code UserGroupInformation} 静态初始化时仍会走到
 * {@code KerberosUtil.getDefaultRealm()}(见堆栈中
 * HadoopKerberosName.setConfiguration);本机没有 krb5.conf 时 JDK 抛出
 * "KrbException: Cannot locate default realm",包装成
 * "IllegalArgumentException: Can't get Kerberos realm"。
 *
 * 跳过 Kerberos 的做法:
 *  1. 在任何 Hadoop 类加载之前把 java.security.krb5.realm / kdc 置为空串,
 *     让 JDK 返回空 realm 而不是抛异常;
 *  2. 在 Configuration 上强制 hadoop.security.authentication=simple,
 *     覆盖 core-site.xml 中可能残留的 kerberos 配置。
 */
public static void main(String[] args) {
    // 必须先于任何 Hadoop 类触发 UserGroupInformation 静态初始化之前设置,
    // 否则 getDefaultRealm() 仍会因找不到 krb5.conf 而抛 KrbException
    System.setProperty("java.security.krb5.realm", "");
    System.setProperty("java.security.krb5.kdc", "");

    System.out.println("configuration null");
    String core_site = PropertiesUtil.getSysProperty("hdfs.core_site");
    String hdfs_site = PropertiesUtil.getSysProperty("hdfs.hdfs_site");
    System.out.println(core_site);
    System.out.println(hdfs_site);
    System.out.println("-=-----" + System.getProperty("java.security.krb5.conf"));

    HdfsTest test = new HdfsTest();

    // 方式一:不带任何配置
    try {
        printTree(test, test.getFs());
        return;
    } catch (Exception e) {
        e.printStackTrace();
    }

    // 方式二:通过 getFs(core-site, hdfs-site) 重载
    System.out.println("configuration core_site");
    try {
        printTree(test, test.getFs(core_site, hdfs_site));
        return;
    } catch (Exception e) {
        e.printStackTrace();
    }

    // 方式三:手工构造 Configuration,强制 simple 认证
    System.out.println("conf core-site");
    try {
        Configuration conf = simpleAuthConf(core_site, hdfs_site);
        printTree(test, FileSystem.get(URI.create("hdfs://tcluster"), conf));
        return;
    } catch (Exception e) {
        e.printStackTrace();
    }

    // 方式四:同方式三,但显式指定用户 "spark"
    System.out.println("conf hdfs-site");
    try {
        Configuration conf = simpleAuthConf(core_site, hdfs_site);
        printTree(test, FileSystem.get(URI.create("hdfs://tcluster"), conf, "spark"));
        return;
    } catch (Exception e) {
        e.printStackTrace();
    }
}

/**
 * 构造显式使用 simple(无)认证的 Configuration。
 * 即便 core-site.xml / hdfs-site.xml 中写了 kerberos,这里也强制回退到 simple。
 */
private static Configuration simpleAuthConf(String coreSitePath, String hdfsSitePath) {
    Configuration conf = new Configuration();
    conf.addResource(new Path(coreSitePath));
    conf.addResource(new Path(hdfsSitePath));
    conf.set("hadoop.security.authentication", "simple");
    return conf;
}

/** 递归列出 fs 根目录下所有路径并逐行打印。调用方负责捕获异常。 */
private static void printTree(HdfsTest test, FileSystem fs) throws Exception {
    Set<String> set = test.recursiveHdfsPath(fs, new Path("/"));
    set.forEach(System.out::println);
}
怎样才能跳过 Kerberos 验证,直接访问 HDFS?