
java – How to connect to HDFS via Kerberos from an OSGi bundle

We are trying to connect to HDFS with Kerberos from an OSGi bundle running in a Karaf container. We have installed the Hadoop client in Karaf using the Apache ServiceMix bundle:

<groupId>org.apache.servicemix.bundles</groupId>
<artifactId>org.apache.servicemix.bundles.hadoop-client</artifactId>
<version>2.4.1_1</version>

The POM file is attached below:

<build>
        <plugins>
            <plugin>
                <groupId>org.apache.felix</groupId>
                <artifactId>maven-bundle-plugin</artifactId>
                <version>2.3.7</version>
                <extensions>true</extensions>
                <configuration>
                    <instructions>
                        <Bundle-Activator>com.bdbizviz.hadoop.activator.PaHdfsActivator</Bundle-Activator>
                        <Bundle-SymbolicName>${project.artifactId}</Bundle-SymbolicName>
                        <Bundle-Version>${project.version}</Bundle-Version>
                        <Export-Package>
                            <!-- com.google.*,!org.apache.camel.model.dataformat,!org.apache.poi.ddf,!org.apache.xmlbeans,org.apache.commons.collections.*,org.apache.commons.configuration.*,org.apache.hadoop.hdfs*,org.apache.hadoop.hdfs.client*,org.apache.hadoop.hdfs.net*,org.apache.hadoop.hdfs.protocol.datatransfer*,org.apache.hadoop.hdfs.protocol.proto*,org.apache.hadoop.hdfs.protocolPB*,org.apache.hadoop.conf.*,org.apache.hadoop.io.*,org.apache.hadoop.fs.*,org.apache.hadoop.security.*,org.apache.hadoop.metrics2.*,org.apache.hadoop.util.*,org.apache.hadoop*; -->
                            <!-- org.apache.*; -->
                        </Export-Package>
                        <Import-Package>
                            org.apache.hadoop*,org.osgi.framework,*;resolution:=optional
                        </Import-Package>
                        <Include-Resource>
                            {maven-resources},
                            @org.apache.servicemix.bundles.hadoop-client-2.4.1_1.jar!/core-default.xml,
                            @org.apache.servicemix.bundles.hadoop-client-2.4.1_1.jar!/hdfs-default.xml,
                            @org.apache.servicemix.bundles.hadoop-client-2.4.1_1.jar!/mapred-default.xml,
                            @org.apache.servicemix.bundles.hadoop-client-2.4.1_1.jar!/hadoop-metrics.properties
                        </Include-Resource>
                        <DynamicImport-Package>*</DynamicImport-Package>
                    </instructions>
                </configuration>
            </plugin>
        </plugins>
    </build>
    <dependencies>
        <dependency>
            <groupId>org.apache.servicemix.bundles</groupId>
            <artifactId>org.apache.servicemix.bundles.hadoop-client</artifactId>
            <version>2.4.1_1</version>
            <exclusions>
                <exclusion>
                    <groupId>jdk.tools</groupId>
                    <artifactId>jdk.tools</artifactId>
                    <!-- <version>1.7</version> -->
                </exclusion>
            </exclusions>
        </dependency>

    </dependencies>

Code snippet:

import java.io.IOException;
import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.security.UserGroupInformation;

public class TestHdfs implements ITestHdfs {

    public void printName() throws IOException{

        /*

        Configuration config = new Configuration();
        config.set("fs.default.name","hdfs://192.168.1.17:8020");
        config.set("fs.hdfs.impl",org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
        config.set("fs.file.impl",org.apache.hadoop.fs.LocalFileSystem.class.getName());
        try {
            fs = FileSystem.get(config);
            getHostnames(fs);
        } catch (IOException e) {
            e.printStackTrace();
        }*/
        // Resolve Hadoop classes and service files through this bundle's classloader
        // rather than the container's default context classloader
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());

        final Configuration config = new Configuration();
        config.set("fs.default.name","hdfs://192.168.1.124:8020");
        config.set("fs.file.impl",LocalFileSystem.class.getName());
        config.set("fs.hdfs.impl",DistributedFileSystem.class.getName());
        config.set("hadoop.security.authentication","KERBEROS");
        config.set("dfs.namenode.kerberos.principal.pattern","hdfs/*@********.COM");

        System.setProperty("HADOOP_JAAS_DEBUG","true");
        System.setProperty("sun.security.krb5.debug","true");
        System.setProperty("java.net.preferIPv4Stack","true");

        System.out.println("--------------status---:"
                + UserGroupInformation.isSecurityEnabled());
        UserGroupInformation.setConfiguration(config);
        // UserGroupInformation.loginUserFromKeytab(
        //         "hdfs/hadoop1.********.com@********.COM",
        //         "file:/home/kaushal/hdfs-hadoop1.keytab");

        // Log in from the keytab, then create a proxy user that performs the actual HDFS calls
        UserGroupInformation app_ugi = UserGroupInformation
                .loginUserFromKeytabAndReturnUGI("hdfs/hadoop1.********.com@********.COM",
                        "C:\\Users\\desanth.pv\\Desktop\\hdfs-hadoop1.keytab");
        UserGroupInformation proxy_ugi = UserGroupInformation.createProxyUser(
                "ssdfsdfsdfsdfag", app_ugi);
        System.out.println("--------------status---:"
                + UserGroupInformation.isSecurityEnabled());
        /*ClassLoader tccl = Thread.currentThread()
                .getContextClassLoader();*/
        try {
            /*Thread.currentThread().setContextClassLoader(
                    getClass().getClassLoader());*/
            proxy_ugi.doAs(new PrivilegedExceptionAction<Object>() {

                @Override
                public Object run() throws Exception {
                    /*ClassLoader tccl = Thread.currentThread()
                            .getContextClassLoader();*/
                    try {
                        /*Thread.currentThread().setContextClassLoader(
                                getClass().getClassLoader());*/
                        System.out.println("desanth");
                        // Created inside doAs so the RPC connection carries the proxy user's credentials
                        FileSystem fs = FileSystem.get(config);
                        DistributedFileSystem hdfs = (DistributedFileSystem) fs;
                        DatanodeInfo[] dataNodeStats = hdfs.getDataNodeStats();

                        String[] names = new String[dataNodeStats.length];
                        for (int i = 0; i < dataNodeStats.length; i++) {
                            names[i] = dataNodeStats[i].getHostName();
                            System.out.println((dataNodeStats[i].getHostName()));
                        }
                    } catch (IOException e) {
                        e.printStackTrace();
                    } finally {
                        //Thread.currentThread().setContextClassLoader(tccl);
                    }

                    return null;
                }
            });
        } catch (InterruptedException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } finally {
            /*Thread.currentThread().setContextClassLoader(tccl);*/

        }
    }



    public void getHostnames(FileSystem fs) throws IOException {
        DistributedFileSystem hdfs = (DistributedFileSystem) fs;
        DatanodeInfo[] dataNodeStats = hdfs.getDataNodeStats();

        String[] names = new String[dataNodeStats.length];
        for (int i = 0; i < dataNodeStats.length; i++) {
            names[i] = dataNodeStats[i].getHostName();
            System.out.println((dataNodeStats[i].getHostName()));
        }
    }
}

Error:

Caused by: org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN,KERBEROS]
[12:35:51 PM] Jayendra Parsai: java.io.IOException: Failed on local exception: java.io.IOException: org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN,KERBEROS]; Host Details : local host is: "jayendra-dynabook-T451-34EW/127.0.1.1"; destination host is: "hadoop2.********.com":8020;

Solution

I have not tried to reproduce this in an OSGi environment, but I think the problem you are running into is probably similar to the one you hit in a Kerberized environment when running from a fat JAR that bundles the hadoop/hdfs dependencies.

Namely, the org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN,KERBEROS] error.

Background

With DEBUG logging turned on, there is an interesting line right after the SASL negotiation:

Get kerberos info proto:interface org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB info:null

Note the info:null at the end: in a successful run there is a class reference there instead.

Tracing this back, SaslRpcClient calls SecurityUtil.getTokenInfo, which kicks off a search through all of the org.apache.hadoop.security.SecurityInfo providers.

org.apache.hadoop.security.SecurityUtil uses java.util.ServiceLoader to look up SecurityInfo instances. By default, ServiceLoader uses the current thread's context ClassLoader to find files in the META-INF/services/ directories on the classpath. The file names correspond to the service names, so here it is looking for META-INF/services/org.apache.hadoop.security.SecurityInfo.
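
A quick way to see what that lookup finds from inside the bundle is to run the same ServiceLoader query yourself, for example right after the setContextClassLoader(...) call in the code above. This is only a diagnostic sketch; the SecurityInfoProbe class is mine, not part of the original code:

import java.util.ServiceLoader;

import org.apache.hadoop.security.SecurityInfo;

public final class SecurityInfoProbe {

    private SecurityInfoProbe() {
    }

    /** Print every SecurityInfo provider visible to the current context classloader. */
    public static void dumpProviders() {
        // Mirror SecurityUtil's lookup: ServiceLoader consults the context classloader
        // for META-INF/services/org.apache.hadoop.security.SecurityInfo entries.
        ClassLoader cl = Thread.currentThread().getContextClassLoader();
        int found = 0;
        for (SecurityInfo info : ServiceLoader.load(SecurityInfo.class, cl)) {
            System.out.println("SecurityInfo provider: " + info.getClass().getName());
            found++;
        }
        if (found == 0) {
            // With no providers, the kerberos info lookup returns null and the client
            // fails with "Client cannot authenticate via:[TOKEN,KERBEROS]".
            System.out.println("No SecurityInfo providers visible to this classloader");
        }
    }
}

If nothing is printed, the bundle's classloader cannot see any META-INF/services/org.apache.hadoop.security.SecurityInfo entries, which matches the info:null line in the log.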

When the JAR is an uber JAR (or, I suspect, when you load things inside an OSGi bundle) and only one such file ends up on the classpath, you have to make sure all of the entries are appended into it. In Maven, for example, you can use the ServicesResourceTransformer to append the entries (a sketch follows below); sbt-assembly has a similar merge option that is easier to configure.
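
For the fat-JAR case this is roughly what the maven-shade-plugin configuration looks like; the plugin version is only illustrative, and the ServicesResourceTransformer is the piece that concatenates the META-INF/services files from all dependencies instead of keeping just one of them:

<plugin>
    <groupId>org.apache.maven.plugins</groupId>
    <artifactId>maven-shade-plugin</artifactId>
    <version>2.4.3</version>
    <executions>
        <execution>
            <phase>package</phase>
            <goals>
                <goal>shade</goal>
            </goals>
            <configuration>
                <transformers>
                    <!-- Appends META-INF/services/* entries (including
                         org.apache.hadoop.security.SecurityInfo) from every dependency -->
                    <transformer
                        implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
                </transformers>
            </configuration>
        </execution>
    </executions>
</plugin>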

In short, make sure that the ClassLoader java.util.ServiceLoader ends up using can find a META-INF/services/org.apache.hadoop.security.SecurityInfo that contains all of the entries from the Hadoop JARs.

In the OSGi case you still have to merge the entries somehow. Could you try including them through the <Include-Resource> section of your bundle configuration, as sketched after this paragraph?
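
A minimal sketch of that idea with the maven-bundle-plugin already used above. It assumes you keep a manually merged copy of META-INF/services/org.apache.hadoop.security.SecurityInfo (its entries copied from the Hadoop client JAR) under your own src/main/resources, so that the {maven-resources} clause packages it into the bundle at the same path; that file location is my assumption, not something from the original build:

<!-- Sketch: {maven-resources} copies src/main/resources into the bundle, so a manually merged
     src/main/resources/META-INF/services/org.apache.hadoop.security.SecurityInfo file ends up
     at the same path inside the bundle JAR, next to the Hadoop defaults pulled from the client JAR. -->
<Include-Resource>
    {maven-resources},
    @org.apache.servicemix.bundles.hadoop-client-2.4.1_1.jar!/core-default.xml,
    @org.apache.servicemix.bundles.hadoop-client-2.4.1_1.jar!/hdfs-default.xml,
    @org.apache.servicemix.bundles.hadoop-client-2.4.1_1.jar!/mapred-default.xml,
    @org.apache.servicemix.bundles.hadoop-client-2.4.1_1.jar!/hadoop-metrics.properties
</Include-Resource>

Whether the entries are then found at runtime still depends on the ServiceLoader lookup going through the bundle's classloader, which is what the Thread.currentThread().setContextClassLoader(...) call in the code snippet above is for.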

Log output

This is the output from a run that does not work:

2018-05-03 12:01:56,739 DEBUG PrivilegedAction as:user@DOMAIN (auth:KERBEROS) from:org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:757) [ForkJoinPool-1-worker-5] org.apache.hadoop.security.UserGroupInformation (UserGroupInformation.java:1893) 
2018-05-03 12:01:56,740 DEBUG Sending sasl message state: NEGOTIATE
                                                                                   [ForkJoinPool-1-worker-5] org.apache.hadoop.security.SaslRpcClient (SaslRpcClient.java:457) 
2018-05-03 12:01:56,741 DEBUG Received SASL message state: NEGOTIATE
auths {
  method: "TOKEN"
  mechanism: "DIGEST-MD5"
  protocol: ""
  serverId: "default"
  challenge: "XXX"
}
auths {
  method: "KERBEROS"
  mechanism: "GSSAPI"
  protocol: "XXX"
  serverId: "XXX"
}
 [ForkJoinPool-1-worker-5] org.apache.hadoop.security.SaslRpcClient (SaslRpcClient.java:389) 
2018-05-03 12:01:56,741 DEBUG Get token info proto:interface org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB info:null                      [ForkJoinPool-1-worker-5] org.apache.hadoop.security.SaslRpcClient (SaslRpcClient.java:264) 
2018-05-03 12:01:56,741 DEBUG Get kerberos info proto:interface org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB info:null                   [ForkJoinPool-1-worker-5] org.apache.hadoop.security.SaslRpcClient (SaslRpcClient.java:291) 
2018-05-03 12:01:56,742 DEBUG PrivilegedActionException as:user@DOMAIN (auth:KERBEROS) cause:org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN,KERBEROS] [ForkJoinPool-1-worker-5] org.apache.hadoop.security.UserGroupInformation (UserGroupInformation.java:1870) 
2018-05-03 12:01:56,742 DEBUG PrivilegedAction as:user@DOMAIN (auth:KERBEROS) from:org.apache.hadoop.ipc.Client$Connection.handleSaslConnectionFailure(Client.java:683) [ForkJoinPool-1-worker-5] org.apache.hadoop.security.UserGroupInformation (UserGroupInformation.java:1893) 
2018-05-03 12:01:56,743  WARN Exception encountered while connecting to the server : org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN,KERBEROS] [ForkJoinPool-1-worker-5] org.apache.hadoop.ipc.Client (Client.java:715) 
2018-05-03 12:01:56,743 DEBUG PrivilegedActionException as:user@DOMAIN (auth:KERBEROS) cause:java.io.IOException: org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN,KERBEROS]
2018-05-03 12:01:56,743 DEBUG closing ipc connection to XXX/nnn.nnn.nnn.nnn:8020: org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN,KERBEROS] [ForkJoinPool-1-worker-5] org.apache.hadoop.ipc.Client (Client.java:1217) 
java.io.IOException: org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN,KERBEROS]
    at org.apache.hadoop.ipc.Client$Connection$1.run(Client.java:720)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1866)
    at org.apache.hadoop.ipc.Client$Connection.handleSaslConnectionFailure(Client.java:683)
    at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:770)
    at org.apache.hadoop.ipc.Client$Connection.access$3200(Client.java:397)
    at org.apache.hadoop.ipc.Client.getConnection(Client.java:1620)
    at org.apache.hadoop.ipc.Client.call(Client.java:1451)
    at org.apache.hadoop.ipc.Client.call(Client.java:1398)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:233)
    at com.sun.proxy.$Proxy10.create(Unknown Source)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:313)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:291)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:203)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:185)
    at com.sun.proxy.$Proxy11.create(Unknown Source)
    at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:1822)
    at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1701)
    at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1636)
    at org.apache.hadoop.hdfs.DistributedFileSystem$8.doCall(DistributedFileSystem.java:480)
    at org.apache.hadoop.hdfs.DistributedFileSystem$8.doCall(DistributedFileSystem.java:476)
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
    at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:476)
    at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:417)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:930)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:911)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:807)
    at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:796)
    ...
Caused by: org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN,KERBEROS]
    at org.apache.hadoop.security.SaslRpcClient.selectSaslClient(SaslRpcClient.java:172)
    at org.apache.hadoop.security.SaslRpcClient.saslConnect(SaslRpcClient.java:396)
    at org.apache.hadoop.ipc.Client$Connection.setupSaslConnection(Client.java:595)
    at org.apache.hadoop.ipc.Client$Connection.access$2000(Client.java:397)
    at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:762)
    at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:758)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1866)
    at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:757)
    ... 50 more
