
The Hadoop cluster uses Kerberos, so normally I have to run kinit first and then commands such as "hadoop fs -ls /". Now I am trying to use a JAAS + GSSAPI login to create a file in the cluster, but it fails. My code is as follows:

import java.security.PrivilegedAction;
import javax.security.auth.Subject;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.ietf.jgss.*;

public class Client {

    public static void main(String[] args) throws LoginException {
        System.setProperty("sun.security.krb5.debug", "false");
        System.setProperty("java.security.krb5.realm", "H236");
        System.setProperty("java.security.krb5.kdc", "172.16.0.236");
        System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");
        System.setProperty("java.security.auth.login.config",
                "/etc/hadoop/conf/jaas.conf");

        // JAAS login using the "Client" entry from jaas.conf
        LoginContext lc = new LoginContext("Client");
        lc.login();
        System.out.println("Authentication succeeded!");

        Subject subject = lc.getSubject();
        Subject.doAs(subject, new PrivilegedAction<byte[]>() {
            public byte[] run() {
                Configuration conf = new Configuration();
                try {
                    // Build Kerberos v5 (GSSAPI) credentials and a security context
                    Oid krb5Mechanism = new Oid("1.2.840.113554.1.2.2");
                    GSSManager manager = GSSManager.getInstance();
                    GSSName clientName = manager.createName("hdfs/172.16.0.239@H236",
                            GSSName.NT_USER_NAME);
                    GSSCredential clientCreds = manager.createCredential(clientName,
                            GSSCredential.DEFAULT_LIFETIME,
                            krb5Mechanism,
                            GSSCredential.INITIATE_ONLY);
                    GSSName serverName = manager.createName("hdfs@172.16.0.239",
                            GSSName.NT_HOSTBASED_SERVICE);
                    GSSContext context = manager.createContext(serverName,
                            krb5Mechanism,
                            clientCreds,
                            GSSContext.DEFAULT_LIFETIME);
                    context.requestMutualAuth(true);
                    context.requestConf(false);
                    context.requestInteg(true);
                    System.out.println(clientCreds.getName().toString());
                    System.out.println(clientCreds.getRemainingLifetime());

                    byte[] outToken = context.initSecContext(new byte[0], 0, 0);

                    // Create a file on the Hadoop cluster
                    FileSystem fs = FileSystem.get(conf);
                    Path f = new Path("hdfs:///hdfs/123");
                    FSDataOutputStream s = fs.create(f, true);
                    System.out.println("done\n");
                    for (int i = 0; i < 100; ++i)
                        s.writeChars("test");
                    s.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
                return null;
            } // end of run
        });
    } // end of main
}
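
For reference, my understanding is that the Hadoop RPC layer normally picks up Kerberos credentials through its own UserGroupInformation class rather than from a raw JAAS Subject. A minimal sketch of that Hadoop-native style, using the same principal and keytab as in my jaas.conf, would look roughly like the following (untested, shown only for comparison; the class name is just an example):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

public class UgiKeytabSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Tell the Hadoop security layer that Kerberos authentication is in use
        conf.set("hadoop.security.authentication", "kerberos");
        UserGroupInformation.setConfiguration(conf);
        // Log in from the keytab with the same principal as in jaas.conf
        UserGroupInformation.loginUserFromKeytab(
                "hdfs/172.16.0.239@H236", "/etc/hadoop/conf/hdfs.keytab");

        // After the UGI login, normal FileSystem calls should carry the credentials
        FileSystem fs = FileSystem.get(conf);
        FSDataOutputStream s = fs.create(new Path("hdfs:///hdfs/123"), true);
        s.writeChars("test");
        s.close();
    }
}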

My jaas.conf is as follows:

Client {
          com.sun.security.auth.module.Krb5LoginModule required
          debug=true
          storeKey=true
          doNotPrompt=true
          useKeyTab=true
          keyTab="/etc/hadoop/conf/hdfs.keytab"
          principal="hdfs/172.16.0.239@H236";
         };
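
Independently of Hadoop, the keytab and the "Client" entry above can be sanity-checked with a small standalone JAAS login. The sketch below (class name is just an example) only performs lc.login() and prints the Kerberos principal and tickets placed in the Subject:

import javax.security.auth.Subject;
import javax.security.auth.kerberos.KerberosPrincipal;
import javax.security.auth.kerberos.KerberosTicket;
import javax.security.auth.login.LoginContext;

public class JaasLoginCheck {
    public static void main(String[] args) throws Exception {
        System.setProperty("java.security.auth.login.config",
                "/etc/hadoop/conf/jaas.conf");
        LoginContext lc = new LoginContext("Client");
        lc.login();
        Subject subject = lc.getSubject();
        // Principals obtained from the keytab
        for (KerberosPrincipal p : subject.getPrincipals(KerberosPrincipal.class)) {
            System.out.println("principal: " + p.getName());
        }
        // Kerberos tickets (e.g. the TGT) held in the Subject's private credentials
        for (KerberosTicket t : subject.getPrivateCredentials(KerberosTicket.class)) {
            System.out.println("ticket for: " + t.getServer().getName()
                    + ", expires: " + t.getEndTime());
        }
        lc.logout();
    }
}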

My login user is root. Before running this code with "hadoop jar ./client.jar", I run kdestroy to clear the Kerberos ticket cache, and then I get the errors below:

Authentication succeeded!
ERROR security.UserGroupInformation: PriviledgedActionException as:root (auth:KERBEROS) cause:javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Attempt to obtain new INITIATE credentials failed! (null))]
ipc.Client: Exception encountered while connecting to the server : javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Attempt to obtain new INITIATE credentials failed! (null))]
ERROR security.UserGroupInformation: PriviledgedActionException as:root (auth:KERBEROS) cause:java.io.IOException: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Attempt to obtain new INITIATE credentials failed! (null))]
WARN retry.RetryInvocationHandler: Exception while invoking class org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create. Not retrying because the invoked method is not idempotent, and unable to determine whether it was invoked
java.io.IOException: Failed on local exception: java.io.IOException: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Attempt to obtain new INITIATE credentials failed! (null))]; Host Details : local host is: "XP236/172.16.0.236"; destination host is: "172.16.0.236":8020;
    at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:760)

I don't know how to make this work. Can anyone help me? Many thanks.


1 Answer