/////////////////////////////////////////////////////////////////////////////
// Original code from libhdfs3. Copyright (c) 2013 - 2014, Pivotal Inc.
// All rights reserved. Author: Zhanwei Wang
/////////////////////////////////////////////////////////////////////////////
//  Modifications by Kumo Inc.
// Copyright (C) Kumo Inc. and its affiliates.
// Author: Jeff.li lijippy@163.com
// All rights reserved.
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published
// by the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program.  If not, see <https://www.gnu.org/licenses/>.
//




#include <kmhdfs/common/exception.h>
#include <sstream>

namespace Hdfs {
    // Fully-qualified Java class names, one per C++ exception type.
    // NOTE(review): "ReflexName" presumably lets the RPC layer translate a
    // remote Java exception class name (received as a string from the
    // NameNode/DataNode) into the corresponding C++ exception — confirm
    // against the exception-dispatch/unwrapping code that reads these.
    const char *HdfsIOException::ReflexName = "java.io.IOException";

    const char *AlreadyBeingCreatedException::ReflexName =
            "org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException";

    const char *AccessControlException::ReflexName =
            "org.apache.hadoop.security.AccessControlException";

    const char *FileAlreadyExistsException::ReflexName =
            "org.apache.hadoop.fs.FileAlreadyExistsException";

    const char *DSQuotaExceededException::ReflexName =
            "org.apache.hadoop.hdfs.protocol.DSQuotaExceededException";

    const char *NSQuotaExceededException::ReflexName =
            "org.apache.hadoop.hdfs.protocol.NSQuotaExceededException";

    const char *ParentNotDirectoryException::ReflexName =
            "org.apache.hadoop.fs.ParentNotDirectoryException";

    const char *SafeModeException::ReflexName =
            "org.apache.hadoop.hdfs.server.namenode.SafeModeException";

    const char *NotReplicatedYetException::ReflexName =
            "org.apache.hadoop.hdfs.server.namenode.NotReplicatedYetException";

    const char *FileNotFoundException::ReflexName = "java.io.FileNotFoundException";

    const char *UnresolvedLinkException::ReflexName =
            "org.apache.hadoop.fs.UnresolvedLinkException";

    const char *UnsupportedOperationException::ReflexName =
            "java.lang.UnsupportedOperationException";

    const char *ReplicaNotFoundException::ReflexName =
            "org.apache.hadoop.hdfs.server.datanode.ReplicaNotFoundException";

    const char *NameNodeStandbyException::ReflexName =
            "org.apache.hadoop.ipc.StandbyException";

    // Note: inner-class name uses the JVM binary-name separator '$'.
    const char *HdfsInvalidBlockToken::ReflexName =
            "org.apache.hadoop.security.token.SecretManager$InvalidToken";

    const char *SaslException::ReflexName = "javax.security.sasl.SaslException";

    const char *RpcNoSuchMethodException::ReflexName = "org.apache.hadoop.ipc.RpcNoSuchMethodException";

    const char *InvalidParameter::ReflexName = "java.lang.IllegalArgumentException";

    const char *HadoopIllegalArgumentException::ReflexName =
            "org.apache.hadoop.HadoopIllegalArgumentException";

    const char *RecoveryInProgressException::ReflexName =
            "org.apache.hadoop.hdfs.protocol.RecoveryInProgressException";

    /**
     * Build an HdfsException carrying both the plain message (via
     * std::runtime_error::what()) and a decorated detail string of the form
     * "<file>: <line>: <arg>\n<stack>".
     *
     * @param arg   human-readable error message.
     * @param file  source file where the exception was raised (may be null).
     * @param line  source line where the exception was raised.
     * @param stack captured stack trace text (may be null).
     */
    HdfsException::HdfsException(const std::string &arg, const char *file,
                                 int line, const char *stack) : std::runtime_error(arg) {
        std::ostringstream ss;
        // Guard against null C strings: streaming a null const char* into an
        // ostream is undefined behavior. '\n' instead of std::endl — no point
        // flushing an in-memory stringstream.
        ss << (file ? file : "(unknown file)") << ": " << line << ": " << arg
           << '\n' << (stack ? stack : "");
        detail = ss.str();
    }
}
