/*
 * Copyright (c) 2004 - 2012 Eike Stepper (Berlin, Germany) and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 * 
 * Contributors:
 *    Eike Stepper - initial API and implementation
 */
package org.eclipse.emf.extensions;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import org.eclipse.core.runtime.Assert;
import org.eclipse.emf.db.util.DBException;
import org.eclipse.emf.db.util.DBModelInformationCache;
import org.eclipse.emf.db.util.DBUtil;
import org.eclipse.emf.ecore.EEnum;
import org.eclipse.emf.ecore.EReference;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.ecore.EcorePackage;

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;

/**
 * Abstract base implementation of {@link IDBTable}: a database table described by an
 * {@link IDBTableDescriptor} and owned by a {@link DBSchema}. Columns ("fields") and indices are
 * discovered lazily from the live JDBC connection and cached for the lifetime of this instance.
 * <p>
 * NOTE(review): the lazy caches ({@code fields}, {@code indices}, {@code columnNames}) are
 * unsynchronized, so instances appear to be single-thread confined — confirm against callers.
 *
 * @author Eike Stepper
 */
public abstract class DBTable implements IDBTable {
    /** Upper-cased names of the columns known to exist in the backing table. */
    private final Set<String> columnNames=new HashSet<String>();

    private final DBSchema schema;

    /** Lazily populated by {@link #initFields()}; {@code null} until first field access. */
    private List<IDBField> fields;

    private final List<DBIndex> indices=new ArrayList<DBIndex>();

    /**
     * Guards {@link #initIndices()}: without it, every call to {@link #getIndexCount()} or
     * {@link #getIndices()} re-queried the metadata and appended duplicate entries to
     * {@code indices}.
     */
    private boolean indicesInitialized;

    private final IDBTableDescriptor descriptor;

    protected DBTable(DBSchema schema, IDBTableDescriptor descriptor) {
        this.schema=schema;
        this.descriptor=descriptor;
    }

    @Override
    public IDBSchema getSchema() {
        return schema;
    }

    @Override
    public IDBTableDescriptor getDescriptor() {
        return descriptor;
    }

    /**
     * Adds a new column to this table (issuing {@code ALTER TABLE ... ADD}) and registers the
     * corresponding field.
     *
     * @param descriptor describes the column to create
     * @return the newly created field
     * @throws DBException if a field with the same name already exists
     * @throws SQLException on database access failure
     */
    @Override
    public DBField addField(IDBFieldDescriptor descriptor) throws SQLException {
        schema.assertUnlocked();
        if (getField(descriptor) != null) {
            throw new DBException("DBField exists: " + descriptor.getName()); //$NON-NLS-1$
        }

        // getField() above triggered initFields(), so 'fields' is guaranteed non-null here.
        DBField field=createField(descriptor);
        fields.add(field);
        return field;
    }

    /**
     * Maps the descriptor's Ecore type to a native SQL column type, executes the
     * {@code ALTER TABLE ... ADD} statement(s), and returns the wrapped field. Reference columns
     * store the target id as an integer.
     *
     * @throws UnsupportedOperationException if the attribute's Ecore type has no SQL mapping
     * @throws SQLException on database access failure
     */
    protected DBField createField(IDBFieldDescriptor descriptor) throws SQLException {
        String type=null;
        DBNativeFieldDescriptor internalDescriptor=(DBNativeFieldDescriptor) descriptor;
        if (internalDescriptor.isAttribute()) {
            // An explicit columnType annotation wins over the default Ecore type mapping.
            type=schema.getTypeProvider().replace(internalDescriptor.getColumnTypeAnnotation());
            if (type != null) {
                String length=internalDescriptor.getColumnLengthAnnotation();
                if (length != null) {
                    type+=getLeftParenthesis() + length + getRightParenthesis();
                }
            } else if (descriptor.getType().equals(EcorePackage.eINSTANCE.getEString())) {
                type=schema.getTypeProvider().getStringType();
            } else if (descriptor.getType().equals(EcorePackage.eINSTANCE.getEBoolean())
                    || descriptor.getType().equals(EcorePackage.eINSTANCE.getEBooleanObject())) {
                type=schema.getTypeProvider().getBooleanType();
            } else if (descriptor.getType().equals(EcorePackage.eINSTANCE.getEInt()) || descriptor.getType().equals(EcorePackage.eINSTANCE.getEIntegerObject())) {
                type=schema.getTypeProvider().getIntType();
            } else if (descriptor.getType().equals(EcorePackage.eINSTANCE.getELong()) || descriptor.getType().equals(EcorePackage.eINSTANCE.getELongObject())) {
                type=schema.getTypeProvider().getLongType();
            } else if (descriptor.getType().equals(EcorePackage.eINSTANCE.getEFloat()) || descriptor.getType().equals(EcorePackage.eINSTANCE.getEFloatObject())) {
                type=schema.getTypeProvider().getFloatType();
            } else if (descriptor.getType().equals(EcorePackage.eINSTANCE.getEDouble())
                    || descriptor.getType().equals(EcorePackage.eINSTANCE.getEDoubleObject())) {
                type=schema.getTypeProvider().getDoubleType();
            } else if (descriptor.getType().equals(EcorePackage.eINSTANCE.getEByteArray())) {
                type=schema.getTypeProvider().getByteArrayType();
            } else if (descriptor.getType().equals(EcorePackage.eINSTANCE.getEDate())) {
                type=schema.getTypeProvider().getDateType();
            } else if (descriptor.getType() instanceof EEnum) {
                // Enums are persisted by their integer literal value.
                type=schema.getTypeProvider().getIntType();
            } else {
                throw new UnsupportedOperationException("Unsupported type : " + descriptor.getType()); //$NON-NLS-1$
            }
            type=MessageFormat.format("{0} NULL DEFAULT NULL", type); //$NON-NLS-1$
        } else {
            // EReference: the foreign key is stored as an integer id column.
            type=MessageFormat.format("{0} NULL DEFAULT NULL", schema.getTypeProvider().getIntType()); //$NON-NLS-1$
        }

        Statement statement=null;
        try {
            statement=getSchema().getConnection().createStatement();
            String name=descriptor.getName();

            // NOTE(review): table/column identifiers are concatenated into the DDL; they must
            // come from trusted model metadata, never from user input.
            if (!columnNames.contains(name.toUpperCase())) {
                statement.execute("ALTER TABLE " + getDescriptor().getName() + " ADD " + name + ' ' + type);
                columnNames.add(name.toUpperCase());
            }

            if (internalDescriptor.hasInheritance()) {
                // Polymorphic references need auxiliary columns for the concrete class id/name.
                EStructuralFeature feature=((DBModelFieldDescriptor) internalDescriptor).getFeature();
                for (Entry<String, String> entry : getAdditionalColumns((EReference) feature).entrySet()) {
                    String columnName=entry.getKey();
                    if (!columnNames.contains(columnName.toUpperCase())) {
                        statement.execute("ALTER TABLE " + getDescriptor().getName() + " ADD " + columnName + ' ' + entry.getValue() + " NULL DEFAULT NULL");
                        columnNames.add(columnName.toUpperCase());
                    }
                }
            }
            return wrap(descriptor);
        } finally {
            DBUtil.close(statement);
        }
    }

    /** @return the closing delimiter of a column length specification */
    protected char getRightParenthesis() {
        return ')';
    }

    /** @return the opening delimiter of a column length specification */
    protected char getLeftParenthesis() {
        return '(';
    }

    /**
     * Looks up a field by the descriptor's name, initializing the field cache on first use.
     *
     * @return the matching field, or {@code null} if none exists
     */
    @Override
    public IDBField getField(IDBFieldDescriptor descriptor) throws SQLException {
        initFields();
        for (DBField field : Iterables.filter(fields, DBField.class)) {
            if (descriptor.getName().equals(field.getDescriptor().getName())) {
                return field;
            }
        }

        return null;
    }

    /**
     * Lazily populates {@code fields} by querying the table and matching its columns against the
     * single-valued structural features of the model class (when this table is model-backed).
     */
    private void initFields() throws SQLException {
        if (fields == null) {
            Statement statement=null;
            ResultSet rSet=null;
            try {
                Connection connection=schema.getConnection();
                statement=connection.createStatement();
                // Upper-cased column name -> candidate features mapped to that column.
                Multimap<String, EStructuralFeature> map=HashMultimap.create();
                if (descriptor instanceof DBModelTableDescriptor) {
                    for (EStructuralFeature feature : ((DBModelTableDescriptor) descriptor).getEClass().getEAllStructuralFeatures()) {
                        if (feature.getUpperBound() == 1) {
                            map.put(schema.getColumnName(feature).toUpperCase(), feature);
                        }
                    }
                }
                fields=Lists.newArrayList();

                rSet=doListColumns(statement, fields, map.asMap());
            } finally {
                DBUtil.close(rSet);
                DBUtil.close(statement);
            }
        }
    }

    /**
     * Reads the table's column set via {@link #getListColumnSQL()}, records the (upper-cased)
     * column names, and builds the field list: columns matching a model feature become model
     * fields, unknown columns become native fields.
     *
     * @return the open result set; the CALLER is responsible for closing it
     */
    private ResultSet doListColumns(Statement statement, List<IDBField> fields, Map<String, Collection<EStructuralFeature>> names) throws SQLException {
        ResultSet resultSet=statement.executeQuery(getListColumnSQL());
        ResultSetMetaData metaData=resultSet.getMetaData();
        for (int i=1; i <= metaData.getColumnCount(); i++) {
            columnNames.add(metaData.getColumnName(i).toUpperCase());
        }
        for (String columnName : columnNames) {
            Collection<EStructuralFeature> features=names.get(columnName);
            if (features != null) {
                main: for (EStructuralFeature feature : features) {
                    boolean hasInheritance=DBModelInformationCache.hasInheritance(feature);
                    if (hasInheritance) {
                        // A polymorphic reference is only usable if ALL of its auxiliary columns exist.
                        for (Entry<String, String> entry : getAdditionalColumns((EReference) feature).entrySet()) {
                            String columnNameExt=entry.getKey();
                            // Bug fix: 'columnNames' stores UPPER-CASED names, so the lookup must
                            // be upper-cased too; the former case-sensitive lookup always missed
                            // and silently skipped every inherited feature.
                            if (!columnNames.contains(columnNameExt.toUpperCase())) {
                                continue main;
                            }
                        }
                    }
                    fields.add(wrap(new DBModelFieldDescriptor(schema, feature)));
                }
            } else {
                fields.add(wrap(new DBNativeFieldDescriptor(columnName, null /* TODO */)));
            }
        }

        return resultSet;
    }

    /**
     * @return the SQL used to discover the table's columns; only its metadata is read, so a single
     *         row suffices
     */
    protected String getListColumnSQL() {
        return "SELECT * FROM " + descriptor.getName() + " LIMIT 1"; //$NON-NLS-1$ //$NON-NLS-2$
    }

    /**
     * Computes the auxiliary columns required by a polymorphic reference: one integer column for
     * the concrete class id and one column for the concrete class name. Insertion order matters
     * for the generated DDL, hence the {@link LinkedHashMap}.
     *
     * @return column name -> SQL type, in declaration order
     */
    private Map<String, String> getAdditionalColumns(EReference ref) {
        String columnName=schema.getColumnName(ref);
        Map<String, String> result=new LinkedHashMap<String, String>();
        result.put(columnName + DBUtil.INTERNAL_CLASS_SUFFIX, schema.getTypeProvider().getIntType());
        result.put(columnName + DBUtil.INTERNAL_CLASS_NAME_SUFFIX, schema.getTypeProvider().getClassNameType());
        return result;
    }

    /** Wraps a field descriptor in the concrete {@link DBField} type of this table flavor. */
    protected abstract DBField wrap(IDBFieldDescriptor descriptor);

    @Override
    public int getFieldCount() throws SQLException {
        initFields();
        return fields.size();
    }

    @Override
    public IDBField[] getFields() throws SQLException {
        initFields();
        return fields.toArray(new IDBField[fields.size()]);
    }

    @Override
    public int getIndexCount() throws SQLException {
        initIndices();
        return indices.size();
    }

    @Override
    public DBIndex[] getIndices() throws SQLException {
        initIndices();
        return indices.toArray(new DBIndex[indices.size()]);
    }

    /**
     * Lazily populates {@code indices} from JDBC index metadata, grouping column names by index
     * name and resolving each column to its field.
     */
    private void initIndices() throws SQLException {
        if (indicesInitialized) {
            return;
        }
        indicesInitialized=true;

        Multimap<String, String> indexes=HashMultimap.create();
        ResultSet rSet=schema.getConnection().getMetaData().getIndexInfo(null, "public", descriptor.getName().toLowerCase(), false, false); //$NON-NLS-1$
        try {
            while (rSet.next()) {
                String indexName=rSet.getString(6 /* "INDEX_NAME" */);
                String colName=rSet.getString(9 /* "COLUMN_NAME" */);
                // Per DatabaseMetaData.getIndexInfo, tableIndexStatistic rows carry null
                // index/column names; skip them to avoid an NPE on toLowerCase().
                if (indexName == null || colName == null) {
                    continue;
                }
                indexes.put(indexName, colName.toLowerCase());
            }
        } finally {
            // Bug fix: this result set was previously leaked.
            DBUtil.close(rSet);
        }

        for (String indexName : indexes.keySet()) {
            Collection<String> columns=indexes.get(indexName);
            List<IDBField> fields=new ArrayList<IDBField>(columns.size());
            for (String column : columns) {
                column=column.toLowerCase();
                for (IDBField field : getFields()) {
                    if (field.getDescriptor().getName().toLowerCase().equals(column)) {
                        fields.add(field);
                        break;
                    }
                }
            }
            Assert.isTrue(!fields.isEmpty(), "Index found without fields !?"); //$NON-NLS-1$
            indices.add(new DBIndex(this, indexName, fields.toArray(new IDBField[fields.size()])));
        }
    }
}
