import React, { useState, useEffect } from 'react';
import * as tf from '@tensorflow/tfjs';
import { LSTM } from '../../models/lstmModel';
import { Transformer } from '../../models/transformerModel';
import { preprocessData } from '../../utils/dataPreprocessor';
import { tuneHyperparameters } from '../../utils/hyperparamTuner';
import './ModelTrainer.css';

function ModelTrainer({ data, setModel, setTrainingHistory }) {
  const [modelType, setModelType] = useState('lstm');
  const [sequenceLength, setSequenceLength] = useState(10);
  const [epochs, setEpochs] = useState(50);
  const [batchSize, setBatchSize] = useState(32);
  const [learningRate, setLearningRate] = useState(0.001);
  const [isTraining, setIsTraining] = useState(false);
  const [autoTune, setAutoTune] = useState(false);
  const [progress, setProgress] = useState(0);
  const [bestParams, setBestParams] = useState(null);
  const [trainingLog, setTrainingLog] = useState([]);

  useEffect(() => {
    return () => {
      // Clean up TF.js memory when component unmounts
      tf.disposeVariables();
    };
  }, []);

  const handleTrain = async () => {
    if (!data || data.length === 0) return;
    
    setIsTraining(true);
    setProgress(0);
    setTrainingLog([]);
    
    try {
      // Preprocess data
      const { X, y, numClasses } = preprocessData(data, sequenceLength);
      
      let model;
      let history;
      
      if (autoTune) {
        // Auto hyperparameter tuning
        setTrainingLog(prev => [...prev, 'Starting hyperparameter tuning...']);
        
        const params = await tuneHyperparameters(
          data,
          modelType,
          sequenceLength,
          (log) => setTrainingLog(prev => [...prev, log]),
          (p) => setProgress(p)
        );
        
        setBestParams(params);
        setTrainingLog(prev => [...prev, `Best params found: ${JSON.stringify(params)}`]);
        
        // Train with best params
        if (modelType === 'lstm') {
          model = LSTM(numClasses, params.sequenceLength, params.units, params.learningRate);
        } else {
          model = Transformer(numClasses, params.sequenceLength, params.dModel, params.numHeads, params.learningRate);
        }
        
        setTrainingLog(prev => [...prev, 'Training final model with best parameters...']);
        history = await model.fit(X, y, {
          epochs: params.epochs,
          batchSize: params.batchSize,
          validationSplit: 0.2,
          callbacks: {
            onEpochEnd: (epoch, logs) => {
              setProgress(((epoch + 1) / params.epochs) * 100);
              setTrainingLog(prev => [...prev, 
                `Epoch ${epoch + 1}/${params.epochs} - loss: ${logs.loss.toFixed(4)} - accuracy: ${logs.acc.toFixed(4)}`
              ]);
            }
          }
        });
      } else {
        // Manual training
        if (modelType === 'lstm') {
          model = LSTM(numClasses, sequenceLength, 64, learningRate);
        } else {
          model = Transformer(numClasses, sequenceLength, 64, 4, learningRate);
        }
        
        setTrainingLog(prev => [...prev, 'Starting model training...']);
        history = await model.fit(X, y, {
          epochs: epochs,
          batchSize: batchSize,
          validationSplit: 0.2,
          callbacks: {
            onEpochEnd: (epoch, logs) => {
              setProgress(((epoch + 1) / epochs) * 100);
              setTrainingLog(prev => [...prev, 
                `Epoch ${epoch + 1}/${epochs} - loss: ${logs.loss.toFixed(4)} - accuracy: ${logs.acc.toFixed(4)}`
              ]);
            }
          }
        });
      }
      
      setModel(model);
      setTrainingHistory(history);
      setTrainingLog(prev => [...prev, 'Training completed!']);
    } catch (error) {
      console.error('Training error:', error);
      setTrainingLog(prev => [...prev, `Error: ${error.message}`]);
    } finally {
      setIsTraining(false);
    }
  };

  return (
    <div className="card">
      <h2>Model Trainer</h2>
      
      <div className="form-group">
        <label>Model Type</label>
        <select 
          value={modelType} 
          onChange={(e) => setModelType(e.target.value)}
          disabled={isTraining}
        >
          <option value="lstm">LSTM</option>
          <option value="transformer">Transformer</option>
        </select>
      </div>
      
      <div className="form-group">
        <label>Sequence Length</label>
        <input 
          type="number" 
          min="1" 
          max="100" 
          value={sequenceLength} 
          onChange={(e) => setSequenceLength(parseInt(e.target.value) || 10)}
          disabled={isTraining || autoTune}
        />
      </div>
      
      {!autoTune && (
        <>
          <div className="form-group">
            <label>Epochs</label>
            <input 
              type="number" 
              min="1" 
              max="200" 
              value={epochs} 
              onChange={(e) => setEpochs(parseInt(e.target.value) || 50)}
              disabled={isTraining}
            />
          </div>
          
          <div className="form-group">
            <label>Batch Size</label>
            <input 
              type="number" 
              min="1" 
              max="128" 
              value={batchSize} 
              onChange={(e) => setBatchSize(parseInt(e.target.value) || 32)}
              disabled={isTraining}
            />
          </div>
          
          <div className="form-group">
            <label>Learning Rate</label>
            <input 
              type="number" 
              min="0.00001" 
              max="0.1" 
              step="0.00001" 
              value={learningRate} 
              onChange={(e) => setLearningRate(parseFloat(e.target.value) || 0.001)}
              disabled={isTraining}
            />
          </div>
        </>
      )}
      
      <div className="form-group">
        <label>
          <input 
            type="checkbox" 
            checked={autoTune} 
            onChange={(e) => setAutoTune(e.target.checked)}
            disabled={isTraining}
          />
          Auto Tune Hyperparameters
        </label>
      </div>
      
      <button 
        className="button" 
        onClick={handleTrain}
        disabled={!data.length || isTraining}
      >
        {isTraining ? 'Training...' : 'Train Model'}
      </button>
      
      {isTraining && (
        <div className="progress-container">
          <div className="progress-bar" style={{ width: `${progress}%` }}></div>
          <div className="progress-text">{Math.round(progress)}%</div>
        </div>
      )}
      
      {bestParams && autoTune && (
        <div className="best-params">
          <h3>Best Parameters Found</h3>
          <pre>{JSON.stringify(bestParams, null, 2)}</pre>
        </div>
      )}
      
      {trainingLog.length > 0 && (
        <div className="training-log">
          <h3>Training Log</h3>
          <div className="log-content">
            {trainingLog.map((log, index) => (
              <div key={index} className="log-entry">{log}</div>
            ))}
          </div>
        </div>
      )}
    </div>
  );
}

// Sole export of this module.
export default ModelTrainer;