import { useState, useCallback } from 'react'
import { ArxivPaper } from '../types/auth'
import { useAuth } from '../contexts/AuthProvider'

/**
 * Public contract returned by the usePapers hook.
 */
interface UsePapersReturn {
  /** Papers currently loaded for display. */
  papers: ArxivPaper[]
  /** True while a fetch or loadMore request is in flight. */
  loading: boolean
  /** Message from the most recent failed fetch, or null when healthy. */
  error: string | null
  /** Number of results matching the current query. */
  totalCount: number
  /** Whether another page of results can be requested via loadMore. */
  hasMore: boolean
  /**
   * Search papers by free-text query with optional filters.
   * `unknown` (not `any`) so implementations must narrow before use;
   * callers can still pass any value.
   */
  fetchPapers: (query?: string, filters?: unknown) => Promise<void>
  /** Fetch papers ranked by the signed-in user's category preferences. */
  fetchPersonalizedPapers: () => Promise<void>
  /** Look up a single paper by its arXiv identifier; null when absent. */
  fetchPaper: (arxivId: string) => Promise<ArxivPaper | null>
  /** Bookmark a paper; rejects with Error on failure. */
  bookmarkPaper: (paperId: string) => Promise<void>
  /** Remove a bookmark; rejects with Error on failure. */
  unbookmarkPaper: (paperId: string) => Promise<void>
  /** Submit a rating for a paper; rejects with Error on failure. */
  ratePaper: (paperId: string, rating: number) => Promise<void>
  /** Fetch the next page of the current result set, if hasMore. */
  loadMore: () => Promise<void>
}

// Static mock data for development, hoisted to module scope so it is a
// stable reference. (Previously this array was rebuilt inside the hook on
// every render AND listed in useCallback dependency arrays, which recreated
// the callbacks on every render and defeated useCallback entirely.)
const MOCK_PAPERS: ArxivPaper[] = [
  {
    id: '1',
    arxiv_id: '2301.00001',
    title: 'Attention Is All You Need: A Comprehensive Survey',
    authors: ['Ashish Vaswani', 'Noam Shazeer', 'Niki Parmar'],
    abstract: 'The dominant sequence transduction models are based on complex recurrent or convolutional neural networks that include an encoder and a decoder. The best performing models also connect the encoder and decoder through an attention mechanism. We propose a new simple network architecture, the Transformer, based solely on attention mechanisms, dispensing with recurrence and convolutions entirely.',
    categories: ['cs.CL', 'cs.LG'],
    published_date: '2023-01-01T00:00:00Z',
    updated_date: '2023-01-01T00:00:00Z',
    pdf_url: 'https://arxiv.org/pdf/2301.00001.pdf',
    subjects: ['Natural Language Processing', 'Machine Learning']
  },
  {
    id: '2',
    arxiv_id: '2301.00002',
    title: 'BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding',
    authors: ['Jacob Devlin', 'Ming-Wei Chang', 'Kenton Lee'],
    abstract: 'We introduce a new language representation model called BERT, which stands for Bidirectional Encoder Representations from Transformers. Unlike recent language representation models, BERT is designed to pre-train deep bidirectional representations from unlabeled text by jointly conditioning on both left and right context in all layers.',
    categories: ['cs.CL', 'cs.AI'],
    published_date: '2023-01-02T00:00:00Z',
    updated_date: '2023-01-02T00:00:00Z',
    pdf_url: 'https://arxiv.org/pdf/2301.00002.pdf',
    subjects: ['Natural Language Processing', 'Artificial Intelligence']
  },
  {
    id: '3',
    arxiv_id: '2301.00003',
    title: 'GPT-3: Language Models are Few-Shot Learners',
    authors: ['Tom B. Brown', 'Benjamin Mann', 'Nick Ryder'],
    abstract: 'Recent work has demonstrated substantial gains on many NLP tasks and benchmarks by pre-training on a large corpus of text followed by fine-tuning on a specific task. While typically task-agnostic in architecture, this method still requires task-specific fine-tuning datasets of thousands or tens of thousands of examples.',
    categories: ['cs.CL', 'cs.AI'],
    published_date: '2023-01-03T00:00:00Z',
    updated_date: '2023-01-03T00:00:00Z',
    pdf_url: 'https://arxiv.org/pdf/2301.00003.pdf',
    subjects: ['Natural Language Processing', 'Artificial Intelligence']
  }
]

/**
 * Hook exposing paper search, a personalized feed, single-paper lookup,
 * bookmarking, rating, and pagination state.
 *
 * Currently backed by in-memory mock data with simulated network latency;
 * the external interface matches the eventual API-backed implementation.
 */
export function usePapers(): UsePapersReturn {
  const [papers, setPapers] = useState<ArxivPaper[]>([])
  const [loading, setLoading] = useState(false)
  const [error, setError] = useState<string | null>(null)
  const [totalCount, setTotalCount] = useState(0)
  // Page value is tracked for the future API integration; only the setter
  // is used today, so the value itself is intentionally discarded.
  const [, setCurrentPage] = useState(1)
  const [hasMore, setHasMore] = useState(false)

  const { userProfile } = useAuth()

  /**
   * Search papers by free-text query. Matches against title, abstract,
   * authors, and categories (case-insensitive substring match).
   * `_filters` is accepted for interface compatibility but unused until
   * the real API is wired up; typed `unknown` rather than `any`.
   */
  const fetchPapers = useCallback(async (query?: string, _filters?: unknown) => {
    setLoading(true)
    setError(null)

    try {
      // Simulate API call
      await new Promise(resolve => setTimeout(resolve, 1000))

      // Filter mock papers based on query
      let filteredPapers = MOCK_PAPERS
      if (query) {
        const lowerQuery = query.toLowerCase()
        filteredPapers = MOCK_PAPERS.filter(paper =>
          paper.title.toLowerCase().includes(lowerQuery) ||
          paper.abstract.toLowerCase().includes(lowerQuery) ||
          paper.authors.some(author => author.toLowerCase().includes(lowerQuery)) ||
          paper.categories.some(cat => cat.toLowerCase().includes(lowerQuery))
        )
      }

      setPapers(filteredPapers)
      setTotalCount(filteredPapers.length)
      setCurrentPage(1)
      setHasMore(false)
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to fetch papers')
    } finally {
      setLoading(false)
    }
    // MOCK_PAPERS is a module-level constant, so the callback is stable.
  }, [])

  /**
   * Fetch papers matching the signed-in user's category preferences,
   * falling back to the full list when nothing matches (or when the user
   * has no preferences yet).
   */
  const fetchPersonalizedPapers = useCallback(async () => {
    setLoading(true)
    setError(null)

    try {
      // Simulate personalized recommendation based on user preferences
      await new Promise(resolve => setTimeout(resolve, 800))

      const userCategories = userProfile?.paper_preferences?.categories ?? []
      const personalizedPapers = MOCK_PAPERS.filter(paper =>
        paper.categories.some(cat => userCategories.includes(cat))
      )

      // Compute the displayed list once so papers and totalCount stay in
      // sync. (Previously totalCount reported 0 while the fallback list
      // was actually being displayed.)
      const displayed = personalizedPapers.length > 0 ? personalizedPapers : MOCK_PAPERS

      setPapers(displayed)
      setTotalCount(displayed.length)
      setCurrentPage(1)
      setHasMore(false)
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to fetch personalized papers')
    } finally {
      setLoading(false)
    }
  }, [userProfile?.paper_preferences?.categories])

  /**
   * Look up a single paper by arXiv id. Resolves to null (rather than
   * rejecting) when the paper is missing or the lookup fails.
   */
  const fetchPaper = useCallback(async (arxivId: string): Promise<ArxivPaper | null> => {
    try {
      // Simulate API call
      await new Promise(resolve => setTimeout(resolve, 500))

      const paper = MOCK_PAPERS.find(p => p.arxiv_id === arxivId)
      return paper ?? null
    } catch (err) {
      console.error('Error fetching paper:', err)
      return null
    }
    // MOCK_PAPERS is a module-level constant, so the callback is stable.
  }, [])

  /** Bookmark a paper. Rejects with a generic Error on failure. */
  const bookmarkPaper = useCallback(async (paperId: string) => {
    try {
      // Simulate API call to bookmark paper
      await new Promise(resolve => setTimeout(resolve, 300))
      console.log('Bookmarked paper:', paperId)
    } catch (err) {
      // Log the underlying cause before surfacing a stable message to callers.
      console.error('Error bookmarking paper:', err)
      throw new Error('Failed to bookmark paper')
    }
  }, [])

  /** Remove a bookmark. Rejects with a generic Error on failure. */
  const unbookmarkPaper = useCallback(async (paperId: string) => {
    try {
      // Simulate API call to unbookmark paper
      await new Promise(resolve => setTimeout(resolve, 300))
      console.log('Unbookmarked paper:', paperId)
    } catch (err) {
      // Log the underlying cause before surfacing a stable message to callers.
      console.error('Error unbookmarking paper:', err)
      throw new Error('Failed to unbookmark paper')
    }
  }, [])

  /** Submit a rating for a paper. Rejects with a generic Error on failure. */
  const ratePaper = useCallback(async (paperId: string, rating: number) => {
    try {
      // Simulate API call to rate paper
      await new Promise(resolve => setTimeout(resolve, 300))
      console.log('Rated paper:', paperId, 'Rating:', rating)
    } catch (err) {
      // Log the underlying cause before surfacing a stable message to callers.
      console.error('Error rating paper:', err)
      throw new Error('Failed to rate paper')
    }
  }, [])

  /**
   * Fetch the next page of the current result set. No-op while a request
   * is already in flight or when there is nothing more to load.
   */
  const loadMore = useCallback(async () => {
    if (!hasMore || loading) return

    setLoading(true)
    try {
      // Simulate loading more papers
      await new Promise(resolve => setTimeout(resolve, 1000))
      // In real implementation, this would fetch the next page
      setCurrentPage(prev => prev + 1)
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to load more papers')
    } finally {
      setLoading(false)
    }
  }, [hasMore, loading])

  return {
    papers,
    loading,
    error,
    totalCount,
    hasMore,
    fetchPapers,
    fetchPersonalizedPapers,
    fetchPaper,
    bookmarkPaper,
    unbookmarkPaper,
    ratePaper,
    loadMore
  }
}