import Foundation
import NaturalLanguage

/// Wraps the `ToothTagger` Core ML word-tagging model and extracts
/// NUM/STATE entities (IOB-tagged spans) from free text.
public class NERProcessor {
    /// The loaded model; `nil` if loading failed in `init`.
    private var nerModel: ToothTagger?

    init() {
        do {
            self.nerModel = try ToothTagger(configuration: .init())
        } catch {
            // Model loading is best-effort; `predict` returns nil when unavailable.
            print("Failed to load the NER Model: \(error.localizedDescription)")
        }
    }

    /// Runs the tagger over `text` and returns the extracted entities.
    ///
    /// Adjacent tokens tagged `B-NUM`/`I-NUM` or `B-STATE`/`I-STATE` are
    /// merged into a single space-joined entity string.
    ///
    /// - Parameter text: The raw input text to analyze.
    /// - Returns: The extracted entity strings (possibly empty), or `nil`
    ///   if the model failed to load or prediction failed.
    func predict(text: String) -> [String]? {
        guard let model = nerModel else { return nil }

        // Tokenize the input text into words.
        let tokenizer = NLTokenizer(unit: .word)
        tokenizer.string = text
        let tokenizedText = tokenizer.tokens(for: text.startIndex..<text.endIndex).map { text[$0] }

        // NOTE(review): `.description` renders the tokens as `["a", "b", ...]`.
        // Confirm the model was actually trained on that array-dump format;
        // otherwise a plain joined string is likely intended.
        guard let prediction = try? model.prediction(input: ToothTaggerInput(text: tokenizedText.description)) else { return nil }

        // Extract the entities by merging IOB-tagged spans.
        var extractedEntities = [String]()
        let tokens = prediction.tokens
        // FIX: was `prediction.tokens` (copy-paste bug), which compared token
        // text against tag names, so no entity was ever matched. Create ML
        // word-tagger classes expose the predicted tags as `labels` — confirm
        // against the generated `ToothTaggerOutput` class.
        let tags = prediction.labels

        for (index, tag) in tags.enumerated() where tag == "B-NUM" || tag == "B-STATE" {
            // Start a new entity at a B- tag, then absorb following I- tokens.
            var entity = tokens[index]
            var offset = 1

            while index + offset < tags.count,
                  tags[index + offset] == "I-NUM" || tags[index + offset] == "I-STATE" {
                entity += " " + tokens[index + offset]
                offset += 1
            }

            extractedEntities.append(entity)
        }

        return extractedEntities
    }
}
