#!/usr/bin/env python3
"""Debug tokenizer issue for Java code parsing"""

from repo_index.codeblocks.parser.create import create_parser

# Test Java code
# Java snippet used as the tokenizer/parser test input.
java_code = """
public class HelloWorld {
    public static void main(String[] args) {
        System.out.println("Hello, World!");
    }
}
"""

SEPARATOR = "=" * 50

print("Testing Java parser tokenization...")
print(SEPARATOR)

# Build a Java parser and inspect the tokenizer it was wired with.
parser = create_parser("java")

print(f"Parser type: {type(parser)}")
print(f"Has tokenizer: {parser.tokenizer is not None}")
tokenizer = parser.tokenizer
if not tokenizer:
    print("WARNING: No tokenizer found!")
else:
    print(f"Tokenizer type: {type(tokenizer)}")

    # Tokenize a trivial string first...
    test_string = "Hello World"
    tokens = tokenizer(test_string)
    print(f"Test string: '{test_string}'")
    print(f"Tokens: {tokens}")
    print(f"Token count: {len(tokens)}")

    # ...then the full Java snippet.
    tokens = tokenizer(java_code)
    print(f"\nJava code token count: {len(tokens)}")

# Parse the Java snippet into a block tree.
module = parser.parse(java_code)

print("\n" + SEPARATOR)
print("Parsed module info:")
print(f"Module type: {module.type}")
print(f"Module children count: {len(module.children)}")

# Check tokens in parsed blocks
def print_block_info(block, indent=0):
    prefix = "  " * indent
    print(f"{prefix}Block: {block.type} - {block.identifier}")
    print(f"{prefix}  Tokens: {block.tokens}")
    print(f"{prefix}  Content length: {len(block.content) if block.content else 0}")
    if hasattr(block, 'children'):
        for child in block.children:
            print_block_info(child, indent + 1)

print("\nBlock structure:")
print_block_info(module)

# Probe llama_index's default tokenizer, if the package is importable.
try:
    from llama_index.core import get_tokenizer

    print("\n" + "=" * 50)
    print("Testing get_tokenizer:")
    default_tokenizer = get_tokenizer()
    print(f"get_tokenizer result: {default_tokenizer}")
    print(f"Type: {type(default_tokenizer)}")

    if default_tokenizer:
        test_tokens = default_tokenizer("Test string")
        print(f"Test tokenization: {test_tokens}")
        print(f"Token count: {len(test_tokens)}")
except Exception as e:
    # Best-effort diagnostic: report import/usage failures, don't crash.
    print(f"Error importing or using get_tokenizer: {e}")