import os
import json
import threading
from datetime import datetime, timedelta
from typing import List, Dict, Any
from flask import Flask, request, jsonify, render_template, send_file

# Flask app serving templates/ and static/ relative to this file.
app = Flask(__name__, template_folder='templates', static_folder='static')
# On-disk JSON stores live under ./data next to this module.
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
DATA_FILE = os.path.join(DATA_DIR, 'events.json')      # accelerometer events
IMAGES_FILE = os.path.join(DATA_DIR, 'images.json')    # image annotation index
# Root directory for the image dataset; overridable via FALL_IMG_BASE.
# Forward slashes are used throughout for path comparisons.
BASE_DATASET_DIR = os.getenv('FALL_IMG_BASE', 'C:/Users/lxh/Desktop/FallDataset').replace('\\', '/')
# Serializes read/write access to the JSON stores within this process.
_lock = threading.Lock()


def ensure_data_file():
    """Create DATA_DIR and, on first run, seed events.json with demo events."""
    os.makedirs(DATA_DIR, exist_ok=True)
    if os.path.exists(DATA_FILE):
        return
    # (age, confidence, device, magnitude, note) for each demo seed row.
    specs = [
        (timedelta(days=2), 0.92, "pi-accel-1", 2.8, "卧室"),
        (timedelta(days=1, hours=3), 0.78, "pi-accel-1", 2.1, "客厅"),
        (timedelta(hours=8), 0.66, "pi-accel-2", 1.9, "走廊"),
    ]
    seed = [
        {
            "ts": (datetime.utcnow() - age).isoformat() + "Z",
            "type": "fall",
            "confidence": confidence,
            "device": device,
            "magnitude": magnitude,
            "note": note,
        }
        for age, confidence, device, magnitude, note in specs
    ]
    with open(DATA_FILE, 'w', encoding='utf-8') as f:
        json.dump(seed, f, ensure_ascii=False, indent=2)


def load_events() -> List[Dict[str, Any]]:
    """Return every stored event, creating the seed file on first use."""
    ensure_data_file()
    with _lock:
        with open(DATA_FILE, 'r', encoding='utf-8') as fh:
            try:
                parsed = json.load(fh)
            except Exception:
                # An unparseable store behaves as if it were empty.
                return []
            return parsed


def save_events(items: List[Dict[str, Any]]):
    """Overwrite the event store with *items*, serialized under _lock."""
    with _lock, open(DATA_FILE, 'w', encoding='utf-8') as fh:
        json.dump(items, fh, ensure_ascii=False, indent=2)


def import_dataset_csv(path: str) -> int:
    """Import events from a CSV file with flexible headers for Fall-Down-Cls-v1.

    Relative paths resolve against DATA_DIR.  Header names are matched
    leniently (ts/timestamp/time, type/label/class, ...).  Rows whose
    numeric fields cannot be parsed are skipped rather than aborting the
    import.  Returns the number of events appended to the store.
    """
    import csv
    if not os.path.isabs(path):
        path = os.path.join(DATA_DIR, path)
    if not os.path.exists(path):
        return 0
    items = load_events()
    count = 0
    # newline='' as required by the csv module docs; utf-8-sig tolerates a
    # BOM that Windows tools commonly prepend to exported CSVs.
    with open(path, 'r', encoding='utf-8-sig', newline='') as f:
        reader = csv.DictReader(f)
        for row in reader:
            ts = row.get('ts') or row.get('timestamp') or row.get('time')
            label = row.get('type') or row.get('label') or row.get('class') or ''
            conf = row.get('confidence') or row.get('score') or row.get('prob') or ''
            mag = row.get('magnitude') or row.get('accel') or row.get('accel_mag') or row.get('mag') or ''
            dev = row.get('device') or row.get('sensor') or row.get('id') or 'dataset'
            note = row.get('note') or ''
            # Normalize the label to the canonical 'fall' / 'normal' pair.
            l = str(label).strip().lower()
            if l in ('1', 'true', 'fall', 'falls'):
                l = 'fall'
            elif l in ('0', 'false', 'normal', 'non-fall', 'nonfall'):
                l = 'normal'
            else:
                # Fall back to an explicit fall_flag column; compare
                # case-insensitively so 'True'/'TRUE' also count.
                flag = str(row.get('fall_flag') or '').strip().lower()
                l = 'fall' if flag in ('1', 'true') else (l or 'fall')
            try:
                confidence = float(conf) if conf != '' else 0.5
                magnitude = float(mag) if mag != '' else 1.0
            except (TypeError, ValueError):
                # Unparseable numeric fields: skip just this row.
                continue
            items.append({
                'ts': ts or datetime.utcnow().isoformat() + 'Z',
                'type': l,
                'confidence': confidence,
                'device': dev,
                'magnitude': magnitude,
                'note': note,
            })
            count += 1
    save_events(items)
    return count


@app.get('/')
def index():
    # Serve the single-page dashboard shell (templates/index.html).
    return render_template('index.html')


@app.get('/api/health')
def health():
    """Liveness probe: always responds with {"ok": true}."""
    return jsonify(ok=True)


@app.get('/api/events')
def get_events():
    """Return the full event list as a JSON array."""
    events = load_events()
    return jsonify(events)


@app.post('/api/events')
def post_event():
    """Append one event to the store.

    Body: a JSON object; missing fields fall back to defaults.
    Returns 400 for malformed JSON, non-object bodies, or non-numeric
    confidence/magnitude (previously the float() conversion raised and
    surfaced as a 500), otherwise {"ok": true, "event": ...}.
    """
    try:
        payload = request.get_json(force=True)
    except Exception:
        return jsonify({"error": "Invalid JSON"}), 400

    if not isinstance(payload, dict):
        return jsonify({"error": "Body must be JSON object"}), 400

    try:
        confidence = float(payload.get("confidence", 0.5))
        magnitude = float(payload.get("magnitude", 1.0))
    except (TypeError, ValueError):
        return jsonify({"error": "confidence and magnitude must be numeric"}), 400

    ev = {
        "ts": payload.get("ts") or datetime.utcnow().isoformat() + "Z",
        "type": payload.get("type", "fall"),
        "confidence": confidence,
        "device": payload.get("device", "unknown"),
        "magnitude": magnitude,
        "note": payload.get("note", "")
    }
    items = load_events()
    items.append(ev)
    save_events(items)
    return jsonify({"ok": True, "event": ev})


@app.post('/api/import')
def import_dataset():
    """Trigger a CSV import; body is {"path": "<csv path>"}."""
    payload = request.get_json(force=True, silent=True) or {}
    csv_path = str(payload.get('path', '')).strip()
    if csv_path:
        imported = import_dataset_csv(csv_path)
        return jsonify({"ok": True, "imported": imported})
    return jsonify({"error": "path required"}), 400


@app.get('/api/stats/daily')
def stats_daily():
    """Per-day event counts over the trailing *days* window.

    Query args: days (int, default 30, clamped to 1..365 — previously a
    negative value yielded empty/nonsense output and a huge value built an
    unbounded bucket dict).
    Response: {"labels": [ISO dates], "values": [counts]}.
    """
    try:
        days = int(request.args.get('days', '30'))
    except (TypeError, ValueError):
        days = 30
    days = max(1, min(365, days))
    items = load_events()
    cutoff = datetime.utcnow() - timedelta(days=days)
    buckets: Dict[str, int] = {}
    # Pre-seed every day in the window so gap days render as zero.
    for i in range(days + 1):
        d = (cutoff + timedelta(days=i)).date().isoformat()
        buckets[d] = 0
    for ev in items:
        try:
            dt = datetime.fromisoformat(ev.get('ts', '').replace('Z', ''))
        except (AttributeError, TypeError, ValueError):
            # Missing/malformed timestamp: the event is not countable.
            continue
        if dt >= cutoff:
            d = dt.date().isoformat()
            buckets[d] = buckets.get(d, 0) + 1
    labels = list(buckets.keys())
    values = [buckets[d] for d in labels]
    return jsonify({"labels": labels, "values": values})


@app.get('/api/stats/class-dist')
def stats_class_dist():
    """Event counts grouped by lowercased type (missing type counts as 'fall')."""
    counts: Dict[str, int] = {}
    for ev in load_events():
        kind = (ev.get('type') or 'fall').lower()
        counts[kind] = counts.get(kind, 0) + 1
    return jsonify({"labels": list(counts.keys()), "values": list(counts.values())})


@app.get('/api/stats/conf-hist')
def stats_conf_hist():
    """Histogram of event confidences over [0, 1].

    Query args: bins (int, default 20, clamped to 5..50).
    Labels are the bin midpoints rounded to 2 decimals.
    """
    try:
        bins = int(request.args.get('bins', '20'))
    except Exception:
        bins = 20
    bins = max(5, min(50, bins))
    counts = [0] * bins
    for ev in load_events():
        if 'confidence' not in ev:
            continue
        # Clamp into [0, bins-1] so v == 1.0 lands in the top bin.
        idx = int(float(ev['confidence']) * bins)
        counts[min(bins - 1, max(0, idx))] += 1
    edges = [i / bins for i in range(bins + 1)]
    mids = [round((lo + hi) / 2, 2) for lo, hi in zip(edges, edges[1:])]
    return jsonify({"labels": mids, "values": counts})


@app.get('/api/stats/mag-hist')
def stats_mag_hist():
    """Histogram of event magnitudes over [0, max(2.0, observed max)].

    Query args: bins (int, default 20, now clamped to 5..50 to match
    conf-hist — previously bins=0 caused a ZeroDivisionError (HTTP 500)
    and negative values produced garbage).
    """
    try:
        bins = int(request.args.get('bins', '20'))
    except (TypeError, ValueError):
        bins = 20
    bins = max(5, min(50, bins))
    vals = [float(ev.get('magnitude', 0.0)) for ev in load_events()]
    if not vals:
        return jsonify({"labels": [], "values": []})
    vmax = max(2.0, max(vals))
    counts = [0] * bins
    for v in vals:
        # The 1e-9 nudge keeps v == vmax inside the top bin.
        idx = min(bins - 1, max(0, int(v / (vmax + 1e-9) * bins)))
        counts[idx] += 1
    edges = [i * (vmax / bins) for i in range(bins + 1)]
    mids = [round((edges[i] + edges[i + 1]) / 2, 2) for i in range(bins)]
    return jsonify({"labels": mids, "values": counts})


# Bootstrap: make sure the image index file exists before handlers run.
os.makedirs(DATA_DIR, exist_ok=True)
if not os.path.exists(IMAGES_FILE):
    with open(IMAGES_FILE, 'w', encoding='utf-8') as f:
        json.dump([], f)

# image index helpers

def load_images() -> List[Dict[str, Any]]:
    """Return the image index; a missing or corrupt file yields [].

    Previously open() sat outside the try block, so deleting images.json at
    runtime crashed every image route; reads are now also serialized with
    _lock for consistency with load_events.
    """
    with _lock:
        try:
            with open(IMAGES_FILE, 'r', encoding='utf-8') as f:
                return json.load(f)
        except (OSError, ValueError):
            # OSError: file missing/unreadable; ValueError: invalid JSON.
            return []


def save_images(items: List[Dict[str, Any]]):
    """Overwrite the image index with *items*.

    Now serialized with _lock, matching save_events — previously concurrent
    requests could interleave writes to images.json.
    """
    with _lock:
        with open(IMAGES_FILE, 'w', encoding='utf-8') as f:
            json.dump(items, f, ensure_ascii=False, indent=2)


# VOC XML parser
def parse_voc_xml(xml_path):
    """Parse a Pascal-VOC annotation file into a list of absolute-pixel boxes.

    Each box dict holds the lowercased label, x1/y1/x2/y2 corners, the image
    width/height (None when the <size> node is absent), and rel=False.
    Parsing is best-effort: on any error the boxes collected so far are
    returned rather than raising.
    """
    import xml.etree.ElementTree as ET
    boxes = []
    try:
        root = ET.parse(xml_path).getroot()
        size = root.find('size')
        width = int(size.find('width').text) if size is not None else None
        height = int(size.find('height').text) if size is not None else None
        for obj in root.findall('object'):
            label = (obj.find('name').text or '').strip().lower()
            bndbox = obj.find('bndbox')
            if bndbox is None:
                continue
            corners = {tag: float(bndbox.find(tag).text)
                       for tag in ('xmin', 'ymin', 'xmax', 'ymax')}
            boxes.append({
                'label': label,
                'x1': corners['xmin'], 'y1': corners['ymin'],
                'x2': corners['xmax'], 'y2': corners['ymax'],
                'width': width, 'height': height, 'rel': False,
            })
    except Exception:
        # Malformed file: fall through with whatever parsed cleanly.
        pass
    return boxes


# normalize label

def normalize_label(label, label_map=None):
    """Fold a raw annotation label onto the canonical 'fall' / 'normal' pair.

    An explicit label_map entry (keyed by the trimmed, lowercased label)
    takes precedence; otherwise common truthy/falsy spellings are folded,
    and anything unrecognized defaults to 'fall'.
    """
    key = str(label or '').strip().lower()
    if label_map and key in label_map:
        return label_map[key]
    if key in {'fall', 'falls', '1', 'true'}:
        return 'fall'
    return 'normal' if key in {'normal', 'non-fall', 'nonfall', '0', 'false'} else 'fall'


# path util

def resolve_dir(path):
    """Normalize *path* to forward slashes.

    Relative paths are anchored under BASE_DATASET_DIR; absolute paths and
    the empty string pass through unchanged.
    """
    cleaned = str(path or '').strip().replace('\\', '/')
    if not cleaned:
        return ''
    if os.path.isabs(cleaned):
        return cleaned
    return os.path.join(BASE_DATASET_DIR, cleaned).replace('\\', '/')


@app.post('/api/images/import-voc')
def import_images_voc():
    """Index images paired with Pascal-VOC XML annotations by filename stem.

    Body (all optional): {"images_dir": ..., "voc_dir": ..., "label_map": {...}}.
    An image is labeled 'fall' when any mapped box label is 'fall'.
    """
    payload = request.get_json(force=True, silent=True) or {}
    images_dir = resolve_dir(payload.get('images_dir', 'images'))
    voc_dir = resolve_dir(payload.get('voc_dir', 'annotations/voc'))
    label_map = payload.get('label_map') or {'fall': 'fall', 'normal': 'normal'}
    if not (os.path.exists(images_dir) and os.path.exists(voc_dir)):
        return jsonify({'error': 'images_dir or voc_dir not found', 'ok': False}), 400
    # Build a stem -> annotation-path index first so image matching is O(1).
    xml_by_stem = {}
    for dirpath, _, filenames in os.walk(voc_dir):
        for filename in filenames:
            if filename.lower().endswith('.xml'):
                xml_by_stem[os.path.splitext(filename)[0]] = os.path.join(dirpath, filename)
    items = load_images()
    imported = 0
    image_exts = ('.jpg', '.jpeg', '.png')
    for dirpath, _, filenames in os.walk(images_dir):
        for filename in filenames:
            if not filename.lower().endswith(image_exts):
                continue
            stem = os.path.splitext(filename)[0]
            xml_path = xml_by_stem.get(stem)
            if xml_path is None:
                continue
            boxes = parse_voc_xml(xml_path)
            mapped = [normalize_label(b.get('label'), label_map) for b in boxes]
            items.append({
                'path': os.path.join(dirpath, filename).replace('\\', '/'),
                'label': 'fall' if 'fall' in mapped else 'normal',
                'boxes': boxes,
                'confidence': 1.0,
            })
            imported += 1
    save_images(items)
    return jsonify({'ok': True, 'imported': imported})


@app.post('/api/images/import-txt')
def import_images_txt():
    """Index images paired with YOLO-style txt annotations by filename stem.

    Each annotation line is `cls cx cy w h` in relative coordinates; class
    ids map to labels via class_map (default {'0': 'fall', '1': 'normal'}).
    An image is labeled 'fall' when any box maps to 'fall'.
    """
    payload = request.get_json(force=True, silent=True) or {}
    images_dir = resolve_dir(payload.get('images_dir', 'images'))
    txt_dir = resolve_dir(payload.get('txt_dir', 'annotations/txt'))
    class_map = payload.get('class_map') or {'0': 'fall', '1': 'normal'}
    if not (os.path.exists(images_dir) and os.path.exists(txt_dir)):
        return jsonify({'error': 'images_dir or txt_dir not found', 'ok': False}), 400
    # Build a stem -> annotation-path index first so image matching is O(1).
    txt_by_stem = {}
    for dirpath, _, filenames in os.walk(txt_dir):
        for filename in filenames:
            if filename.lower().endswith('.txt'):
                txt_by_stem[os.path.splitext(filename)[0]] = os.path.join(dirpath, filename)
    items = load_images()
    imported = 0
    image_exts = ('.jpg', '.jpeg', '.png')
    for dirpath, _, filenames in os.walk(images_dir):
        for filename in filenames:
            if not filename.lower().endswith(image_exts):
                continue
            stem = os.path.splitext(filename)[0]
            txt_path = txt_by_stem.get(stem)
            if txt_path is None:
                continue
            boxes = []
            labels = []
            try:
                with open(txt_path, 'r', encoding='utf-8') as fh:
                    for line in fh:
                        parts = line.strip().split()
                        if len(parts) < 5:
                            continue
                        cls = parts[0]
                        cx, cy, w, h = map(float, parts[1:5])
                        label = class_map.get(str(cls), 'fall')
                        labels.append(label)
                        boxes.append({'label': label, 'cx': cx, 'cy': cy, 'w': w, 'h': h, 'rel': True})
            except Exception:
                # Unreadable/garbled annotation file: skip this image.
                continue
            items.append({
                'path': os.path.join(dirpath, filename).replace('\\', '/'),
                'label': 'fall' if 'fall' in labels else 'normal',
                'boxes': boxes,
                'confidence': 1.0,
            })
            imported += 1
    save_images(items)
    return jsonify({'ok': True, 'imported': imported})


@app.get('/api/images')
def get_images():
    """Paginated listing of the image index.

    Query args: page (>= 1, default 1), size (1..100, default 30).
    Non-numeric values now fall back to the defaults (previously int()
    raised and the route returned a 500), and page <= 0 no longer produces
    bogus negative slices.
    """
    try:
        page = int(request.args.get('page', '1'))
    except (TypeError, ValueError):
        page = 1
    try:
        size = int(request.args.get('size', '30'))
    except (TypeError, ValueError):
        size = 30
    page = max(1, page)
    size = max(1, min(100, size))
    items = load_images()
    start = (page - 1) * size
    end = start + size
    return jsonify({'total': len(items), 'page': page, 'size': size, 'items': items[start:end]})


@app.get('/api/images/preview')
def preview_image():
    """Serve an image file, restricted to BASE_DATASET_DIR.

    The previous plain-prefix startswith() check was traversable: a path
    like '<base>/../../secret' passes a prefix test but escapes the base
    directory. Paths are now normalized with abspath and containment is
    checked with os.path.commonpath.
    """
    path = request.args.get('path', '')
    if not path:
        return jsonify({'error': 'path required'}), 400
    requested = os.path.abspath(path.replace('\\', '/'))
    base = os.path.abspath(BASE_DATASET_DIR)
    try:
        inside = os.path.commonpath([requested, base]) == base
    except ValueError:
        # Different drives (Windows) or otherwise incomparable paths.
        inside = False
    if not inside:
        return jsonify({'error': 'out of base dir'}), 403
    try:
        return send_file(requested)
    except Exception:
        return jsonify({'error': 'file not found'}), 404


@app.get('/api/stats/images/class-dist')
def stats_images_class_dist():
    """Image counts per label; 'fall' and 'normal' always appear (possibly 0)."""
    counts = {'fall': 0, 'normal': 0}
    for entry in load_images():
        label = entry.get('label', 'fall')
        counts[label] = counts.get(label, 0) + 1
    return jsonify({'labels': list(counts.keys()), 'values': list(counts.values())})


if __name__ == '__main__':
    # Seed the event store before serving the first request.
    ensure_data_file()
    # Port configurable via FALLDASH_PORT; bound to localhost only.
    port = int(os.getenv('FALLDASH_PORT', '8001'))
    app.run(host='127.0.0.1', port=port, debug=True)