#!/bin/sh
# Copyright (c) 2013, bkil.hu
# This program is free software and can be distributed under the terms of
# the GNU General Public License v2,
# see COPYING for detailed licensing terms.

# User info:
# Tool to grab EPG from EIT broadcast in Hungary on DVB-T.
# Also contains a conversion step which outputs xmltv.
# Required: dvb-apps (scan, tzap, dvbsnoop)
# Recommended: w-scan (w_scan)
#
# Developer info:
# Output tested on freeguide.
# Note: local DVB-T operator sometimes advertises programs cross-mux
#       (all-dump workaround present)
# TODO: introduce a few more parameters
# TODO: relocate $W under /tmp after enough testing
# TODO: initiate parsing of each multiplex as soon as they are grabbed
# TODO: would have been nice to use &quot; as well but makes regexp ugly
# TODO: review what other information is present in the EIT
# TODO: parse a few more tags to XML
# TODO: unit testability, unit tests
# TODO: tzap error handling (e.g., occupied card)
# TODO: better content descriptor parsing in snoop2tag_soup
# TODO: implement w_scan channel init step
# TODO: packet number adaptation in dump_eit
#
# created on 2013-06-29

main() {
 # Entry point: set up the globals used throughout the script, run the
 # grab/parse/serialize pipeline, then clean up the work directory.
 CHANNELS="$HOME/channels.txt" # produced by (w_)scan, etc.
 ARCPRE="$HOME/tv-guide-archive"
 mkdir -p "$ARCPRE"
 FS="<"                        # field separator used by all intermediate CSVs
 W="$PWD/dvb-epg.tmp"          # work directory for dumps and intermediates
 local CREATED=""
 if [ ! -d "$W" ]; then
  mkdir -p "$W" || exit 1
  CREATED="1"                  # we made it, so we may remove it afterwards
 else
  echo "warning: using already existing $W" >&2
 fi

 generate

 # Remove the work directory only when this run created it.
 [ -n "$CREATED" ] && rm -R "$W"
 true
}

generate() {
 # Run the whole pipeline: dump raw DVB tables, parse them into CSV,
 # combine, archive per day, apply the user's prune lists, and serialize
 # the results as xmltv. Returns non-zero when dump/parse/combine fails.
 dump || return 1
 printf "parse EIT " >&2
 parse || return 1
 echo >&2
 printf "combine " >&2
 local COMB="$W/comb.csv"
 # Use 'return' (not 'exit') so a failure here is handled like the dump
 # and parse failures above, and main() can still clean up its work dir.
 combine "$COMB" || return 1
 echo >&2
 printf "archive " >&2
 archive "$COMB"
 echo >&2
 printf "filter " >&2
 local FCOMB="$W/fcomb.csv"
 filter "$COMB" "$FCOMB"
 echo >&2
 echo "serialize" >&2
 serialize "$HOME/.freeguide-full" "$COMB"
 serialize "$HOME/.freeguide-filtered" "$FCOMB"
 true
}

combine() {
 # Merge every per-multiplex subresult into one sorted, de-duplicated CSV.
 # $1 - output path
 sort -u "$W"/*.subresults.csv > "$1"
}

serialize() {
 # Concatenate the given CSV files and write them as an xmltv document
 # named dvb-<epoch>.xmltv into directory $1. Returns 1 if $1 is absent.
 local DIR="$1"
 shift 1
 [ -d "$DIR" ] || return 1
 local DATE
 DATE=$(date +%s)
 local OUT="$DIR/dvb-$DATE.xmltv"
 cat "$@" |
 csv2xmltv > "$OUT"
 echo " saved as $OUT" >&2
}

filter() {
 # Apply the user's prune lists (located next to the script) to the
 # combined CSV:
 #  - prune-channel.txt: rows on these channels are dropped entirely
 #  - prune-title.txt: exact title matches (anchored by the "<" separator)
 #  - prune-title-partial.txt: substring title matches, taken verbatim
 # Matching titles are not lost: a second pass re-emits those rows with
 # the title field blanked and its text folded into the following field.
 local IN="$1"
 local OUT="$2"
 local TMPHIDE=`ourtemp`
 local TMPCUT=`ourtemp`
 local D=`dirname "$0"`
 local PRUNECH="$D/prune-channel.txt"
 local PRUNETITLE="$D/prune-title.txt"
 local PRUNETITLE2="$D/prune-title-partial.txt"
 # Build the "hide" fixed-string pattern file: exact titles get "<" glued
 # on both sides so they only match a whole field; partials stay as-is.
 {
  cat $PRUNETITLE |
  grep -v "^$" |
  sed "s~^~<~ ; s~$~<~"
  cat $PRUNETITLE2
 } |
 grep -v "^$" > $TMPHIDE
 # Build the "cut" pattern file from the channel prune list, same anchoring.
 {
  cat $PRUNECH |
  grep -v "^$" |
  sed "s~^~<~ ; s~$~<~"
 } |
 grep -v "^$" > $TMPCUT
 {
  # Pass 1: drop pruned channels, then drop the to-be-hidden title rows.
  grep -v -F -f $TMPCUT $IN |
  grep -v -F -f $TMPHIDE

  # Pass 2: re-emit the hidden rows with field 4 (the title) emptied and
  # its text prepended to the next field, so the data survives there.
  # NOTE(review): $TMPHIDE is a single path, so this loop runs exactly
  # once; presumably shaped as a loop to allow more filter files later.
  for FILTER in $TMPHIDE; do
   grep -v -F -f $TMPCUT $IN |
   grep -F -f $FILTER |
   sed -r "s~^(([^$FS]*$FS){3})([^$FS]*)$FS(.*)$~\1$FS\3 \4~"
  done
 } > $OUT
 rm "$TMPHIDE" "$TMPCUT"
}

archive() {
 # Merge the freshly grabbed events into rolling per-day archives
 # ($ARCPRE/YYYYMMDD.csv.bz2), de-duplicated against existing content.
 # $1 - combined CSV (sorted, so each day's rows are contiguous)
 local TMP
 TMP=$(ourtemp)
 # List each distinct day present in the input.
 sed -r "s~^([0-9]{8}).*$~\1~" "$1" |
 uniq |
 while read -r DAY; do
  local ARC="$ARCPRE/$DAY.csv.bz2"
  {
   # Previous archive for this day, if any (missing file is fine on the
   # first run, hence the intentional 2>/dev/null).
   bzcat "$ARC" 2>/dev/null
   grep --no-filename -E "^$DAY" "$W"/*.subresults.csv
  } |
  sort --stable --unique |
  bzip2 -9 > "$TMP" # TODO: implement custom meld which deletes old one if intersecting
  mv "$TMP" "$ARC"
 done
}

ourtemp() {
 # Create a temporary file and print its path.
 # 'tempfile' is Debian-specific and has been removed from current
 # debianutils, so prefer POSIX-ish mktemp and keep tempfile as fallback.
 local O
 O=$(basename "$0")
 if command -v mktemp >/dev/null 2>&1; then
  mktemp "${TMPDIR:-/tmp}/epgd-$O.csv.tmp.XXXXXX"
 else
  tempfile -p "epgd-" -s ".$O.csv.tmp"
 fi
}

dump() {
 # Grab raw tables for every frequency: for each multiplex, tune to its
 # first listed channel and dump the SDT (service names) and EIT (events)
 # into the work directory via zap_dump.
 local FQS="$W/freqs.txt"
 [ -f "$FQS" ] || get_fq "$FQS"

 cat "$FQS" |
 while read FQ; do
  # Pick the first channels.txt entry on this frequency; its channel name
  # is what tzap needs to tune the card to the whole multiplex.
  grep -E "^[^:]+:${FQ}:" "$CHANNELS" |
  head -n 1 |
  cut -d ":" -f 1 |
  {
   read CH
   local SDT="$W/$FQ.dump.sdt.txt"
   local EIT="$W/$FQ.dump.eit.sec"
   zap_dump "$CH" "$SDT" "$EIT"
  }
 done
 true
}

parse() {
 # Convert each frequency's raw EIT dump into a per-mux subresult CSV,
 # running the parsers in parallel (one background job per frequency).
 local FQS="$W/freqs.txt"
 [ -f "$FQS" ] || get_fq "$FQS"

 # The local operator sometimes advertises programs cross-mux (see file
 # header), so service IDs are resolved against the union of all SDT dumps.
 local ALL="$W"/all-dump.sdt.txt
 if [ ! -f "$ALL" ]; then
  cat "$W"/*.dump.sdt.txt > "$ALL" # broadcasting bug
 fi

 cat "$FQS" |
 while read FQ; do
  local OUT="$W/$FQ.subresults.csv"
  # The whole AND-list below is backgrounded, so $! is fresh on every
  # iteration even when $OUT already exists and parse_eit is skipped.
  [ ! -f "$OUT" ] &&
   parse_eit "$W/all-dump.sdt.txt" "$W/$FQ.dump.eit.sec" > "$OUT" &
  echo $!
 done |
 # Barrier: wait for every background parser before returning.
 while read PID; do
  wait $PID
 done
 true
}

get_fq() {
 # Write the distinct, non-empty frequency list (field 2 of the channels
 # file) into "$1".
 local FQS="$1"
 cut -d ":" -f 2 "$CHANNELS" |
 grep -v "^$" |
 sort -u > "$FQS"
}

zap_dump() {
 # Tune the DVB card with tzap, wait for a frontend lock, then dump the
 # SDT and EIT tables. Skipped entirely when both output files exist.
 # $1 - channel name from channels.txt
 # $2 - SDT output file, $3 - EIT output file
 local CH="$1"
 local SDT="$2"
 local EIT="$3"
 local TF="$W/tmp.fifo"
 [ -f "$SDT" ] && [ -f "$EIT" ] && return
 printf "zapping on $CH " >&2
 mkfifo $TF 2>/dev/null
 # tzap keeps the tuner open in the background; its status output goes to
 # the fifo so we can watch for the lock message.
 tzap -F -H -c "$CHANNELS" "$CH" > $TF 2>&1 &
 local PID=$!
 sleep 0.5
 cat < $TF |
 {
  # Print a progress dot each second until tzap reports FE_HAS_LOCK.
  while
   read L
   [ "$L" != "FE_HAS_LOCK" ]
  do
   printf "." >&2
   sleep 1
  done
  echo >&2
  [ -f $SDT ] || dump_parse_sdt $SDT
  [ -f $EIT ] || dump_eit $EIT
  # Release the tuner once both dumps are on disk.
  kill $PID
  wait $PID
 }
 sleep 0.5
}

dump_parse_sdt() {
 # Dump the Service Description Table with 'scan' and reduce each row to
 # "0xID<name" so service IDs can be looked up by tag_soup2csv later.
 # $1 - output file
 printf "dumping SDT" >&2
 scan -q -c -p -U -t 3 -o pids | # 0x0011
 sed -r "s~^([^(]+[^( ]) +\((0x[0-9a-f]+)\).*$~\2${FS}\1~" > "$1"
 echo >&2
}

dump_eit() {
 # Capture 3000 raw EIT sections (PID 0x12) into "$1" with dvbsnoop;
 # 'time' reports how long the grab took on stderr.
 printf "dumping EIT: " >&2
 time dvbsnoop -s sec -crc -n 3000 0x12 -b > "$1"

# Packet-count calibration notes:
# 10000 ~ 155 sec on muxA, 10000 ~ 99 sec on muxC, 2000 ~ 21 sec on muxB.
# The count could be estimated from the dvbtraffic packet rate and a
# reasonable update interval, or set iteratively and stored between runs,
# to target >1 repetitions of every event ID (no bit errors, missing packets, etc.)
}

parse_eit() {
 # Decode one raw EIT section dump into the canonical sorted CSV, using
 # the SDT text dump to translate service IDs to service names.
 # $1 - SDT text dump, $2 - raw EIT section file; CSV goes to stdout.
 local SDT="$1"
 local EIT="$2"
 dvbsnoop -s sec -crc -nph -if "$EIT" |
 snoop2tag_soup |
 tag_soup2csv "$SDT" |
 duration2end_time |
 sort -u
}

snoop2tag_soup() {
 # Reduce dvbsnoop's verbose EIT decode to "tag<value" lines: keep only
 # the fields we serialize, XML-escape &, <, > first (so the "<" field
 # separator inserted by the sed program below stays unambiguous), then
 # normalize each recognized line into tag${FS}value form.
 iconv -f iso-8859-2 -t utf-8 |
 grep -E "^ *((Event_ID|Service_ID|Start_time|Duration|event_name|text_char|text):|Rating: *[1-9]|\[= .*\]$)" |
 grep -v "\[= original language\]" |
# TODO: would have been nice to escape double quotes too:
#  s~\"~\&quot;~
 sed -r "
  s~&~\&amp;~g
  s~<~\&lt;~g
  s~>~\&gt;~g

  s~^ *~~

  s~^(Service_ID|Event_ID): [0-9]+ \((0x[0-9a-f]+)\).*$~\1${FS}\2~
  s~^(Start_time): 0x[0-9a-f]+ \[= +([^ ]+) ([^ ]+) +\(UTC\)\]$~\1${FS}\2T\3Z~
  s~^(Duration): 0x[0-9a-f]+ \[= +([^ ].*) \(UTC\)\]$~\1${FS}\2~
  s~^(event_name|text_char|text): +\"\.\.(.*)\" +-- Charset: ISO/IEC 8859  special table *$~\1${FS}\2~
  s~^\[= +([^ ].*[^ ]) +\((without music, |)general\)\]$~content_descriptor${FS}\1~
  s~^Rating: +[0-9]+ \(0x[0-9a-f]+\) +\[= +([^ ].*)\]$~parental_rating_descriptor${FS}\1~
 " |
 grep -vE "^\[= reserved\]$"
}

tag_soup2csv() {
 # Fold the "tag<value" stream into one CSV row per event:
 # start<duration<service_name<event_name<text_char<text<content<rating<id
 # A new Event_ID or Service_ID tag flushes the previously accumulated
 # event; service names are resolved by grepping the SDT dump ($1) for
 # the service ID.
 # NOTE(review): the service ID is interpolated into a shell command
 # ("fgrep <id> <sdt>") inside awk — fine for dvbsnoop-produced hex IDs,
 # but it would be an injection risk if the input were ever untrusted.
 local SDT="$1"
 awk -F "$FS" -v sdt="$SDT" '
  {
   if (($1 == "Event_ID") || ($1 == "Service_ID")) {
    if (event_id != "") {
     printall();
    }
    if ($1 == "Event_ID") {
     event_id = $2
    } else {
     service_id = $2
     cmd = "fgrep " service_id " " sdt
     cmd | getline
     service_name = $2
     close(cmd)
    }
   } else if ($1 == "Start_time") {
    start_time = $2
   } else if ($1 == "Duration") {
    duration = $2
   } else if ($1 == "event_name") {
    event_name = $2
   } else if ($1 == "text_char") {
    text_char = (text_char $2)
   } else if ($1 == "text") {
    text = (text $2)
   } else if ($1 == "content_descriptor") {
    cdesc = $2
   } else if ($1 == "parental_rating_descriptor") {
    prating = $2
   } else {
    text = (text "(warning: unparsed tag: " $1 ")")
   }
  }
  END {
    printall();
  }
 function printall() {
    if (event_id != "") {
     print start_time FS duration FS service_name FS event_name FS text_char FS text FS cdesc FS prating FS event_id
     event_id = ""
     start_time = ""
     duration = ""
     event_name = ""
     text_char = ""
     text = ""
     cdesc = ""
     prating = ""
    }
 }
 '
}

duration2end_time() {
 # Rewrite column 1 from the UTC timestamp produced by snoop2tag_soup to
 # "YYYYmmddHHMMSS +0000" and replace column 2 (an HH:MM:SS duration)
 # with the computed end time in the same format.
 # NOTE(review): this forks two 'date' processes per input line — the
 # dominant cost of this stage, but acceptable at EPG volumes.
 awk -F "$FS" '
  BEGIN{
   OFS=FS
  }
  {
     line = $0
     start_time = $1
     duration = $2
     cmd = ("date -u -d \"" start_time "\" \"+%s" FS "%Y%m%d%H%M%S\"")
     cmd | getline
     start_time_sec = $1
     start_time = ($2 " +0000")
     close(cmd)
     n = split(duration, d, ":")
     end_time_sec = start_time_sec + (d[1]*60+d[2])*60+d[3]
     cmd = "date -u -d \"@" end_time_sec "\" \"+%Y%m%d%H%M%S +0000\""
     cmd | getline end_time
     close(cmd)
     $0 = line
     $1 = start_time
     $2 = end_time
     print
  }
 '
}

csv2xmltv() {
 # Serialize the combined CSV (stdin) as an xmltv document on stdout:
 # the <channel> list (distinct values of field 3) first, then one
 # <programme> element per row.
 # Ideas for further tags:
 # "(ism.)" '<previously-shown start="20130611000000" />'
 #    <episode-num system="dd_progid">EP01006886.0028</episode-num>
 #    <episode-num system="onscreen">427</episode-num>
 # "feliratozva"   '<subtitles type="teletext" />'
 cat << EOF
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE tv SYSTEM "xmltv.dtd">
<tv source-info-name="DVB-T EIT" generator-info-name="dvb-epg.sh">
EOF
 local TM="all-result.tmp.csv" # NOTE(review): written to the CWD, not $W

 # tee keeps a copy of the stream for the programme pass below while this
 # pipeline extracts the distinct channel names.
 tee "$TM" |
 cut -d "$FS" -f 3 |
 sort |
 uniq |
 sed 's~^.*$~ <channel id="&">\
  <display-name>&</display-name>\
 </channel>~'

 # Row layout: start<stop<channel<title<text_char<text<category<rating<id
 sed -r "s~^([^${FS}]*)${FS}([^${FS}]*)${FS}([^${FS}]*)${FS}([^${FS}]*)${FS}([^${FS}]*)${FS}([^${FS}]*)${FS}([^${FS}]*)${FS}([^${FS}]*)${FS}([^${FS}]*)$~ <programme start=\"\1\" stop=\"\2\" channel=\"\3\">\
  <title lang=\"hu\">\4</title>\
  <desc lang=\"hu\">\5\6 \8</desc>\
  <category lang=\"en\">\7</category>\
 </programme>~" "$TM"
 rm "$TM"

 cat << EOF
</tv>
EOF
}

# Entry point: hand the script's arguments to main.
main "$@"