#!/bin/bash
#title           :urlPopularityClassification.sh
#description     :This script takes a set of links posted by various users and 
#                     (a) creates features (network based and selectivity based) and classes for the posts 
#                     (b) tries to predict the number of clicks obtained by a post
#author          :arthi
#date            :20140201
#usage           :./urlPopularityClassification.sh <tab separated file of posts> <number of clicks per post> <graph of network>
#notes           :Requires python and the helper scripts (getURLsReceived.py,
#                     getPossibleClasses.py, getFeatures.py, getTrainingAndTestFirstMinutes.py,
#                     trainAndTestWithDiffClasses.py) in the working directory.
#==============================================================================

# Fail fast on errors, unset variables, and broken pipelines.
set -euo pipefail

# Validate the argument count BEFORE touching the positional parameters;
# the original printed a message but never exited, so the script kept
# running with missing inputs. Diagnostics go to stderr, exit non-zero.
if [ "$#" -ne 3 ]; then
    echo "Illegal number of parameters" >&2
    echo "usage: $0 <tab separated file of posts> <number of clicks per post> <graph of network>" >&2
    exit 1
fi

POSTS=$1        # csv with user,link posted,time of posting
POST_COUNTS=$2  # space separated file with the number of clicks (bitly) and link
GRAPH_SORTED=$3 # social network graph in tab delimited form: "person followed\tfollower"

# get features and classes
# output in form: url class1 class2 ...
cut -f2 -d',' "$POSTS" | sort | uniq -c > url_link_counts.txt # number of posts per link

# num links each user posts.
# NOTE: must be written to "num_posts_per_user.txt" — the name getFeatures.py
# reads below. The original wrote "num_posts_per_user.nytimes.txt", leaving
# getFeatures.py without its expected input file.
sort "$POSTS" | uniq | cut -f1 -d',' | sort | uniq -c > "num_posts_per_user.txt"

python getURLsReceived.py "$GRAPH_SORTED" "$POSTS" > "num_posts_per_user_received.txt" # num links each user receives
python getPossibleClasses.py "$POSTS" "url_link_counts.txt" "$POST_COUNTS" "classes.txt"

# features in tsv
python getFeatures.py "$POSTS" "url_link_counts.txt" "num_posts_per_user.txt" "num_posts_per_user_received.txt" "$POST_COUNTS" "features.txt"

# split into training and test
# first hour of a link is the training; remaining time is the test data
python getTrainingAndTestFirstMinutes.py "$POSTS" "nytimes.training_1hr.txt" "nytimes.test_1hr.txt"

# classify!
python trainAndTestWithDiffClasses.py "features.txt" "classes.txt" "nytimes.training_1hr.txt" "nytimes.test_1hr.txt"