
from config.config import global_config
from crawler.query_weibo import get_headers
import utils.util as util
import json
import csv

# CSV files backing the tracked-account lists. The delete helpers below
# match rows on their first column, so column 0 holds the account id.
weibo_file_name = 'weibo_uids.csv'
twitter_file_name = 'twitter_users.csv'



def get_weibo_uids():
    """Return every row of the weibo CSV as a list of field lists.

    Returns:
        list[list[str]]: all parsed CSV rows (empty list if the file is empty).
    """
    # with-block closes the handle the original version leaked; newline=""
    # is the mode the csv module documents for correct newline handling.
    with open(weibo_file_name, newline="") as f:
        return list(csv.reader(f))

def add_user(user):
    """Append *user* (an iterable of fields) as one row to the weibo CSV.

    Args:
        user: sequence of field values, e.g. ``[uid, name]``.
    """
    # "a" instead of "a+": nothing is read back. newline="" prevents the
    # csv writer from emitting an extra blank line per row on Windows.
    with open(weibo_file_name, "a", newline="") as f:
        csv.writer(f).writerow(user)

def delete_weibo_user(uid):
    """Remove every row whose first column equals *uid* from the weibo CSV.

    Bug fix: the original called ``cs.remove(line)`` while iterating ``cs``,
    which skips the element after each removal — consecutive matching rows
    survived the delete. Building a filtered copy handles all matches, and
    no longer raises IndexError on empty rows.

    Args:
        uid: account id to delete (compared as a string against column 0).
    """
    with open(weibo_file_name, newline="") as f:
        rows = list(csv.reader(f))
    remaining = [row for row in rows if not row or row[0] != uid]
    save_to_csv(remaining, weibo_file_name)


def save_to_csv(list, filename):
    """Overwrite *filename* with the given rows in CSV format.

    Args:
        list: iterable of rows (each row an iterable of fields).
            NOTE(review): the name shadows the ``list`` builtin; kept
            unchanged so existing keyword callers are not broken.
        filename: destination path, truncated before writing.
    """
    # with-block guarantees the handle is closed even if writing fails;
    # newline="" stops the csv module writing blank lines on Windows.
    with open(filename, "w", newline="") as f:
        csv.writer(f).writerows(list)


def get_twitter_users():
    """Return every row of the twitter CSV as a list of field lists.

    Returns:
        list[list[str]]: all parsed CSV rows (empty list if the file is empty).
    """
    # with-block closes the handle the original version leaked; newline=""
    # is the mode the csv module documents for correct newline handling.
    with open(twitter_file_name, newline="") as f:
        return list(csv.reader(f))

def add_twitter_user(user):
    """Append *user* (an iterable of fields) as one row to the twitter CSV.

    Args:
        user: sequence of field values, e.g. ``[uid, name]``.
    """
    # "a" instead of "a+": nothing is read back. newline="" prevents the
    # csv writer from emitting an extra blank line per row on Windows.
    with open(twitter_file_name, "a", newline="") as f:
        csv.writer(f).writerow(user)

def delete_twitter_user(uid):
    """Remove every row whose first column equals *uid* from the twitter CSV.

    Bug fix: the original called ``cs.remove(line)`` while iterating ``cs``,
    which skips the element after each removal — consecutive matching rows
    survived the delete. Building a filtered copy handles all matches, and
    no longer raises IndexError on empty rows.

    Args:
        uid: account id to delete (compared as a string against column 0).
    """
    with open(twitter_file_name, newline="") as f:
        rows = list(csv.reader(f))
    remaining = [row for row in rows if not row or row[0] != uid]
    save_to_csv(remaining, twitter_file_name)
