from django.http import HttpResponse, Http404, HttpResponseRedirect
from django.template import TemplateDoesNotExist, RequestContext
from django.views.generic.simple import direct_to_template #about_pages view
from django.shortcuts import render_to_response
#from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
#from django.contrib.auth.models import User
from ktool.models import *
from settings import MEDIA_ROOT
from django import forms
from django.db.models import Q
import string 
import networkx as nx
import pydot
#import pylab as pl
#import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as pl
from xml.dom import minidom

class searchform(forms.Form):
    """Main search form: organism name, id list / file upload, pathway filter.

    NOTE(review): the pathway choices are built once, at class-definition
    (import) time, so new Pathway rows only appear after a server restart.
    """
    plist = Pathway.objects.all()
    p_options = [(0, 'all')]  # '0' sentinel means "all pathways" (no filter)
    for a in plist:
        p_options.append((a.id, a.name))
    orgname = forms.CharField(label="orgname", required=False)
#    ko=forms.BooleanField(initial=1,required=False)
#    ec=forms.BooleanField(required=False)
    # which kind of ids the user supplies in 'inlist' or the uploaded file
    # (label typos "entre"/leading space fixed; the values are unchanged)
    inputoptions = [('rlist', 'enter reaction IDs'),
                    ('elist', 'enter EC numbers'),
                    ('klist', 'enter KO IDs')]
#option for different results (save or show in the next page), should do it in the result page, choose to save in
#different formats or visualize
#create a temporary file on the server for information transfer between pages
    input = forms.ChoiceField(label="inoptions", choices=inputoptions, required=False)
    inlist = forms.CharField(label="inlist", required=False)
    pa = forms.MultipleChoiceField(label="select pathways", choices=p_options, initial=0, required=False)
    file = forms.FileField(required=False)
#    pa=forms.ChoiceField(label="select pathways", widget=forms.SelectMultiple, choices=p_options)

def searchnew(request):
    """Search reactions by organism (KEGG data files) or by a user id list.

    POST parameter ``orgoption`` selects the branch:
      * "inkegg" - read per-organism reaction-id files under MEDIA_ROOT
        (KO- and/or EC-based, per the 'ko'/'ec' checkboxes in the POST).
      * "nokegg" - take ids from the 'inlist' field or an uploaded file;
        the 'input' choice says whether they are reaction/EC/KO ids.
    The matching Reaction queryset is optionally restricted to the selected
    pathways, stored in request.session['r'] and rendered via krea.html.
    Any failure re-renders ksearch.html with an ``err`` message (the template
    context is ``locals()``, so the local names here are part of the contract).
    """
    if request.method != 'POST':
        # Plain GET: show an unbound form. (The old code bound request.POST
        # here, which on GET is empty and made the form render with errors.)
        f = searchform()
        err = "please enter either organism names or submit your own id lists!"
        return render_to_response('ksearch.html', locals(), context_instance=RequestContext(request))
    f = searchform(request.POST, request.FILES)
    if not f.is_valid():
        err = "sth wrong"
        return render_to_response('ksearch.html', locals(), context_instance=RequestContext(request))
    errorg = []  # organisms whose data files were missing
    orgoption = request.POST.get('orgoption', '')
    if orgoption == "inkegg":
        s = f.cleaned_data["orgname"]
        if s == "all":
            # Prebuilt file listing every reaction id, one per line.
            ids = []
            with open(MEDIA_ROOT + 'reas/all.rea', 'r') as rfile:
                for line in rfile:
                    ids.append(line[:6])  # ids are 6 chars; also drops the trailing "\n"
            objs = Reaction.objects.filter(reactionid__in=ids).distinct()
        elif s == "all2":
            objs = Reaction.objects.all()
        else:
            orgs = s.split(",")  # comma-separated list -> network for merged organisms
            ids = []
            if request.POST.get('ko', '') == "on":  # KO-based reaction lists
                for org in orgs:
                    try:
                        with open(MEDIA_ROOT + 'reako/' + org + 'korea.txt', 'r') as rfile:
                            for line in rfile:
                                ids.append(line[:6])
                    except IOError:
                        errorg.append(org)
            if request.POST.get('ec', '') == "on":  # EC-based reaction lists
                for org in orgs:
                    try:
                        with open(MEDIA_ROOT + 'reaec/' + org + 'ecrea.txt', 'r') as rfile:
                            for line in rfile:
                                ids.append(line[:6])
                    except IOError:
                        errorg.append(org)
            ids = list(set(ids))  # remove repeats
            objs = Reaction.objects.filter(reactionid__in=ids).distinct()
    elif orgoption == "nokegg":
        if f.cleaned_data["inlist"]:
            ids = f.cleaned_data["inlist"].split(",")
        elif f.cleaned_data["file"]:
            ids = []
            fixed_width = f.cleaned_data["input"] in ("rlist", "klist")
            for line in request.FILES['file'].readlines():
                if fixed_width:
                    ids.append(line[:6])  # reaction/KO ids are 6 chars
                else:
                    # EC numbers vary in length: strip only the line ending.
                    # (The old slice id[:len(id)-2] ate a real character on
                    # files with bare "\n" line endings.)
                    ids.append(line.rstrip("\r\n"))
        else:
            err = "please enter either organism names or submit your own id lists!"
            return render_to_response('ksearch.html', locals(), context_instance=RequestContext(request))
        if ids:
            if f.cleaned_data["input"] == "rlist":
                objs = Reaction.objects.filter(reactionid__in=ids).distinct()
            elif f.cleaned_data["input"] == "elist":
                objs = Reaction.objects.filter(rea_ec__enzyme__in=ids).distinct()
            else:
                objs = Reaction.objects.filter(reako__ko__in=ids).distinct()
        else:
            err = "please enter either organism names or submit your own id lists!"
            return render_to_response('ksearch.html', locals(), context_instance=RequestContext(request))
    else:
        err = "please enter either organism names or submit your own id lists!"
        return render_to_response('ksearch.html', locals(), context_instance=RequestContext(request))
    if objs:
        if errorg:
            errorg = list(set(errorg))
            eos = " ".join(errorg) + " "
            err = "organisms " + eos + "not found in database"
            return render_to_response('ksearch.html', locals(), context_instance=RequestContext(request))
        if f.cleaned_data["pa"]:
            # pathway choice '0' means "all" -- no extra filtering
            if not f.cleaned_data["pa"][0] == '0':
                objs = objs.filter(rea_path__path__in=f.cleaned_data["pa"]).distinct()
        request.session['r'] = objs
        return render_to_response('krea.html', {'reaction_list': objs})
    err = "no reaction found, please check the organism name or your id list and make sure to check KO based or EC based checkbox!"
    return render_to_response('ksearch.html', locals(), context_instance=RequestContext(request))


def rlisttxt(request):
    """Export the session's reaction list as a tab-separated text attachment.

    Writes rlist.txt under MEDIA_ROOT (kept server-side on purpose) and then
    streams it back as a download. Files are now closed via ``with`` blocks;
    the original leaked both handles.
    """
    reas = request.session['r']
    with open(MEDIA_ROOT + 'rlist.txt', "w") as rf:
        rf.write("id\treversibile\tequation\tequationid\n")
        for rea in reas:
            rf.write(rea.reactionid + "\t" + rea.reversibility + "\t" + rea.equation + "\t" + rea.eq_in_id + "\n")
    with open(MEDIA_ROOT + 'rlist.txt', "rb") as fh:
        rlist = fh.read()
    response = HttpResponse(rlist, mimetype="text/plain")
    response['Content-Disposition'] = 'attachment; filename=realist.txt'
    return response

def rlistsbml(request):
    """Export the session's reactions as an SBML (level 2, version 1) file.

    Parses each reaction's id-based equation ("c1 + 2 c2 = c3 ...") into
    reactants and products; species are created lazily, once each, in the
    single shared compartment. Reactions whose equation contains 'n'
    (symbolic stoichiometry) are emitted without participants.
    """
    import libsbml as ls
    doc = ls.SBMLDocument(2, 1)
    m = doc.createModel()
    reas = request.session['r']
    spes = []  # species ids already added to the model
    cp = m.createCompartment()
    cp.setId("compartment")
    cp.setName(cp.getId())
    cp.setSize(1)

    def _add_side(terms, make_ref):
        # terms: one side of the equation split on '+'; each item is either
        # "Cxxxxx" (stoichiometry 1) or "<coef> Cxxxxx".
        # make_ref: r.createReactant or r.createProduct (dedupes the two
        # identical blocks the original had).
        for term in terms:
            b = term.strip().split(' ')
            if len(b) == 1:
                sto = 1.0
                species = str(b[0])[:6]
            else:
                sto = float(b[0])
                species = str(b[1])[:6]
            sr = make_ref()
            sr.setStoichiometry(sto)
            sr.setSpecies(species)
            if species not in spes:
                spes.append(species)
                s = m.createSpecies()
                s.setId(species)
                com = Compound.objects.get(compoundid=species)
                s.setName(str(com.name))
                s.setCompartment(cp.getId())

    i = 1
    for rea in reas:
        r = m.createReaction()
        r.setId("r" + str(i))
        i = i + 1
        r.setName(str(rea.reactionid))
        # NOTE(review): elsewhere 'T' marks a reversible reaction; comparing
        # against '' looks suspicious but is preserved -- confirm intent.
        r.setReversible(rea.reversibility == '')
        if not 'n' in rea.eq_in_id:
            a = rea.eq_in_id.split('=')
            _add_side(a[0].strip().split('+'), r.createReactant)
            _add_side(a[1].strip().split('+'), r.createProduct)
    sbml = ls.writeSBMLToString(doc)  # no temp file on the server
    response = HttpResponse(sbml, mimetype="text/xml")
    response['Content-Disposition'] = 'attachment; filename=reas.xml'
    return response

def stoimatrix(request):
    """Export the stoichiometric matrix (reaction/compound/coefficient rows)
    for the session's reactions as a tab-separated text attachment.

    Files are closed via ``with`` blocks; the original leaked both handles.
    """
    reas = request.session['r']
    stoi = Rea_comp.objects.filter(reaction__in=reas)
    with open(MEDIA_ROOT + 'stoi.txt', "w") as sf:
        sf.write("reaction id\tcompound id\tcoefficient\treversibile\n")
        for s in stoi:
            sf.write(s.reaction.reactionid + "\t" + s.compound.compoundid + "\t" + s.coefficient + "\t" + s.reversibility + "\n")
    with open(MEDIA_ROOT + 'stoi.txt', "rb") as fh:
        stm = fh.read()
    response = HttpResponse(stm, mimetype="text/plain")
    response['Content-Disposition'] = 'attachment; filename=stoi.txt'
    return response

def complist(request):
    """Export the distinct compounds of the session's reactions (KEGG id and
    name) as a tab-separated text attachment.

    Uses a set for de-duplication (the original scanned a list per row) and
    closes files via ``with``; output order is still first-occurrence order.
    """
    reas = request.session['r']
    stoi = Rea_comp.objects.filter(reaction__in=reas)
    seen = set()  # compound ids already written
    with open(MEDIA_ROOT + 'compl.txt', "w") as sf:
        sf.write("KEGG id\tname\n")
        for s in stoi:
            c = s.compound.compoundid
            if c not in seen:
                seen.add(c)
                sf.write(c + "\t" + s.compound.name + "\n")
    with open(MEDIA_ROOT + 'compl.txt', "rb") as fh:
        stm = fh.read()
    response = HttpResponse(stm, mimetype="text/plain")
    response['Content-Disposition'] = 'attachment; filename=complist.txt'
    return response

def metgraph(request,method): 
    """Build the metabolite graph for the session's reactions and show stats.

    method "mz" uses Met_link pairs, anything else uses Rpairs. Edges carry
    the reaction id as label plus URL/tooltip attributes consumed later by
    graphviz rendering; reversible reactions ('T') also get the back edge.
    Stores the graph in request.session['mgraph'] and renders kglink.html
    via locals() (so the local names here are part of the template contract),
    or kmetlink0.html when the graph is empty.
    """
    gtype='m' #met graph
    reas=request.session['r']
    if method=="mz":
        metlinks=Met_link.objects.filter(rea__in=reas)
    else:
        metlinks=Rpairs.objects.filter(rea__in=reas)
    G=nx.MultiDiGraph()
#    Gd = pydot.Dot(graph_type='digraph')
#    mls=[]
    for ml in metlinks:
        rev=ml.rea.reversibility
        rid=ml.rea.reactionid
        req=ml.rea.equation.replace(':',';') #the dot file has trouble to read ":"#        mls.append((ml.com1,ml.com2,ml.rea,rev))
#        mls.append((ml.com1.name,ml.com2.name,ml.rea,rev))
        #Creating a new list with extra columns including information from other tables
        #is one way to transfer multiple table information to the template
        #another way is to creat a function in the model class which returns a value
        #based on data from related table
        #for this case, the best way may be to include the reversibility column in the table
#        G.add_edge(ml.com1,ml.com2)
        G.add_edge(ml.com1.compoundid,ml.com2.compoundid,label=rid,URL="/kegg/rn/"+rid,tooltip=req,labeltooltip=ml.rea.eq_in_id,target='_blank')
#        G.add_edge(ml.com1.name,ml.com2.name,lable=ml.rea.reactionid)
#        e=pydot.Edge(ml.com1.name,ml.com2.name,label=ml.rea.reactionid,URL='http://www.kegg.jp/dbget-bin/www_bget?rn:'+ml.rea.reactionid)
#        e=pydot.Edge(ml.com1.name,ml.com2.name)
#        e.set_label(ml.rea.reactionid)
#        e.set_URL('http://www.kegg.jp/dbget-bin/www_bget?rn:'+ml.rea.reactionid)
#        Gd.add_edge(e)
        if rev=="T": #reversible reactions
#          G.add_edge(ml.com1,ml.com2)
          G.add_edge(ml.com2.compoundid,ml.com1.compoundid,label=rid,URL="/kegg/rn/"+rid,tooltip=req,labeltooltip=ml.rea.eq_in_id,target='_blank')
#          G.add_edge(ml.com2.name,ml.com1.name,lable=ml.rea.reactionid)
#          e=pydot.Edge(ml.com2.name,ml.com1.name)
#          e.set_label(ml.rea.reactionid)
#          e.set_URL('http://www.kegg.jp/dbget-bin/www_bget?rn:'+ml.rea.reactionid)
#          Gd.add_edge(e)
    nnode=G.number_of_nodes()
    if nnode>0:
      nlink=G.size()
      # old networkx API: returns dict-of-dicts {source: {target: length}}
      pls=nx.all_pairs_shortest_path_length(G)
      pl=[]
      for pls1 in pls.values():
          pl+=pls1.values()
      apl=float(sum(pl))/float(len(pl)-nnode) #because alsi include path length to itself which is 0.
#      al=nx.average_shortest_path_length(G) #wrong because also consider the unconnected
      UG=G.to_undirected()
 #   cluster=nx.average_clustering(UG) #not for mutiple graph
      con=nx.is_connected(UG)
      request.session['mgraph']=G
#    request.session['mgraphdot']=Gd
      return render_to_response('kglink.html', locals())
    else:
      return render_to_response('kmetlink0.html', locals())

def mgraphvisnew(request):
    """Render the session's metabolite graph through graphviz (via pydot).

    Reconstructed: in the original source several statements had been
    swallowed into trailing comments or jammed onto one line (the
    ``GN=request.session['mgraph']`` assignment, the ``else:`` after the
    invalid-form return, the node-size setters, and the ``G.write`` call),
    which made the function a NameError/SyntaxError. Logic restored as the
    comments and the sibling views (rgraphvis/bgraphvis) indicate.

    Writes MEDIA_ROOT/mg<prog>.<format> and renders kdotviewnew.html with
    the display size (svg sizes are read back out of the generated file).
    """
    GN = request.session['mgraph']
    G = nx.to_pydot(GN)
    G.set_concentrate("true")
    if request.method == 'GET':
        f = netviewform(request.GET)
        if not f.is_valid():
            return render_to_response('kdotviewnew.html', {'form': f})
        # empty optional choices fall back to defaults
        fformat = f.cleaned_data["fformat"] or 'svg'
        prog = f.cleaned_data["prog"] or 'dot'
        if not f.cleaned_data["rlabel"]:
            for e in G.get_edge_list():
                e.set_label("")
                e.set_arrowsize("0.4")
        for n in G.get_node_list():
            cid = n.get_name()
            comp = Compound.objects.get(compoundid=cid)
            cname = comp.name.replace(':', ';')  # dot files have trouble with ":"
            n.set_URL("/kegg/cpd/" + cid)
            n.set_tooltip(cname)
            n.set_target('_blank')
            if not f.cleaned_data["mlabel"]:
                # unlabelled nodes are drawn as small fixed-size dots
                size = f.cleaned_data["size"] or "0.05"
                n.set_label("")
                n.set_fixedsize("1")
                n.set_height(size)
                n.set_width(size)
        G.write(MEDIA_ROOT + 'mg' + prog + '.' + fformat, prog=prog, format=fformat)
        if fformat == "svg":
            # read the rendered size back so the template can scale the view
            with open(MEDIA_ROOT + 'mg' + prog + '.svg', "r") as svgfile:
                xml_response = svgfile.read()
            dom = minidom.parseString(xml_response)
            svg = dom.childNodes[3]
            w = round(float(svg.getAttribute('width').strip('pt')) * 1.4)
            h = round(float(svg.getAttribute('height').strip('pt')) * 1.4)
        else:
            w = "1200"
            h = "800"
        return render_to_response("kdotviewnew.html", {'form': f, 'prog': prog, 'format': fformat, 'w': w, 'h': h})
        
def reagraph(request,method): 
    """Build the reaction graph (reactions as nodes, shared compounds as
    edge labels) for the session's reactions and show connectivity stats.

    method "mz" uses Reagraphmz links, anything else Reagraphkr. Renders
    kglink.html via locals() (local names are the template context), or
    kmetlink0.html when the graph is empty.
    """
    gtype='r' #rgraph
    reas=request.session['r']
    if method=="mz":
        links=Reagraphmz.objects.filter(rea1__in=reas, rea2__in=reas)
    else:
        links=Reagraphkr.objects.filter(rea1__in=reas, rea2__in=reas)
    G=nx.MultiDiGraph()
    for l in links:
        G.add_edge(l.rea1.reactionid,l.rea2.reactionid,label=l.compound.compoundid)
    nnode=G.number_of_nodes()
    if nnode>0:
      nlink=G.size()
      # old networkx API: dict-of-dicts {source: {target: length}}
      pls=nx.all_pairs_shortest_path_length(G)
      pl=[]
      for pls1 in pls.values():
          pl+=pls1.values()
      apl=float(sum(pl))/float(len(pl)-nnode) #because alsi include path length to itself which is 0.
      UG=G.to_undirected()
      con=nx.is_connected(UG)
      request.session['mgraph']=G #save in the same graph session so that can use the same view for metabolite, reaction and bipartite graphs
      return render_to_response('kglink.html', locals())
    else:
      return render_to_response('kmetlink0.html', locals())

def rgraphvis(request):
    """Render the session's reaction graph through graphviz (via pydot).

    Reconstructed: the original def line and first comment had swallowed the
    ``GN=...``/``G=nx.to_pydot(GN)``/``set_concentrate`` statements, the
    ``else:`` after the invalid-form return was lost, and the node-size
    setters plus the ``G.write`` call were jammed onto one line. Restored
    to mirror mgraphvisnew, with reaction nodes (rlabel controls node
    labels, mlabel controls edge labels here).
    """
    GN = request.session['mgraph']
    G = nx.to_pydot(GN)
    G.set_concentrate("true")
    if request.method == 'GET':
        f = netviewform(request.GET)
        if not f.is_valid():
            return render_to_response('kdotviewnew.html', {'form': f})
        fformat = f.cleaned_data["fformat"] or 'svg'
        prog = f.cleaned_data["prog"] or 'dot'
        if not f.cleaned_data["mlabel"]:
            for e in G.get_edge_list():
                e.set_label("")
                e.set_arrowsize("0.4")
        for n in G.get_node_list():
            rid = n.get_name()
            n.set_URL("/kegg/rn/" + rid)
            n.set_target('_blank')
            if not f.cleaned_data["rlabel"]:
                size = f.cleaned_data["size"] or "0.05"
                n.set_label("")
                n.set_fixedsize("1")
                n.set_height(size)
                n.set_width(size)
        G.write(MEDIA_ROOT + 'mg' + prog + '.' + fformat, prog=prog, format=fformat)
        if fformat == "svg":
            with open(MEDIA_ROOT + 'mg' + prog + '.svg', "r") as svgfile:
                xml_response = svgfile.read()
            dom = minidom.parseString(xml_response)
            svg = dom.childNodes[3]
            w = round(float(svg.getAttribute('width').strip('pt')) * 1.4)
            h = round(float(svg.getAttribute('height').strip('pt')) * 1.4)
        else:
            w = "1200"
            h = "800"
        return render_to_response("kdotviewnew.html", {'form': f, 'prog': prog, 'format': fformat, 'w': w, 'h': h})

def bipgraph(request,method): 
    """Build the bipartite compound/reaction graph for the session's
    reactions and show connectivity stats.

    Each link adds compound->reaction->compound edges; reversible reactions
    ('T') get the reverse pair too. Renders kglink.html via locals(), or
    kmetlink0.html when the graph is empty.
    """
    gtype='b' #bipartite graph
    reas=request.session['r']
    if method=="mz":
        links=Met_link.objects.filter(rea__in=reas)
    else:
        links=Rpairs.objects.filter(rea__in=reas)
    G=nx.MultiDiGraph()
    for l in links:
        G.add_edge(l.com1.compoundid,l.rea.reactionid)
        G.add_edge(l.rea.reactionid,l.com2.compoundid)
        if l.rea.reversibility=="T": #reversible reactions
           G.add_edge(l.com2.compoundid,l.rea.reactionid)
           G.add_edge(l.rea.reactionid,l.com1.compoundid)
    nnode=G.number_of_nodes()
    if nnode>0:
      nlink=G.size()
      # old networkx API: dict-of-dicts {source: {target: length}}
      pls=nx.all_pairs_shortest_path_length(G)
      pl=[]
      for pls1 in pls.values():
          pl+=pls1.values()
      apl=float(sum(pl))/float(len(pl)-nnode) #because alsi include path length to itself which is 0.
      UG=G.to_undirected()
      con=nx.is_connected(UG)
      request.session['mgraph']=G #save in the same graph session so that can use the same view for metabolite, reaction and bipartite graphs
      return render_to_response('kglink.html', locals())
    else:
      return render_to_response('kmetlink0.html', locals())

def bgraphvis(request):
    """Render the session's bipartite graph through graphviz (via pydot).

    Reconstructed: in the original, pairs of statements were jammed onto
    single lines (L459/460), the ``else:``/first ``if`` after the
    invalid-form return were swallowed into a comment, and the node-size
    setters plus ``G.write`` shared one line. Restored to mirror the other
    *graphvis views. Node names starting with 'R' are reactions, the rest
    compounds, which decides the KEGG link target.
    """
    GN = request.session['mgraph']
    G = nx.to_pydot(GN)
    G.set_concentrate("true")
    if request.method == 'GET':
        f = netviewform(request.GET)
        if not f.is_valid():
            return render_to_response('kdotviewb.html', {'form': f})
        fformat = f.cleaned_data["fformat"] or 'svg'
        prog = f.cleaned_data["prog"] or 'dot'
        if not f.cleaned_data["mlabel"]:
            for e in G.get_edge_list():
                e.set_arrowsize("0.4")
        for n in G.get_node_list():
            rcid = n.get_name()
            if rcid[0] == 'R':
                n.set_URL("/kegg/rn/" + rcid)
            else:
                n.set_URL("/kegg/cpd/" + rcid)
            n.set_target('_blank')
            if not f.cleaned_data["rlabel"]:
                size = f.cleaned_data["size"] or "0.05"
                n.set_label("")
                n.set_fixedsize("1")
                n.set_height(size)
                n.set_width(size)
        G.write(MEDIA_ROOT + 'mg' + prog + '.' + fformat, prog=prog, format=fformat)
        if fformat == "svg":
            with open(MEDIA_ROOT + 'mg' + prog + '.svg', "r") as svgfile:
                xml_response = svgfile.read()
            dom = minidom.parseString(xml_response)
            svg = dom.childNodes[3]
            w = round(float(svg.getAttribute('width').strip('pt')) * 1.4)
            h = round(float(svg.getAttribute('height').strip('pt')) * 1.4)
        else:
            w = "1200"
            h = "800"
        return render_to_response("kdotviewnew.html", {'form': f, 'prog': prog, 'format': fformat, 'w': w, 'h': h})

def cytoscapelayout(request,method): 
    """Lay out the session's compound links with a local Cytoscape instance.

    Talks XML-RPC to a Cytoscape plugin expected on localhost:9000 (fails
    with a connection error otherwise), runs the Fruchterman-Rheingold
    layout, exports a JPEG under MEDIA_ROOT and streams it back.
    NOTE(review): reversible back-edges were commented out; only the
    forward com1->com2 direction is sent.
    """
    import xmlrpclib
    server=xmlrpclib.ServerProxy("http://localhost:9000")
    networkid =server.Cytoscape.createNetwork('testnet')
    reas=request.session['r']
    if method=="mz":
        links=Met_link.objects.filter(rea__in=reas)
    else:
        links=Rpairs.objects.filter(rea__in=reas)
    startn=[]
    endn=[]
    for l in links:
        startn.append(l.com1.compoundid)
        endn.append(l.com2.compoundid)
#        if l.rea.reversibility=="T": #reversible reactions#           startn.append(l.com2.compoundid)
#           endn.append(l.com1.compoundid)
    server.Cytoscape.createEdgesFromVector(startn,endn)
#    server.Cytoscape.performLayout(networkid,'force-directed')
#    server.Cytoscape.performLayout(networkid,'Kamada-Kawai-Noweight') #text tool small
    server.Cytoscape.performLayout(networkid,'Fruchterman-Rheingold')
    server.Cytoscape.exportView(MEDIA_ROOT+"test.jpg","jpg",2.0)
    image=open(MEDIA_ROOT+'test.jpg',"rb").read()
    return HttpResponse(image, mimetype="image/jpg")

def mgraphvisold(request):
    """Legacy renderer: dump the session graph straight to SVG with node and
    edge tooltips only (no form-driven options).

    The SVG read handle is now closed via ``with``; the original leaked it.
    """
    import pydot
    G = request.session['mgraph']
    Gd = nx.to_pydot(G)
    for n in Gd.get_node_list():
        n.set_tooltip(n.get_name())
    for e in Gd.get_edge_list():
        e.set_tooltip(e.get_source())
    Gd.write(MEDIA_ROOT + 'mg.svg', prog='dot', format='svg')
    with open(MEDIA_ROOT + 'mg.svg', "rb") as fh:
        image = fh.read()
    return HttpResponse(image, mimetype="image/svg+xml")
#    return render_to_response('kmetlink.html', locals())

class netviewform(forms.Form):
    """Options for graphviz rendering: output format, layout program,
    whether to show reaction/metabolite labels, and the node size."""
    foptions = [(ext, ext) for ext in ('svg', 'png', 'gif', 'jpg')]
    poptions = [(p, p) for p in ('dot', 'neato', 'twopi', 'fdp')]
    fformat = forms.ChoiceField(label="format", choices=foptions, required=False)
    prog = forms.ChoiceField(label="program", choices=poptions, required=False)
    rlabel = forms.BooleanField(label="reaction label", required=False)
    mlabel = forms.BooleanField(label="metabolite label", required=False)
    size = forms.CharField(label="node size", help_text='0.02 to 0.5. or type 1 to fit node label', required=False)

def mgraphvis(request):
    """Older metabolite-graph renderer that builds a pydot graph directly
    from Met_link rows (compound names as nodes) and writes MEDIA_ROOT/mg.<fmt>.

    To blank node labels it rewrites the dot text by hand: it splices a
    ``node [label="\\N"];`` default plus the node list into the dot source
    and re-parses it, because setting labels through the pydot API did not
    take effect (see the commented-out attempt below).
    """
#    import pydot
    reas=request.session['r']
    metlinks=Met_link.objects.filter(rea__in=reas)
#    Gd = pydot.Dot(graph_type='digraph')
    G = pydot.Dot()
    if request.method == 'GET':
        f = netviewform(request.GET)
        if not f.is_valid():
           return render_to_response('dotviewk.html', {'form': f}) #do sth else 
        else:
           # empty optional choices fall back to svg/dot defaults
           if f.cleaned_data["fformat"]:
              fformat=f.cleaned_data["fformat"]
           else:   
              fformat='svg'
           if f.cleaned_data["prog"]:
              prog=f.cleaned_data["prog"]
           else:   
              prog='dot'
           for link in metlinks:
              e=pydot.Edge(link.com1.name,link.com2.name)
              if f.cleaned_data["rlabel"]:
                 e.set_label(link.rea.reactionid)
              e.set_URL("/kegg/rn/"+link.rea.reactionid)
              e.set_tooltip(link.rea.eq_in_id)
 #             e.set_labeltooltip(link.rea.equation)
              G.add_edge(e)
           if not f.cleaned_data["mlabel"]:
              # hand-splice the dot text: insert a node default + node list
              # right after the opening '{' and re-parse the result
              s=G.to_string()
              p=s.find('{')
              dotf=open(MEDIA_ROOT+'mg.dot', 'w')
              dotf.write(s[0:p+1])
              dotf.write('node [label=\"\\N\"];')
              nlist=G.get_node_list()
              for node in nlist:
#                 n=pydot.Node(node.get_name())
#                 n.set_label(" ") #not change at all
#                 G.add_node(n)
                 dotf.write(node.get_name())
              dotf.write(s[p+2:])
              dotf.close()
              G=pydot.graph_from_dot_file(MEDIA_ROOT+'mg.dot')
           G.write(MEDIA_ROOT+'mg.'+fformat, prog=prog,format=fformat)
#           if fformat=='svg':
#              image=open('c:/human/media/mg.svg',"rb").read()
#              return HttpResponse(image, mimetype="image/svg+xml")
#           else: 
           return render_to_response("dotviewk.html", {'form': f,'format': fformat})

    
def degreevis(request):
    """Visualize node degrees as node colors/sizes in a graphviz rendering.

    Reconstructed -- the original could not run: it referenced undefined
    names (``reas``, ``n2``, ``s`` in the render context), used the typo
    ``f.clean_data`` instead of ``f.cleaned_data``, and added pydot edges
    and nodes to the *networkx* graph ``G`` instead of the pydot graph
    ``Gd``. Intent restored: read the networkx graph and the degree mapping
    from the session, build a pydot digraph with degree-driven node
    size/color, write MEDIA_ROOT/mg.<fmt> and re-render the form page.
    """
    G = request.session['mgraph']
    # dict lets us look a node's degree up directly by name
    d = dict(request.session['degree'])
    Gd = pydot.Dot(graph_type='digraph')
    if request.method == 'GET':
        f = netviewform(request.GET)
        if not f.is_valid():
            return render_to_response('dotviewk.html', {'form': f})
        fformat = f.cleaned_data["fformat"] or 'svg'
        prog = 'dot'
        for edge in G.edges(data=True):
            e = pydot.Edge(edge[0], edge[1], URL="/kegg/rn/" + edge[2]['label'])
            if f.cleaned_data["rlabel"]:
                e.set_label(edge[2]['label'])
            Gd.add_edge(e)  # was G.add_edge: pydot edge into the networkx graph
        for node in G.nodes():
            if f.cleaned_data["size"]:  # was f.clean_data (AttributeError)
                # scale node area with its degree
                size = 0.02 * d[node]
                n1 = pydot.Node(node, fixedsize="1", height=size, width=size,
                                color=d[node], URL="/kegg/rn/" + node)
            else:
                n1 = pydot.Node(node, color=d[node], URL="/kegg/rn/" + node)
            if not f.cleaned_data["mlabel"]:
                n1.set_label(" ")
            Gd.add_node(n1)
        Gd.write(MEDIA_ROOT + 'mg.' + fformat, prog=prog, format=fformat)
        return render_to_response("dotviewk.html", {'form': f, 'format': fformat})
    
def degdis(request,option):
    """Compute and plot the degree distribution of the session graph.

    option "input"/"output" uses in-/out-degree, anything else total degree.
    Saves a histogram PNG under MEDIA_ROOT, stores the sorted
    (node, degree) list in session['attribute'] for the export views, and
    renders degree.html via locals().
    """
    #using G.subgraph(ns) to get subgrahs
    #and G.remove_nodes_from(ns) to remove nodes
    import operator #check if this is necessary
    from numpy import arange
    G=request.session['mgraph']
#    deg=G.degree(with_lables=True) #a list or dictionary of node degrees
#maybe easy to convert dic to list
#    dorder=sorted(deg.iteritems(), key=operator.itemgetter(1), reverse=True)
    #iteritems only provide values when requested."key" tells sort based on which element
#or
    if option=="input":
      deg=G.in_degree_iter() #return an iterator directly, saving memory
      request.session['attrname']='input degree'
    elif option=="output":
      deg=G.out_degree_iter() #return an iterator directly, saving memory
      request.session['attrname']='Output degree'
    else:
#      dhis=nx.degree_histogram(G) #try this later, a list of degree frequences (0 to max)
#      d=nx.degree(G) #what is the difference between nx.degree(G) and G.degree()
      deg=G.degree_iter() #return an iterator directly, saving memory
      request.session['attrname']='all degree'
    # sort (node, degree) pairs by degree, highest first
    dorder=sorted(deg, key=operator.itemgetter(1), reverse=True)
    dm=dorder[0][1]  # maximum degree
    met, d = zip(*dorder)#to transpose a list
#or x2,y2=map(None,*dorder) #None is the function applied
    bin=arange(0,dm+2,1) #need dm+2 rather than dm+1 to get correct result
    pl.clf()
#    fig=pl.figure()
#    pl.hist(deg,bin,width=1)
    h=pl.hist(d,bin)
    ddis=list(h[0])  # histogram counts, passed to the template via locals()
    pl.savefig(MEDIA_ROOT+"degreedis.png", dpi=72)
    request.session['attribute']=dorder
    return render_to_response('degree.html', locals())

def attcyto(request):
    """Export the session's node attribute list in Cytoscape attribute
    format ("node = value" lines under the attribute-name header).

    Files are closed via ``with`` blocks; the original leaked both handles.
    """
    with open(MEDIA_ROOT + 'attribute.txt', "w") as rf:
        rf.write(request.session['attrname'] + "\n")
        for n, a in request.session['attribute']:
            rf.write(str(n) + " = " + str(a) + "\n")
    with open(MEDIA_ROOT + 'attribute.txt', "rb") as fh:
        f = fh.read()
    response = HttpResponse(f, mimetype="text/plain")
    response['Content-Disposition'] = 'attachment; filename=attribute.txt'
    return response

def atttxt(request):
    """Export the session's node attribute list as tab-separated text.

    Files are closed via ``with`` blocks; the original leaked both handles.
    """
    with open(MEDIA_ROOT + 'attrtxt.txt', "w") as rf:
        rf.write(request.session['attrname'] + "\n")
        for n, a in request.session['attribute']:
            rf.write(str(n) + "\t" + str(a) + "\n")
    with open(MEDIA_ROOT + 'attrtxt.txt', "rb") as fh:
        f = fh.read()
    response = HttpResponse(f, mimetype="text/plain")
    response['Content-Disposition'] = 'attachment; filename=attrtxt.txt'
    return response
        
def centrality(request,option):
    """Compute a node (or edge) centrality measure on the session graph.

    Most measures run on the undirected projection of the stored multigraph.
    Results are sorted by value (descending), cached in the session for the
    attribute-download views, and rendered via centrality.html (which sees
    `option` and `c` through locals()).
    """
    import operator
    G=request.session['mgraph']
    if option=='closeness':
        c=nx.closeness_centrality(G.to_undirected()).iteritems()
        request.session['attrname']='closeness centrality'
    elif option=='betweeness':
        c=nx.betweenness_centrality(G.to_undirected()).iteritems()
        request.session['attrname']='betweenness centrality'
    elif option=='eccentracity':
        c=nx.eccentricity(G.to_undirected()).iteritems() #needs a connected network
        request.session['attrname']='eccentracity'
    elif option=='load':
        c=nx.load_centrality(G.to_undirected()).iteritems()
        request.session['attrname']='load centrality'
    elif option=='eigen vector':
        # rebuild as a plain undirected Graph: collapses parallel edges so
        # eigenvector centrality can be computed (G stays local; the session
        # copy is untouched)
        es=G.edges()
        G=nx.Graph()
        G.add_edges_from(es)
        c=nx.eigenvector_centrality(G).iteritems()
        request.session['attrname']='eigen vector centrality'
    elif option=='ebetweeness':
        option="edge betweeness" #display name used by the template
        c=nx.edge_betweenness(G.to_undirected()).iteritems()
        # bug fix: this assignment was commented out, so the download views
        # labelled edge-betweenness results with the previous measure's name
        request.session['attrname']='edge betweenness centrality'
    else:
        option="degree"
        c=nx.degree_centrality(G.to_undirected()).iteritems() #default measure
        request.session['attrname']='degree centrality'
    c=sorted(c, key=operator.itemgetter(1), reverse=True)
    request.session['attribute']=c
    return render_to_response('centrality.html', locals())

def connectivity(request,option):
    """Connectivity analysis of the session metabolic graph.

    option:
      'wcomp'     - weakly connected components
      'scomp'     - strongly connected components
      'bowtie'    - bow-tie decomposition: GSC, input set, output set, isolated set
      'indomain'  - per-node input-domain size (nodes that can reach the node)
      'outdomain' - per-node output-domain size (nodes reachable from the node)

    Builds `ns` ([node, group/size] pairs) and `ndis` (distribution), caches
    `ns` in the session for the attribute-download views, and renders
    connectivity.html with ns/ndis/att via locals(). An unrecognised option
    leaves ns/att undefined and raises NameError, so callers must pass one
    of the options above.

    Fix: "Weaklyly" typo in the weak-components display strings.
    """
    import operator
    G=request.session['mgraph']
    if option=='wcomp':
        con=nx.connected_components(G.to_undirected()) #list of lists of nodes
        i=1
        ns=[]
        ndis=[]
        for com in con:
            ndis.append([i,len(com)])
            for node in com:
                ns.append([node, i])
            i=i+1
        att=['Weakly connected components','component']
        request.session['attrname']='Weakly connected components'
    elif option=='scomp':
        con=nx.strongly_connected_components(G) #list of lists of nodes
        i=1
        ns=[]
        ndis=[]
        for com in con:
            ndis.append([i,len(com)])
            for node in com:
                ns.append([node, i])
            i=i+1
        att=['Strongly connected components','component']
        request.session['attrname']='Strongly connected components'
    elif option=='bowtie':
        sc=nx.strongly_connected_components(G)
        gsc=sc[0] #largest strongly connected component (GSC)
        n=gsc[0] #any representative node inside the GSC
        # nodes reachable from n (output domain) and that can reach n (input domain)
        outdm=set(nx.single_source_shortest_path_length(G,n).keys())
        indm=set(nx.single_source_shortest_path_length(G.reverse(),n).keys())
        outs=list(outdm-indm) #set algebra gives the pure output/input subsets
        ins=list(indm-outdm)
        ns=set(G.nodes())
        isset=list(ns-(indm|outdm)) #isolated: in neither domain
        con=[gsc,ins,outs,isset]
        i=1
        ns=[]
        ndis=[]
        for com in con:
            ndis.append([i,len(com)])
            for node in com:
                ns.append([node, i])
            i=i+1
        att=['Bow tie structure', 'subset']
        request.session['attrname']='bow tie structure'
    elif option=='indomain':
        ns=[]
        size=[]
        for n in G.nodes():
            # BFS on the reversed graph counts every node that can reach n
            dsize=len(nx.single_source_shortest_path_length(G.reverse(),n))
            ns.append([n,dsize])
            size.append(dsize)
        ns=sorted(ns, key=operator.itemgetter(1), reverse=True)
        m=ns[0][1]
        ndis=[[i,size.count(i)] for i in range(1,m+1)]
        att=['Input domain', 'domain size']
        request.session['attrname']='Input domain'
    elif option=='outdomain':
        ns=[]
        size=[]
        for n in G.nodes():
            dsize=len(nx.single_source_shortest_path_length(G,n))
            ns.append([n,dsize])
            size.append(dsize)
        ns=sorted(ns, key=operator.itemgetter(1), reverse=True)
        m=ns[0][1]
        ndis=[[i,size.count(i)] for i in range(1,m+1)]
        att=['Output domain', 'domain size']
        request.session['attrname']='Output domain'
    request.session['attribute']=ns
    return render_to_response('connectivity.html', locals())

def savegraph(request,option):
    """Serialize the session graph in the requested format and return it
    as a plain-text download.

    option: 'pajek', 'dot', 'gml', 'yaml'; anything else falls back to a
    bare edge list. The file is also left on disk under MEDIA_ROOT.

    Fix: read the file back with a context manager (handle was leaked).
    """
    G=request.session['mgraph']
    if option=='pajek':
        fname="metnet.net"
        nx.write_pajek(G, MEDIA_ROOT+fname)
    elif option=='dot':
        fname="metnet.dot"
        Gd=nx.to_pydot(G)
        Gd.write(MEDIA_ROOT+fname, prog='dot',format='raw')
    elif option=='gml':
        fname="metnet.gml"
        nx.write_gml(G, MEDIA_ROOT+fname)
    elif option=='yaml':
        fname="metnet.yaml"
        nx.write_yaml(G, MEDIA_ROOT+fname)
    else:
        fname="metnet.txt"
        nx.write_edgelist(G, MEDIA_ROOT+fname,data=False) #without reaction labels
    with open(MEDIA_ROOT+fname,"rb") as fh:
        net=fh.read()
    response=HttpResponse(net, mimetype="text/plain")
    response['Content-Disposition']='attachment; filename='+fname
    return response

def opengraph(request,option): #need to be updated
    """Placeholder "open in browser" view.

    The body was a byte-for-byte copy of savegraph(); delegate to it instead
    so the serialization logic lives in one place. Replace this delegation
    once a real in-browser viewer is implemented.
    """
    return savegraph(request, option)
    
def keggview(request, op, id):
    """Redirect the browser to the KEGG DBGET entry page for `op:id`
    (e.g. op='cpd' or 'rn')."""
    url = "http://www.kegg.jp/dbget-bin/www_bget?" + op + ":" + id
    return HttpResponseRedirect(url)

class pathform(forms.Form): #search shortest paths
    """Two-field form for the shortest-path views: source and target
    KEGG compound IDs. Blank values are handled by the view itself."""
    com1=forms.CharField(label="source compound", required=False)
    com2=forms.CharField(label="target compound", required=False)

def shortpathold(request): #search shortest paths
    """Render ONE shortest path between two compounds as an SVG graph.

    Older variant kept for reference: uses nx.shortest_path (a single path)
    and re-queries Met_link to label each edge with its reaction ID.

    Fixes: the last path node's tooltip looked up p[i] (second-to-last
    compound) instead of p[-1]; the view fell through with no return for
    non-GET requests; the SVG file handle was never closed.
    """
    from django.core.exceptions import ObjectDoesNotExist
    if request.method != 'GET':
        # previously fell off the end and returned None (Django error page)
        f = pathform()
        err = "sth wrong"
        return render_to_response('kpath.html', locals())
    f = pathform(request.GET)
    if not f.is_valid():
        err = "sth wrong"
        return render_to_response('kpath.html', locals())
    c1id = f.cleaned_data["com1"]
    c2id = f.cleaned_data["com2"]
    try:
        c1 = Compound.objects.get(compoundid=c1id)
        c2 = Compound.objects.get(compoundid=c2id)
    except ObjectDoesNotExist:
        err = "can not find the compound, check your id"
        return render_to_response('kpath.html', locals())
    G = request.session['mgraph']
    if not (c1id in G and c2id in G):
        err = "the compounds are not in the network"
        return render_to_response('kpath.html', locals())
    Gd = pydot.Dot()
    p = nx.shortest_path(G, c1id, c2id)
    for i in range(len(p) - 1):
        # forward links between consecutive path compounds
        metlinks = Met_link.objects.filter(com1=p[i]).filter(com2=p[i + 1])
        n = pydot.Node(p[i])
        n.set_URL("/kegg/cpd/" + p[i])
        cn = Compound.objects.get(compoundid=p[i]).name
        n.set_tooltip(cn)
        Gd.add_node(n)
        for link in metlinks:
            e = pydot.Edge(link.com1.compoundid, link.com2.compoundid)
            e.set_label(link.rea.reactionid)
            e.set_URL("/kegg/rn/" + link.rea.reactionid)
            Gd.add_edge(e)
        # links stored in the reverse direction (reversible reactions)
        metlinks = Met_link.objects.filter(com2=p[i]).filter(com1=p[i + 1])
        for link in metlinks:
            # NOTE(review): reversibility appears to be a "T"/"F" string
            # elsewhere in this file, which makes this condition always
            # false for non-empty flags -- verify the intended check
            if not link.rea.reversibility:
                e = pydot.Edge(link.com1.compoundid, link.com2.compoundid)
                e.set_label(link.rea.reactionid)
                e.set_URL("/kegg/rn/" + link.rea.reactionid)
                Gd.add_edge(e)
    n = pydot.Node(p[-1]) #the last node on the path
    n.set_URL("/kegg/cpd/" + p[-1])
    # bug fix: tooltip previously used p[i] (the second-to-last node)
    cn = Compound.objects.get(compoundid=p[-1]).name
    n.set_tooltip(cn)
    Gd.add_node(n)
    Gd.write(MEDIA_ROOT + 'path.svg', prog="dot", format="svg")
    with open(MEDIA_ROOT + 'path.svg', "rb") as fh:
        image = fh.read()
    return HttpResponse(image, mimetype="image/svg+xml")

def shortpath(request): #avoid repeat reaction using breadth first search, paths as list of edges with rea data
    """Find ALL shortest directed paths between two compounds and render SVG.

    Runs a breadth-first search over the session multigraph ('mgraph') that
    refuses to extend a path with the same reaction label as its last edge
    (avoids traversing one reversible reaction back and forth). Paths are
    lists of [source, target, reaction-label] edges. All shortest paths found
    in the terminating BFS level are drawn into one pydot graph, written as
    PNG and SVG under MEDIA_ROOT, and the SVG is returned inline.

    NOTE(review): falls through with no return when request.method != 'GET'.
    """
#    from django.core.exceptions import ObjectDoesNotExist
#    fps2=open("c:/human/media/ps2.txt","w") for debuging
    if request.method == 'GET':
      f = pathform(request.GET)
      if not f.is_valid():
           err="sth wrong"
           return render_to_response('kpath.html', locals()) #do sth else 
      else:
        c1id=f.cleaned_data["com1"]
        c2id=f.cleaned_data["com2"]
        G=request.session['mgraph']
        n=G.nodes()
        flag=0   # set to 1 once any path to c2id is found; ends the BFS after this level
        paths=[] # completed shortest paths (each a list of [src, dst, label] edges)
#        t=a
        if c1id=="" or c2id=="":
            err="Please input the source and targe compound IDs"
            return render_to_response('kpath.html', locals())
        elif c1id in G and c2id in G:
          if c1id==c2id:
            err="The source compound is the same with the targe compound, the path length is zero."
            return render_to_response('kpath.html', locals())
          else:    
            ns=[c1id]+G.neighbors(c1id) #ns for searched nodes
#            t=a
            if c2id in ns:
              # target is a direct neighbour: one parallel edge per reaction label
              rs=G.get_edge_data(c1id,c2id).values()
              for r in rs:
                path=[[c1id,c2id,r['label']]]
                paths.append(path)
            else:    
              ps=G.out_edges(c1id,data=True) #just use ps
              ps2=[] #for the searched paths
              for p in ps:
                ps2.append([[p[0],p[1],p[2]['label']]])  #change from a list of turple to a list of list
#              x=y  
              while flag==0: #stop after find a path to c2, may be two or more paths
                newnodes=[] #for new nodes from all the existing nodes, while "newns" for new nodes from a specific node
                newps=[] #new paths to be found
                for p in ps2:
#                  newns=G.neighbors(p[-1][1])
#                  newnodes=newnodes+newns
                  # expand each frontier path from its last edge's target node
                  newedges=G.out_edges(p[-1][1],data=True)
                  if newedges: #possible no out links
                    for ne in newedges:
                      r=ne[2]['label']
                      # skip the reaction we just arrived by (no immediate repeat)
                      if not r==p[-1][2]:
                        if ne[1]==c2id:
                          path=p+[[ne[0],ne[1],r]] #list of list
                          paths.append(path)
                          flag=1
                        elif not ne[1] in ns: #neighbours may include nodes already found (loop)
                          newp=p+[[ne[0],ne[1],r]] #list of list
                          newps.append(newp)
                          if not ne[1] in newnodes:
                            newnodes.append(ne[1])
#                  if c2id in newns: #need to check, as maybe target in newns but not change flag as repeat rea. just do a through search
#                      rs=G.get_edge_data(p[-1][1],c2id).values()
#                      for r in rs:
#                        if not r['lable']==p[-1][2]:
#                          flag=1
#                          path=p+[[p[-1][1],c2id,r['lable']]] #list of list
#                          paths.append(path)
#                  else:
                ps2=newps
#                fps2.write(str(ps2)+"\n")
                if newnodes: 
                   ns=ns+newnodes #change ns after searching for all nodes to find multiple shortest paths
                elif flag==0: #no more new nodes found and no path found
                   err="No directed path found from "+c1id+" to "+c2id
                   return render_to_response('kpath.html', locals()) #do sth else 
            # -- drawing: merge all found paths into one pydot graph --
            Gd=pydot.Dot()
#            fps2.close()
            edges=[] #to avoid repeat edges
            nodes=[]
            for p in paths: #should keep rea info in paths, then will not need to search metlink again.
              for e in p:
                if not e in edges:
                  edges.append(e)
                  rea=Reaction.objects.get(reactionid=e[2])
                  edot=pydot.Edge(e[0],e[1],label=e[2],tooltip=rea.eq_in_id,labeltooltip=rea.equation)
                  edot.set_URL("/kegg/rn/"+e[2])
                  Gd.add_edge(edot)
                  if not e[0] in nodes:
                    nodes.append(e[0]) 
                    node=pydot.Node(e[0],URL="/kegg/cpd/"+e[0])
                    cname=Compound.objects.get(compoundid=e[0]).name
                    node.set_label(cname)
#                    node.set_tooltip(cname)
                    Gd.add_node(node)
#            nodes.append(c2id) 
            # the target node is never an edge source above, so add it explicitly
            node=pydot.Node(c2id,URL="/kegg/cpd/"+c2id)
            cname=Compound.objects.get(compoundid=c2id).name
            node.set_label(cname)
#            node.set_tooltip(cname)
            Gd.add_node(node)
            Gd.write(MEDIA_ROOT+'path.png', prog="dot",format="png")
            Gd.write(MEDIA_ROOT+'path.svg', prog="dot",format="svg")
            image=open(MEDIA_ROOT+'path.svg',"rb").read()
            return HttpResponse(image, mimetype="image/svg+xml")
        else:
            err="one of the compound or both are not in the network, please check the ids and make sure they are in your generated network."
            return render_to_response('kpath.html', locals())

#These views should be login controlled
def _stoich_term(term, sign):
    """Parse one equation term ('C00001' or '2 C00001') into (coef, compound id).

    `sign` is -1.0 for substrates and +1.0 for products; a missing or
    non-numeric coefficient defaults to 1.0 (times sign). Coefficient and
    compound ID are separated by a space.
    """
    b = term.strip().split(' ')
    if len(b) == 1:
        return sign * 1.0, str(b[0])
    try:
        coef = sign * float(b[0])
    except ValueError:
        coef = sign * 1.0
    return coef, str(b[1])

def reacomp(request): #used to update the rea-com table (derived table) based on the reaction equations. activated by
    #call http://csb.inf.ed.ac.uk/kneva/update/reacomp
    """Rebuild the Rea_comp stoichiometry table from the reaction equations.

    Bug fix: product coefficients were negated (`sto=-float(b[0])`) and then
    unconditionally re-parsed after the except handler, so non-numeric
    coefficients raised an uncaught ValueError and numeric ones got the wrong
    sign. Both equation sides now use one parser with the correct sign.
    """
    reas = Reaction.objects.all()
    Rea_comp.objects.all().delete()  # clear the derived table before rebuilding
    i = 1
    for rea in reas:
        if rea.excluded != "":  # some polymer reactions are excluded
            continue
        if 'n' in rea.eq_in_id:  # skip reactions with symbolic coefficients n, n+1, ...
            continue
        sides = rea.eq_in_id.split('=')
        # substrates (left side) get negative coefficients, products positive
        for side, sign in ((sides[0], -1.0), (sides[1], 1.0)):
            for term in side.strip().split('+'):
                coef, species = _stoich_term(term, sign)
                rc = Rea_comp()
                rc.id = i
                rc.reaction = rea
                rc.compound = Compound.objects.get(compoundid=species)
                rc.coefficient = str(coef)
                rc.reversibility = rea.reversibility
                rc.save()
                i = i + 1
    return render_to_response('thanks.html', locals())

def addreversibility(request): #used to add reversibility information in the metlink and rpair table activated by
    #call http://csb.inf.ed.ac.uk/kneva/update/reversibility
    """Copy each row's reaction reversibility flag onto the Met_link and
    Rpairs tables (denormalized for faster filtering), then render a
    confirmation page."""
    for table in (Met_link, Rpairs):
        mls = table.objects.all()
        for ml in mls:
            res = ml.rea.reversibility
            ml.reversibility = res
            ml.save()
    return render_to_response('thanks.html', locals())

def reagraph1mz(request): #used to create reaction graph based on metlinks. activated by
    #call http://csb.inf.ed.ac.uk/kneva/update/rgraphmz
    """Rebuild the Reagraphmz reaction-graph table from Met_link rows.

    Two reactions are linked when one produces a compound the other consumes;
    reversible links (reversibility == "T") additionally connect in both
    directions. Each shared compound is processed once via `clist`.

    Changes: removed the unused `res` local (one wasted DB hit per row) and
    switched to keyword construction, consistent with reagraph2kr().
    """
    Reagraphmz.objects.all().delete()  # clear contents in the table
    mls = Met_link.objects.all()
    i = 1
    clist = []  # compound ids already processed
    for ml in mls:
        com = ml.com1
        if com.compoundid in clist:
            continue
        clist.append(com.compoundid)
        rin = Met_link.objects.filter(com1=com)          # links consuming com
        rres1 = rin.filter(reversibility="T")            # reversible subset
        rout = Met_link.objects.filter(com2=com)         # links producing com
        rres2 = rout.filter(reversibility="T")
        # producer -> consumer edges
        for r2 in rin:
            for r1 in rout:
                rg = Reagraphmz(id=i, rea1=r1.rea, rea2=r2.rea, compound=com)
                rg.save()
                i = i + 1
        # reversible consumers also act as producers
        for r2 in rin:
            for r1 in rres1:
                if r1 != r2:
                    rg = Reagraphmz(id=i, rea1=r1.rea, rea2=r2.rea, compound=com)
                    rg.save()
                    i = i + 1
        # reversible producers also act as consumers
        for r2 in rres2:
            for r1 in rout:
                if r1 != r2:
                    rg = Reagraphmz(id=i, rea1=r1.rea, rea2=r2.rea, compound=com)
                    rg.save()
                    i = i + 1
    return render_to_response('thanks.html', locals())

def reagraph2kr(request): #used to create reaction graph based on rpairs. activated by
    #call http://csb.inf.ed.ac.uk/kneva/update/rgraphkr
    """Rebuild the Reagraphkr reaction-graph table from Rpairs rows.

    Mirrors reagraph1mz() but uses KEGG RPAIR main-pairs instead of all
    metabolite links. Removed the unused `res` local (one wasted DB hit
    per row).
    """
    Reagraphkr.objects.all().delete()  # clear contents in the table
    mls = Rpairs.objects.all()
    i = 1
    clist = []  # compound ids already processed
    for ml in mls:
        com = ml.com1
        if com.compoundid in clist:
            continue
        clist.append(com.compoundid)
        rin = Rpairs.objects.filter(com1=com)       # pairs consuming com
        rres1 = rin.filter(reversibility="T")       # reversible subset
        rout = Rpairs.objects.filter(com2=com)      # pairs producing com
        rres2 = rout.filter(reversibility="T")
        for r2 in rin:
            for r1 in rout:
                rg = Reagraphkr(id=i, rea1=r1.rea, rea2=r2.rea, compound=com)
                rg.save()
                i = i + 1
        for r2 in rin:
            for r1 in rres1:
                if r1 != r2:
                    rg = Reagraphkr(id=i, rea1=r1.rea, rea2=r2.rea, compound=com)
                    rg.save()
                    i = i + 1
        for r2 in rres2:
            for r1 in rout:
                if r1 != r2:
                    rg = Reagraphkr(id=i, rea1=r1.rea, rea2=r2.rea, compound=com)
                    rg.save()
                    i = i + 1
    return render_to_response('thanks.html', locals())

def fba(request): #flux balance analysis for a set of reactions
    """Export the stoichiometric matrix (long format) for the session's
    reaction set as a tab-separated attachment:
    reaction_id<TAB>compound_id<TAB>coefficient<TAB>reversibility.

    Fixes: file handles are closed via context managers; dropped the
    redundant Reaction.objects.get() -- s.reaction is already the Reaction
    this row points at.
    """
    reas = request.session['r']
    stoi = Rea_comp.objects.filter(reaction__in=reas)
    with open(MEDIA_ROOT + 'stoi.txt', "w") as sf:
        for s in stoi:
            rea = s.reaction
            sf.write(rea.reactionid + "\t" + s.compound.compoundid + "\t" + s.coefficient + "\t" + rea.reversibility + "\n")
    with open(MEDIA_ROOT + 'stoi.txt', "rb") as fh:
        stm = fh.read()
    response = HttpResponse(stm, mimetype="text/plain")
    response['Content-Disposition'] = 'attachment; filename=stoi.txt'
    return response

def reaview(request):
    """Render the reaction list cached in the session by search()/compare().

    Passes an explicit context instead of locals(), consistent with how
    search() renders the same krea.html template.
    """
    reaction_list = request.session['r']
    return render_to_response('krea.html', {'reaction_list': reaction_list})
    
def search(request):
    """Main reaction-search view.

    POST: build a Reaction queryset from either a comma-separated organism
    list ('all' = every reaction in the reaction-organism table), a pasted
    ID list, or an uploaded file of IDs (reaction/EC/KO per the 'input'
    choice); optionally restrict it to the selected pathways; cache it in
    the session and render the result page.
    GET: show the unbound search form.

    Fixes: newline stripping used id[:len(id)-2], which chops a real
    character on Unix "\\n"-terminated files (now rstrip("\\r\\n")); the GET
    branch bound the form to empty POST data, producing spurious validation
    errors (now unbound); deprecated string.split replaced by str.split.
    """
    if request.method != 'POST':
        f = searchform()  # unbound form: no validation errors on first view
        err = "please enter organism names or your id list!"
        return render_to_response('ksearch.html', locals())
    f = searchform(request.POST, request.FILES)
    if not f.is_valid():
        err = "sth wrong"
        return render_to_response('ksearch.html', locals())
    if f.cleaned_data["orgname"]:
        s = f.cleaned_data["orgname"]
        if s == "all":
            # every reaction present in the reaction-organism table
            objs = Reaction.objects.filter(reaorg__reaction__reactionid__startswith="R").distinct()
        else:
            orgs = s.split(",")  # merged network for several organisms
            objs = Reaction.objects.filter(reaorg__org__in=orgs).distinct()
    elif f.cleaned_data["input"]:
        if f.cleaned_data["inlist"]:
            ids = f.cleaned_data["inlist"].split(",")
        elif f.cleaned_data["file"]:
            ids = []
            ss = request.FILES['file'].readlines()
            if f.cleaned_data["input"] in ("rlist", "klist"):
                for id in ss:
                    ids.append(id[:6])  # reaction/KO ids are exactly 6 characters
            else:
                for id in ss:
                    # strip the line terminator without eating real characters
                    ids.append(id.rstrip("\r\n"))
        else:
            err = "please enter organism names or submit your own id list!!"
            return render_to_response('ksearch.html', locals())
        if ids:
            if f.cleaned_data["input"] == "rlist":
                objs = Reaction.objects.filter(reactionid__in=ids)
            elif f.cleaned_data["input"] == "elist":
                objs = Reaction.objects.filter(rea_ec__enzyme__in=ids)
            else:
                objs = Reaction.objects.filter(reako__ko__in=ids)
        else:
            err = "please enter organism names or submit your own id list!!!"
            return render_to_response('ksearch.html', locals())
    else:
        err = "please enter organism names or submit your own id list!"
        return render_to_response('ksearch.html', locals())
    if objs:
        if f.cleaned_data["pa"]:
            # pathway id '0' means "all pathways" -> no extra filter
            if not f.cleaned_data["pa"][0][0] == '0':
                objs = objs.filter(rea_path__path__in=f.cleaned_data["pa"]).distinct()
        request.session['r'] = objs
        return render_to_response('krea.html', {'reaction_list': objs})
    else:
        err = "no reaction found, please check the organism name or your id list!"
        return render_to_response('ksearch.html', locals())
#generate reaction list for selected organism or selected pathways
#save as text file or SBML, save stoichiometric matrix for flux analysis (later as require determine external mets)
#generate met-link list and visualize
#options for rea list 

class compareform(forms.Form): #compare reaction sets of up to three organisms / uploaded ID lists
    # free-text organism codes; the view currently only uses orgname1
    orgname1=forms.CharField(label="orgname", required=False)
    orgname2=forms.CharField(label="orgname", required=False)
    orgname3=forms.CharField(label="orgname", required=False)
    # what kind of IDs each uploaded file contains
    inputoptions=[('rlist','submit a reaction list'),('elist','submit an EC number list'),('klist','submit a KO list')]
    input1=forms.ChoiceField(label="inoptions", choices=inputoptions,required=False)
    # NOTE(review): unlike searchform.file, these lack required=False, so all
    # three files are mandatory for the form to validate -- confirm intent
    file1  = forms.FileField()
    input2=forms.ChoiceField(label="inoptions", choices=inputoptions,required=False)
    file2  = forms.FileField()
    input3=forms.ChoiceField(label="inoptions", choices=inputoptions,required=False)
    file3  = forms.FileField()

def compare(request):
    """Comparison view (work in progress): builds the reaction set for the
    FIRST organism/file only; orgname2/3 and file2/3 are not yet used.

    Fixes: the EC/KO branches referenced an undefined name `ids` (now ids1);
    the final result block referenced undefined `objs` (now objs1) -- both
    were guaranteed NameErrors; the GET branch instantiated the wrong form
    class (searchform) bound to POST data and rendered the search template;
    newline stripping no longer chops a real character on "\\n" endings.
    """
    if request.method != 'POST':
        f = compareform()  # unbound compare form for the first page view
        err = "please enter organism names or your id list!"
        return render_to_response('kcomp.html', locals())
    f = compareform(request.POST, request.FILES)
    if not f.is_valid():
        return render_to_response('kcomp.html', locals())
    if f.cleaned_data["orgname1"]:
        org = f.cleaned_data["orgname1"]
        # a flat list of reaction primary keys
        objs1 = Reaction.objects.filter(reaorg__org=org).values_list('id', flat=True)
    elif f.cleaned_data["input1"]:
        if f.cleaned_data["file1"]:
            ids1 = []
            for id in request.FILES['file1'].readlines():
                # strip the line terminator without eating real characters
                ids1.append(id.rstrip("\r\n"))
        else:
            err = "please enter organism names or submit your own id list!!"
            return render_to_response('ksearch.html', locals())
        if ids1:
            if f.cleaned_data["input1"] == "rlist":
                objs1 = ids1  # already reaction IDs
            elif f.cleaned_data["input1"] == "elist":
                objs1 = Reaction.objects.filter(rea_ec__enzyme__in=ids1).values_list('id', flat=True)
            else:
                objs1 = Reaction.objects.filter(reako__ko__in=ids1).values_list('id', flat=True)
        else:
            err = "please enter organism names or submit your own id list!!!"
            return render_to_response('ksearch.html', locals())
    else:
        err = "please enter organism names or submit your own id list!"
        return render_to_response('ksearch.html', locals())
    if objs1:
        request.session['r'] = objs1
        return render_to_response('krea.html', {'reaction_list': objs1})
    else:
        err = "no reaction found, please check the organism name or your id list!"
        return render_to_response('ksearch.html', locals())

