import tables
import os
import astro.pyvpfit
from astro.utilities import Struct
from matplotlib.mlab import rec_append_fields
import matplotlib.pyplot as pl
import calc_group
import glob
import numpy as np
import cPickle
#from enthought.mayavi import mlab

# run this command to mount the cosmas disk on /media/cosma
#   sshfs nhmc@charon.dur.ac.uk:/ /media/cosma

Ckms = 3.0e5
GAL_MASS_CUT = 0.5 # lower cutoff for galaxies to include in triple
                   # groups, in units of 10**10 stellar masses
MIN_NHI_CUT = 13.5 # minimum log10(column density) to include line in
                   # triple groups.

def readh5(name, nsp, safe=11.0, debug=False):
    """ Read in GIMIC short spectra info from an hdf5 file.
    
    Safe is safe distance for statistics from the spectrum midpoint. 
    """
    # read in the x and y positions of the spectra
    #print 'Opening hdf5 file'
    picklename = name.replace('/','_') + '.pickle'
    try:
        fh = open('pickled/' + picklename)
    except IOError:
        print 'No pickled file found: reading from cosma disk'
    else:
        print 'Reading pickled info'
        h5info = cPickle.load(fh)
        fh.close()
        print 'finished reading pickled info'
        return h5info

    if debug:
        print 'Reading data from %s' % name
    fh = tables.openFile(name)
    Om = fh.getNodeAttr('/Header','Omega0')
    Ol = fh.getNodeAttr('/Header','OmegaLambda')
    if nsp is None:
        nsp = fh.getNodeAttr('/Parameters/SpecWizardRuntimeParameters',
                             'NumberOfSpectra')
    nspec = range(1,nsp+1)
    htot = [fh.getNode('/Spectrum%s/h1/LogTotalIonColumnDensity' % i).read()
            for i in nspec]
    x = [fh.getNodeAttr('/Spectrum%s' % i,'X-position') for i in nspec]
    y = [fh.getNodeAttr('/Spectrum%s' % i,'Y-position') for i in nspec]
    z = [fh.getNodeAttr('/Spectrum%s' % i,'Z-position') for i in nspec]
    thetas = [fh.getNodeAttr('/Spectrum%s' % i,'theta') for i in nspec]
    phis = [fh.getNodeAttr('/Spectrum%s' % i,'phi') for i in nspec]
    for i in range(len(x)):
        if (thetas[i], phis[i]) == (0,0):
            zmid = z[i]
        elif (thetas[i], phis[i]) == (90,0):
            xmid = x[i]
        elif (thetas[i], phis[i]) == (90,90):
            ymid = y[i]
   
    #taus = [fh.getNode('/Spectrum%s/h1/OpticalDepth' % i).read() for i in nspec]
    x,y,z,htot,thetas,phis = map(np.array,(x,y,z,htot,thetas,phis))
    vel = fh.getNode('/VHubble_KMpS').read()
    redshift = fh.getNodeAttr('/Header','Redshift')
    vmid = vel.mean()
    
    scalefac = 1 / (1+redshift)
    Hz = 100.0 * np.sqrt(Om*(1+redshift)**3 + Ol)
    # convert velocity scale to distance scale
    dist = vel / (Hz * scalefac)               # v = Hd      (= HaX)
    xv = dist + xmid - dist[vel.searchsorted(vmid)]
    yv = dist + ymid - dist[vel.searchsorted(vmid)]
    zv = dist + zmid - dist[vel.searchsorted(vmid)]
    i0,i1 = zv.searchsorted([zmid - 11.0, zmid + 11.0])
    safedv = 11.0 * Hz / (1+redshift)
    h5info = Struct(x=x, y=y, z=z, theta=thetas, phi=phis,
                    xv=xv, yv=yv, zv=zv, xmid=xmid, ymid=ymid, zmid=zmid,
                    redshift=redshift, vel=vel, isafe=(i0,i1),# tau=taus,
                    safedv=safedv, number=np.array(nspec),
                    Om=Om, Ol=Ol, logntot=htot)
    if debug: print 'Finished reading data'
    fh.close()
    print 'pickling to pickled/%s to avoid having to read it later' % picklename
    fh = open('pickled/' + picklename, 'wb')
    cPickle.dump(h5info, fh, protocol=2)
    fh.close()
    return h5info

def readgals(filename, redshift, xmid, ymid, zmid, Om, Ol):
    """ Read in the info about galaxies."""
    #read in galaxies
    localname = filename.split('/')[-1]
    print localname
    try:
        temp = np.loadtxt('gals/' + localname, unpack=1)
    except IOError:
        print 'local file %s not present, trying remote version' % localname
        temp = np.loadtxt(filename, unpack=1)
    gals = np.rec.fromarrays(temp,names='x,y,z,vx,vy,vz,mstel,mtot')
    # the z positions pf galaxies, including offsets due to z velocities
    Hz = 100.0 * np.sqrt(Om*(1.+redshift)**3 + Ol)
    scalefac = 1. / (1.+redshift)
    # divide by scale factor = 1/(1+z) to get comoving distances from proper
    galxv = gals.x + gals.vx / (Hz * scalefac)
    galyv = gals.y + gals.vy / (Hz * scalefac)
    galzv = gals.z + gals.vz / (Hz * scalefac)
    gals =  rec_append_fields(gals,['xv','yv','zv'],[galxv,galyv,galzv])
    # now need proper distance  (for v = Hr relation)
    dv = scalefac * (gals.xv-xmid) * Hz
    redshiftx = redshift + dv / Ckms * (1 + redshift)
    dv = scalefac * (gals.yv-ymid) * Hz
    redshifty = redshift + dv / Ckms * (1 + redshift)
    dv = scalefac * (gals.zv-zmid) * Hz
    redshiftz = redshift + dv / Ckms * (1 + redshift)
    gals = rec_append_fields(gals, ['redshiftx','redshifty','redshiftz'],
                             [redshiftx, redshifty, redshiftz])
    return gals

def readmodels(dirname,nspec):
    """ Reads in the f26 model info from a directory.
    """
    f26 = []
    for n in nspec:
        #print 'reading', 'h1', 'from', dirname
        filename = 'runA_h1_%03i.f26' % n
        #print 'reading', filename
        vp = astro.pyvpfit.VpfitModel()
        try:
            vp.readf26(os.path.join(dirname,filename))
        except:
            print 'skipping %s' % os.path.join(dirname,filename)
        f26.append(vp)

    return f26
    

def select_lines(lines, redshift, safedv, debug=False):
    """ Select lines from the f26 model linelists that we can compare
    to lines in the FOS spectrum.

    Keep only lines within +/- safedv km/s of the midpoint `redshift`
    satisfying logN > 12.5, lognerr < 0.3, berr < 0.4*b, b < 100 and
    with strictly positive fit errors.  Returns the filtered record
    array.
    """
    # NOTE: an older version of this cut used logN > 13 and b < 80;
    # the actual cuts applied are the ones coded below.
    # velocity of each line relative to the midpoint redshift
    vel = (lines.z - redshift) / (1+redshift) * Ckms
    c0 = (-safedv < vel) & (vel < safedv)
    #c1 = lines.logn > 13.5
    c1 = lines.logn > 12.5
    c2 = lines.lognerr < 0.3
    c3 = lines.berr/lines.b < 0.4
    c4 = lines.b < 100.
    # non-positive errors mean the parameter was unconstrained by the fit
    c5 = (lines.berr > 0) & (lines.zerr > 0) & (lines.lognerr > 0)
    temp = lines[c0 & c1 & c2 & c3 & c4 & c5]
    if debug:  print 'Rejected', len(lines) - len(temp), 'of', len(lines)
    return temp

def tripinfo(h5, gals0, mincol=MIN_NHI_CUT):
    """ Group the LOS triples, add to them all the FOS-detectable
    absorbers and galaxies within some detectable range of the triple.
    Then find the number of groups.

    Parameters
    ----------
    h5 : Struct
      Spectra info from readh5(), with an `f26` attribute attached
      (list of VpfitModel, see readmodels()).
    gals0 : record array
      Galaxies (already mass-cut) from readgals().
    mincol : float
      Minimum log10 N(HI) for a line to be used in group finding.

    Returns a list of Structs, one per triple of consecutive
    sightlines.
    """
    
    trips = []
    # sightlines come in consecutive groups of three
    for i in range(len(h5.x)/3):
        # skip the triple if any of its three spectra has no fitted lines
        if None in [h5.f26[3*i+j].lines for j in range(3)]:
            print 'skipping %i-%i' % (3*i,3*i+3)
            continue
        # ind converts the 1-based spectrum numbers to 0-based indices
        trip = Struct(x=h5.x[3*i:3*i+3], y=h5.y[3*i:3*i+3], z=h5.z[3*i:3*i+3],
                      theta=h5.theta[3*i], phi=h5.phi[3*i],
                      ind=h5.number[3*i:3*i+3]-1 )
        trip.xc = trip.x.mean()
        trip.yc = trip.y.mean()
        trip.zc = trip.z.mean()        
        trip.name = ['Spectrum%s' % (i+1) for i in trip.ind]
        trips.append(trip)

    for t in trips:
        # 3. select absorbers above some column density or equivalent
        # width limit (also that are inside the safe region!).
        t.la = select_lines(h5.f26[t.ind[0]].lines, h5.redshift, h5.safedv)
        t.lb = select_lines(h5.f26[t.ind[1]].lines, h5.redshift, h5.safedv)
        t.lc = select_lines(h5.f26[t.ind[2]].lines, h5.redshift, h5.safedv)
        # select gals within 2 Mpc of the sight-line 'centre', using
        # the two coordinates perpendicular to the sightline direction.
        if (t.theta,t.phi) == (0,0):
            sep = np.hypot(gals0.x - t.xc, gals0.y - t.yc)
        elif (t.theta,t.phi) == (90,0):
            sep = np.hypot(gals0.y - t.yc, gals0.z - t.zc)
        elif (t.theta,t.phi) == (90,90):
            sep = np.hypot(gals0.x - t.xc, gals0.z - t.zc)
        gals1 = gals0[sep < 2.0]
        #print len(gals1), ' gals nearby'
        t.neargals = gals1
        if len(gals1) > 0:
            # 5. run group-finding routine  real
            # lines + gals
            laz = t.la.z[t.la.logn > mincol]
            lbz = t.lb.z[t.lb.logn > mincol]
            lcz = t.lc.z[t.lc.logn > mincol]
            # galaxy redshift along the sightline axis
            if (t.theta,t.phi)   == (0,  0):  gredshift = gals1.redshiftz
            elif (t.theta,t.phi) == (90, 0):  gredshift = gals1.redshiftx
            elif (t.theta,t.phi) == (90,90):  gredshift = gals1.redshifty
            temp = calc_group.find_groups(gredshift,laz,lbz,lcz,vdiff=1000)
            t.groups1000 = list(temp)
            t.ngroups = calc_group.nabs_group(temp)
        else:
            t.groups1000 = []
            t.ngroups = (0,0,0,0)

    return trips
 
def randtrip(triples, safedv, redshift, nran=1):
    """ Generate triples populated with random lines. This is slow.

    Parameters
    ----------
    triples : list of Structs
      Real triples from tripinfo(); used for the total line count, the
      nearby galaxies and the sightline orientation.
    safedv : float
      Velocity half-width (km/s) within which random lines are placed.
    redshift : float
      Spectrum midpoint redshift.
    nran : int
      Number of random realisations.

    Returns a list of `nran` lists of random triples, each carrying
    the same group-statistics attributes as the real triples.
    """
    # find line density
    numlines = []
    for t in triples:
        numlines.append(len(t.la))
        numlines.append(len(t.lb))
        numlines.append(len(t.lc))
    
    # NOTE(review): lines_per_spec is computed but never used.
    lines_per_spec = round(sum(numlines) / float(3*len(triples)))
    rsets = []
    # total lines is sum(numlines). so for all spectra, should have
    # this many lines spread around them.
    for i in range(nran):
        if not i % 50:  print i
        # populate the spectra with random lines; Poisson-fluctuate the
        # total number around the observed total
        n = np.random.poisson(sum(numlines))
        rtriples = [Struct() for j in range(len(triples))]
        for t in rtriples:
            t.laz = []
            t.lbz = []
            t.lcz = []
        numran = 0
        while numran < n:
            # pick spectrum into which to put line
            ind0 = np.random.randint(0,len(triples))
            ind1 = np.random.randint(0,3)
            # uniform velocity within the safe region, converted to a
            # redshift
            vel = safedv * 2 * (np.random.rand()-0.5)
            ranz = vel / Ckms * (1+redshift) + redshift
            keys = ['laz','lbz','lcz']
            getattr(rtriples[ind0], keys[ind1]).append(ranz)
            numran += 1
        
        for r,t in zip(rtriples,triples):
            # the random triple shares the real triple's nearby galaxies
            r.neargals = t.neargals
            if len(r.neargals) > 0:
                # galaxy redshift along the sightline axis
                if (t.theta, t.phi)   == (0,  0):
                    gredshift = r.neargals.redshiftz
                elif (t.theta, t.phi) == (90, 0):
                    gredshift = r.neargals.redshiftx
                elif (t.theta, t.phi) == (90,90):
                    gredshift = r.neargals.redshifty
                temp = calc_group.find_groups(gredshift,r.laz,r.lbz,r.lcz,
                                              vdiff=1000)
                r.groups1000 = list(temp)
                r.ngroups = calc_group.nabs_group(temp)
            else:
                r.groups1000 = []
                r.ngroups = (0,0,0,0)
        rsets.append(rtriples)
    return rsets

def calc_voff(trip):
    """ Find the velocity offsets between galaxies and absorbers in
    groups.

    Returns a Struct with `deltavs` (flat list of all offsets) and
    `deltavspergal` (one tuple of offsets per galaxy).
    """
    result = Struct()
    result.deltavs = []
    result.deltavspergal = []
    ngal = 0
    for t in trip:
        if t.groups1000 == []:
            continue
        for group in t.groups1000:
            # one entry per sightline, keyed 'a', 'b', 'c'
            for key in 'abc':
                entries = group[key]
                if entries != []:
                    ngal += 1
                    # second element of each entry is the velocity offset
                    offsets = zip(*entries)[1]
                    result.deltavspergal.append(offsets)
                    result.deltavs.extend(offsets)
    return result

def calc_randvoff(rsets):
    """ Find the vel offsets between galaxies and absorbers in groups
    for a set of random absorbers.

    Same as calc_voff(), but iterating over every random realisation
    in `rsets`.
    """
    result = Struct()
    result.deltavs = []
    result.deltavspergal = []
    for realisation in rsets:
        for r in realisation:
            if r.groups1000 == []:
                continue
            for group in r.groups1000:
                for key in 'abc':
                    entries = group[key]
                    if entries != []:
                        # second element of each entry is the offset
                        offsets = zip(*entries)[1]
                        result.deltavspergal.append(offsets)
                        result.deltavs.extend(offsets)
    return result
    
def plotvoff(voffreal,voffrand,region,redshift,isnap):
    """ Plot histograms of galaxy-absorber velocity offsets for real
    and random absorbers on the current axes.

    NOTE(review): np.histogram's `new` keyword dates from numpy < 1.2
    and was later removed -- confirm the installed numpy version
    before running.
    """
    # plot voffsets
    pl.cla()
    vals,bins = np.histogram(voffreal.deltavs,bins=np.arange(-8000,8100,100),
                             new=True)
    pl.bar(bins[:-1], vals, width=bins[1]-bins[0], alpha=0.5,
           label='real absorbers')
    vals,bins = np.histogram(voffrand.deltavs,bins=np.arange(-8000,8100,100),
                             new=True)
    # random counts scaled by 1/1000 -- presumably 1000 random sets
    # were generated; verify against the randtrip() call.
    pl.bar(bins[:-1], vals/1000., width=bins[1]-bins[0], alpha=0.5,
           fc='r',label='random absorbers')
    pl.legend()
    pl.xlabel('Velocity offsets between galaxies and nearby HI $N>10^{13.5}$ cm$^{-2}$ absorbers')
    pl.title('%s, z = %.3f, snap %s' % (region,redshift,isnap))

def process_snap(prefix1, prefix2, region, isnap, mcut=GAL_MASS_CUT,
                 debug=True):
    """ Read in a Specwizard output file, identify triples, the
    galaxies near them, and find lines identified along the
    triples. Measure the velocity offsets between lines and nearby
    galaxies. Measure the number of galaxy-absorber groups in each
    triple.

    `prefix1`/`prefix2` are path prefixes to the GIMIC data, `region`
    is the region name (e.g. 'Sigma0') and `isnap` the snapshot number
    string (e.g. '029').

    `mcut` is the lower cutoff for galaxy masses in units of 10**10
    stellar masses.

    Finally generate sets of triples with randomly placed lines, and
    measure the same quantities as for the real absorbers.

    Returns (h5, gals, trips, rsets, voff).
    """
    dirname = os.path.join(prefix1,prefix2,region,'snapshot_'+isnap)
    # exactly one hdf5 file is expected per snapshot directory
    h5name, = glob.glob(dirname + '/Spectrum.Part_los*.hdf5')
    h5 = readh5(h5name,None,debug=debug)
    dirname = os.path.join('/home/nhmc/projects/gimic/vpfitting',
                           prefix2,region,'snapshot_'+isnap,'h1')
    # now read in line info (TODO? spectrum too)
    f26 = readmodels(dirname, h5.number)
    h5.f26 = f26

    # read in the galaxies and work out their redshift
    temp = 'gals_LoRes_%s_%s.txt' % (region, 'snap'+str(int(isnap)))
    galname = os.path.join(prefix1, 'LoRes/gals', temp)
    gals = readgals(galname, h5.redshift, h5.xmid, h5.ymid, h5.zmid,
                    h5.Om, h5.Ol)

    # 2. remove gals below some mass limit (to give a similar limit to
    # the gals we can see in obs, stellar mass > 5e9 solar masses)
    gals0 = gals[gals.mstel > mcut]

    trips = tripinfo(h5, gals0)
    # generate random lines and rerun group finding algorithm
    # on random lines + gals.

    rsets = randtrip(trips, h5.safedv, h5.redshift)

    # calculate voffsets
    voffreal = calc_voff(trips)
    voffrand = calc_randvoff(rsets)
    voff = Struct(real=voffreal, rand=voffrand)
    return h5,gals,trips,rsets,voff

def countgroups(trips, rsets):
    """ For each triple, number of gals with 3 associated absorbers.

    Returns a Struct with `real` (count per triple) and `rand` (mean
    count per triple over all the random sets).
    """
    n3real = [t.ngroups[3] for t in trips]
    ntrip = len(n3real)
    n3ran = np.zeros(ntrip)
    countn = [0] * ntrip
    for rset in rsets:
        ncur = np.array([t.ngroups[3] for t in rset])
        # count how often the random realisation matches or beats the
        # real count for each triple
        for j, val in enumerate(ncur):
            if val >= n3real[j]:
                countn[j] += 1
        n3ran = n3ran + ncur
    # average number of gal3los groups per triple over all random sets
    n3ran /= float(len(rsets))

    return Struct(real=n3real, rand=n3ran)

def plotgals_3d(gals,trips,redshift):
    """ 3d plot (mayavi) of the galaxies, their velocities, the most
    massive haloes and the sightline triples.

    NOTE(review): relies on a global `mlab`; the module-level mayavi
    import is commented out at the top of the file, so the caller must
    `from enthought.mayavi import mlab` first.
    """
    fig = mlab.figure(bgcolor=(0,0,0))
    #mlab.points3d(gals.x,gals.y,gals.z,mode='point')
    mlab.points3d(gals.x,gals.y,gals.z)
    pts = mlab.quiver3d(gals.x,gals.y,gals.z,gals.vx,gals.vy,gals.vz)

    mlab.axes()
    mlab.outline()
    #gals0 = gals[gals.mstel>10]
    #mlab.points3d(gals0.x,gals0.y,gals0.z,np.log10(gals0.mstel),
    #              color=(1,0,0),opacity=0.5)
    # highlight massive haloes, sized by the cube root of total mass
    gals1 = gals[gals.mtot>200]
    #pts = mlab.points3d(gals1.x,gals1.y,gals1.z,np.log10(gals1.mtot),
    #              color=(1,0,0),opacity=0.5)
    pts = mlab.points3d(gals1.x,gals1.y,gals1.z,(gals1.mtot)**(1/3.)/10.,
                  color=(1,0,0),opacity=0.4)
    pts.glyph.glyph.clamping = False
    # draw each sightline as a vertical line spanning the galaxy z range
    zvals = gals.z.min(), gals.z.max()
    for t in trips:
        for x,y in zip(t.x,t.y):
            mlab.plot3d([x,x],[y,y],zvals,color=(0,0,1),tube_radius=None)
    mlab.title('z=%.2f' % redshift)

def plotgals(gals, trips, n3, redshift):
    a = pl.gca()
    a.set_xlabel('x')
    a.set_ylabel('y')
    a.axis('equal')
    #z0 = gals.z.mean()
    #zr = gals.z.ptp()
    #c0 = (gals.z < z0+1) & (gals.z > z0-1)
    #plot(gals.x[c0],gals.y[c0],',',alpha=0.7)
    #c1 = (gals.z < z0+5) & (gals.z > z0-5)
    #a.plot(gals.x[c1],gals.y[c1],',',alpha=0.7)
    a.plot(gals.x,gals.y,',k',alpha=0.3)
    for n,t in zip(n3,trips):
        if t.theta > 0 or t.phi > 0:
            continue
        if n > 0:
            a.plot(t.x,t.y,'o',mfc='w',mew=1,ms=7)
        else:
            a.plot(t.x,t.y,'o',mfc='w',mew=1,ms=7)
    a.set_title('z=%.2f' % redshift)
    print len(gals)
    #c2 = (gals.z < z0+10) & (gals.z > z0-10)
    #plot(gals.x[c2],gals.y[c2],',',alpha=0.7)
    #plot(gals.x,gals.y,',',alpha=0.7)
    show()
    return a

def processregion(prefix1,prefix2,region,ind,isnaps):
    #fig = pl.figure(ind,figsize=(14,5)); fig.clf()
    #fig.text(0.5,0.95,region)
    print region
    snap = dict()
    for i,isnap in enumerate(isnaps):
        #if i > 0:  fig.add_subplot(1,3,i+1,sharex=a,sharey=a)
        #else:  fig.add_subplot(1,3,i+1)
        h5,gals,trips,rsets,voff = process_snap(prefix1,prefix2,
                                                region,isnap)
        print '%.2f' % h5.redshift
        n3 = countgroups(trips, rsets)
        #a = plotgals(gals,trips,n3.real,h5.redshift)
        snap['z%.2f' % h5.redshift] = Struct(
            h5=h5,gals=gals,trips=trips,rsets=rsets,voff=voff,
            n3=n3,isnap=isnap)
            
        #bins = np.arange(-0.5,30.5,1)
        #pl.hist(n3.real, bins=bins, alpha=0.5, label='real')
        #pl.hist(n3.rand, bins=bins, alpha=0.5, label='random')
        #pl.legend()
        #plotvoff(voff.real,voff.rand,region,h5.redshift,isnap)
    #pl.savefig('all2d_%s.png' % region)
    return snap

def plotregions(snap):
    """ Plot the galaxy/triple maps for the three standard redshift
    snapshots of a region on a single 3x3 figure (the remaining panels
    are created empty for later use).
    """
    fig = pl.figure()
    for i,key in enumerate(['z0.25','z0.50','z1.00']):
        i += 1
        s = snap[key]
        fig.add_subplot(3, 3, 3*i)
        # BUG FIX: previously referenced undefined globals
        # (gals, trips, n3, h5) instead of the snapshot Struct `s`.
        a = plotgals(s.gals, s.trips, s.n3.real, s.h5.redshift)
        fig.add_subplot(3, 3, 3*i + 1)
        fig.add_subplot(3, 3, 3*i + 2)

def find_dndz(trips,redshift,safedv,mincol=MIN_NHI_CUT,maxcol=99.0):
    """ Find dN/dz for a given set of triples.
    """
    safedz = safedv/Ckms * (redshift + 1)
    deltaz = 2 * safedz
    zpath = 0.
    all_lines = []
    for t in snap.trips:
        zpath += deltaz*3
        all_lines.append(t.la[(t.la.logn > mincol)&(t.la.logn < maxcol)])
        all_lines.append(t.lb[(t.lb.logn > mincol)&(t.lb.logn < maxcol)])
        all_lines.append(t.lc[(t.lc.logn > mincol)&(t.lc.logn < maxcol)])
    all_lines = np.concatenate(all_lines)
    print len(all_lines)
    dndz = len(all_lines) / zpath
    dndzerr = np.sqrt(len(all_lines)) / zpath
    return dndz, dndzerr, len(all_lines), zpath

def colplot2d(prefix1, prefix2, insnap, region, vmin=12.8, vmax=14.5,
              cmap=pl.cm.gray_r, plotgals=True):
    """ Plot total HI column density of the sightlines as a 2d x-y
    map, overlaying nearby galaxies and the quasar-triple separation
    pattern.

    `insnap` is the snapshot number string (e.g. '029').  NOTE the
    `plotgals` keyword shadows the module-level plotgals() function
    inside this routine.

    Returns (mesh, h5, gals).
    """
    # BUG FIX: the body previously used the global `isnap` instead of
    # the `insnap` argument (working only when a suitable global
    # happened to exist).
    dirname = os.path.join(prefix1,prefix2,region,'snapshot_'+insnap)
    h5name, = glob.glob(dirname + '/Spectrum.Part_los*.hdf5')
    h5 = readh5(h5name,None,debug=True)
    # quasar sightline separations tabulated against redshift
    qsep = np.rec.fromrecords(np.loadtxt('qso.data'), names='z,ab,ac,bc')
    c0 = np.abs(qsep.z - h5.redshift) < 0.01
    sep, = qsep[c0]
    # law of cosines gives the angle of the triangle at sightline A
    alpha = np.arccos((sep.ac**2 + sep.ab**2 - sep.bc**2)/(2.*sep.ac*sep.ab))
    sepx = [0, -sep.ab, -sep.ac*np.cos(alpha)]
    sepy = [0, 0, sep.ac*np.sin(alpha)]
    temp = 'gals_LoRes_%s_%s.txt' % (region, 'snap'+str(int(insnap)))
    galname = os.path.join(prefix1,'LoRes/gals', temp)
    gals = readgals(galname, h5.redshift, h5.xmid, h5.ymid, h5.zmid,
                    h5.Om, h5.Ol)

    a = pl.gca()
    a.cla()
    # the sightlines were generated on a 100x100 grid
    X = h5.x.reshape(100,100).T
    Y = h5.y.reshape(100,100).T
    htot = h5.logntot.reshape(100,100).T
    cm = a.pcolormesh(X,Y,htot,vmin=vmin,vmax=vmax,cmap=cmap)
    a.axis('tight')
    a.set_autoscale_on(0)
    # only overlay galaxies above the stellar-mass cut
    c0 = gals.mstel > 0.5
    if plotgals:  pl.plot(gals.x[c0],gals.y[c0],'.r')
    pl.plot(sepx + X[0].mean() + 8, sepy + Y[:,0].mean() + 8, 'oc')
    a.set_title('%s z=%.2f' % (region, h5.redshift))
    return cm,h5,gals
  
if 0:
    # Script block (disabled with `if 0`): process every region; meant
    # to be run by hand / pasted into an interactive session.
    # for a single snapshot read in hdf5 info
    prefix1 = '/media/cosma/data/rw17/nhmc/GIMIC'
    #prefix2 = 'LoRes/Spectra/QSO-correlations'
    prefix2 = 'LoRes/Spectra/QSO-correlations4'
    # NOTE(review): region and isnap below are not used by the
    # processregion calls that follow (colplot2d's body reads the
    # global isnap, however).
    region = 'Sigma0'
    isnap = '029'
    
    sigmam2 = processregion(
        prefix1,prefix2,'Sigmam2',0,'027 029 031'.split())
    sigmam1 = processregion(
        prefix1,prefix2,'Sigmam1',1,'029 039 046'.split())
    sigma0 = processregion(
        prefix1,prefix2,'Sigma0', 2,'029 039 046'.split())
    sigmap1 = processregion(
        prefix1,prefix2,'Sigmap1',3,'029 039 046'.split())
    sigmap2 = processregion(
        prefix1,prefix2,'Sigmap2',4,'033 043 051'.split())

if 0:
    # Check dN/dz of the lines present in the triples as a function of
    # redshift.

    pl.figure()
    dndztot = np.zeros(3)
    dndztoterrsq = np.zeros(3)
    mincol,maxcol = 13.2,14.0
    for i,region in enumerate((sigmam2,sigmam1,sigma0,sigmap1,sigmap2)):
        i -= 2
        if i in (-2,2):
            weight = 1/12.
        elif i in (-1,1):
            weight = 1/6.
        else:
            weight = 1/2.
        temp = []
        for j,key in enumerate('z0.25 z0.50 z1.00'.split()):
            print key
            snap = region[key]
            n,er,nlines,zpath = find_dndz(snap.trips,snap.h5.redshift,
                                          snap.h5.safedv,mincol=mincol,
                                          maxcol=maxcol)
            snap.nlines = nlines
            snap.zpath = zpath
            dndztot[j] += n * weight
            dndztoterrsq[j] += er*er * weight # add errors in quadrature
            temp.append((n,er))
        dndz, dndzerr = (np.array(a) for a in zip(*temp))
        z = np.array([0.25,0.5,1.0])
        plot(log10(1+z),log10(dndz),'s')
        errs = log10(dndz)-log10(dndz-dndzerr), log10(dndz+dndzerr)-log10(dndz)
        pl.errorbar(log10(1+z),log10(dndz),yerr=errs,fmt='s',
                    label='region %i' % i)

    dndztoterr = np.sqrt(dndztoterrsq)
    errs = (log10(dndztot) - log10(dndztot - dndztoterr),
            log10(dndztot + dndztoterr) - log10(dndztot))
    pl.errorbar(log10(1+z),log10(dndztot),yerr=errs,fmt='sk',label='all')
    pl.legend(numpoints=1,loc='lower right')
    pl.xlabel('log$_{10}$(1+z)')
    pl.ylabel('log$_{10}$(dN/dz)')
    pl.title('%s < log$_{10}$(N) < %s' % (mincol,maxcol))


if 0:
    # run the above commands to find dndz first!
    # calculate the incidence of gal3los groups in the simulations,
    # interms of the number of such groups found over a given redshift
    # path length. we already have the path length from dndz, now we
    # just need the number of groups found / total path length

    formats = 's^ovd'
    pl.figure()
    weights = [1/12., 1/6., 1/2., 1/6., 1/12.]
    for i,region in enumerate((sigmam2,sigmam1,sigma0,sigmap1,sigmap2)):
        dgdztot = np.zeros(3)
        dgdztoterrsq = np.zeros(3)
        weight = weights[i]
        temp = []
        for j,key in enumerate('z0.25 z0.50 z1.00'.split()):
            snap = region[key]
            n3 = sum(snap.n3.real)
            print key, 'ntrip, ',  n3, 'zpath', snap.zpath
            dgdz = n3 / snap.zpath *3
            er = np.sqrt(n3) / snap.zpath *3 
            dgdztot[j] += dgdz * weight
            dgdztoterrsq[j] += er**2 * weight  # add errors in quadrature
            temp.append((dgdz,er))
        dgdz, dgdzerr = (np.array(a) for a in zip(*temp))
        z = np.array([0.25,0.5,1.0])
        #plot(log10(1+z),log10(dgdz),formats[i])
        errs = log10(dgdz)-log10(dgdz-dgdzerr), log10(dgdz+dgdzerr)-log10(dgdz)
        pl.errorbar(log10(1+z), log10(dgdz), yerr=errs, fmt=formats[i],
                    label='region %i' % (i-2))
        #raw_input()

    dgdztoterr = np.sqrt(dgdztoterrsq)
    errs = (log10(dgdztot)-log10(dgdztot-dgdztoterr),
            log10(dgdztot+dgdztoterr)-log10(dgdztot))
    pl.errorbar(log10(1+z),log10(dgdztot),yerr=errs,fmt='sk',label='all')
    pl.legend(numpoints=1,loc='lower right')
    pl.xlabel('log$_{10}$(1+z)')
    pl.ylabel('log$_{10}$(dN/dz) groups')
    pl.title('log$_{10}$(N) > %s, mgal > %.2f stellar masses' % (
        MIN_NHI_CUT, GAL_MASS_CUT))
    
if 0:
    # Script block (disabled with `if 0`):
    # make plot of total HI column density as a function of x, y
    # position of sightlines, overlayed on galaxy distribution
    prefix1 = '/media/cosma/data/rw17/nhmc/GIMIC'
    prefix2 = 'LoRes/Spectra/QSO-correlations3'
    # NOTE(review): `colour` is unused below.
    colour='r'

    f = pl.figure(figsize=(10,10))
    for i,isnap in enumerate('027 029 031'.split()):
        f.add_subplot(2,2,i+1)
        c,h5,gals = colplot2d(prefix1,prefix2,isnap,'Sigmam2')
    # the fourth (invisible) panel just hosts the colorbar
    a = f.add_subplot(2,2,4,visible=False)
    cb = pl.colorbar(c,ax=a)
    cb.set_label('log N$_{HI}$'); pl.show()
    
    f = pl.figure(figsize=(10,10))
    for i,isnap in enumerate('029 039 046'.split()):
        f.add_subplot(2,2,i+1)
        c,h5,gals = colplot2d(prefix1,prefix2,isnap,'Sigmam1')
    a = f.add_subplot(2,2,4,visible=False)
    cb = pl.colorbar(c,ax=a)
    cb.set_label('log N$_{HI}$'); pl.show()
        
    f = pl.figure(figsize=(10,10))
    for i,isnap in enumerate('029 039 046'.split()):
        f.add_subplot(2,2,i+1)
        c,h5,gals = colplot2d(prefix1,prefix2,isnap,'Sigma0')
    a = f.add_subplot(2,2,4,visible=False)
    cb = pl.colorbar(c,ax=a)
    cb.set_label('log N$_{HI}$'); pl.show()

    # NOTE(review): Sigmap1 is never plotted, and Sigmap2 is plotted
    # twice (first without the galaxy overlay, then with it) --
    # confirm this is intended.
    f = pl.figure(figsize=(10,10))
    for i,isnap in enumerate('033 043 051'.split()): # 051
        f.add_subplot(2,2,i+1)
        c,h5,gals = colplot2d(prefix1,prefix2,isnap,'Sigmap2',plotgals=0)
    a = f.add_subplot(2,2,4,visible=False)
    cb = pl.colorbar(c,ax=a)
    cb.set_label('log N$_{HI}$'); pl.show()

    f = pl.figure(figsize=(10,10))
    for i,isnap in enumerate('033 043 051'.split()): # 051
        f.add_subplot(2,2,i+1)
        c,h5,gals = colplot2d(prefix1,prefix2,isnap,'Sigmap2')
    a = f.add_subplot(2,2,4,visible=False)
    cb = pl.colorbar(c,ax=a)
    cb.set_label('log N$_{HI}$'); pl.show()

if 0:
    # Script block (disabled with `if 0`): 3d mayavi plots of galaxies
    # and the triples with at least one gal3los group.
    # NOTE(review): relies on globals (trips, n3, gals, h5) left over
    # from running process_snap/countgroups interactively; this import
    # also supplies the `mlab` global that plotgals_3d() needs.
    from enthought.mayavi import mlab
    trip3 = [t for i,t in enumerate(trips) if n3.real[i] > 0]
    plotgals_3d(gals,trip3,h5.redshift)

    g = gals[gals.mtot>500]
    mlab.points3d(g.x,g.y,g.z)
    mlab.quiver3d(g.x,g.y,g.z,g.vx,g.vy,g.vz,color=(1,0,0))
    
    plotgals_3d(gals,trip3,h5.redshift)