
package auditoryUIbk;
import jass.engine.*;
import jass.render.*;
import javax.sound.sampled.*;
import java.io.*;
import java.net.*;
import java.util.Hashtable;

/**
   A buffer loaded from an audio file or URL.
   @author Kees van den Doel (kvdoel@cs.ubc.ca)
   Pierre Dragicevic
*/
public class StereoFileBuffer {
    /** Sampling rate in Hertz (the working rate after conversion). */
    public float srate;

    /** Bytes per frame (2 for mono, 4 stereo, etc.) */
    public int bytesPerFrame;

    /** Number of sample frames. */
    public long nFrames;

    /** Total size of the audio data in bytes (16-bit samples: 2 bytes each). */
    public int numBytes;

    /** Bits per sample (16 after loading). */
    public int bitsPerSample;

    /** Number of channels (1 after loading, 2 after spatialize). */
    public int nChannels;

    /** Length of buffer as floats. */
    public int bufsz;

    /** Sample buffer; values nominally in [-1, 1]. */
    public float[] buf;

    /** Peak absolute sample level found by normalize(). */
    float maxlevel;

    /** True once the audio data has been loaded successfully. */
    public boolean ok = false;

    /** Cache of already-built buffers, keyed by file name + parameters. */
    static Hashtable cache = new Hashtable();

    /** Construct buffer from named file: load, normalize, trim leading and
        trailing silence, then spatialize to interleaved stereo.
        @param fn Audio file name.
        @param leftVolume gain for the left channel.
        @param rightVolume gain for the right channel.
        @param leftDelay left-channel delay in seconds.
        @param rightDelay right-channel delay in seconds.
        @param noiseLevelBefore leading-silence threshold, as a fraction of the peak level.
        @param noiseLevelAfter trailing-silence threshold, as a fraction of the peak level.
    */
    private StereoFileBuffer(String fn, float leftVolume, float rightVolume, float leftDelay, float rightDelay, float noiseLevelBefore, float noiseLevelAfter) {
        loadAudio(fn);
        normalize();
        // NOTE(review): indexOf(...) > 0 misses a match at index 0 -- presumably
        // file names always carry a leading directory; confirm.
        if (fn.indexOf("paulaaudio") > 0 && fn.indexOf("training") > 0) {
            // hack: tts-generated numbers are better if not strongly trimmed
            trim_begin(noiseLevelBefore * maxlevel/2);
        } else {
            trim_begin(noiseLevelBefore * maxlevel);
            trim_end(noiseLevelAfter * maxlevel);
        }
        spatialize(leftVolume, rightVolume, leftDelay, rightDelay);
    }

    /**
     * Get a buffer for the given file and spatialization parameters,
     * reusing a cached instance when the same combination was requested
     * before. Callers share instances and must not modify the returned buffer.
     * NOTE(review): the check-then-put sequence is not atomic; concurrent
     * callers may build the same buffer twice (wasted work only) -- confirm
     * single-threaded use.
     */
    public static StereoFileBuffer getBuffer(String fn, float leftVolume, float rightVolume, float leftDelay, float rightDelay, float noiseLevelBefore, float noiseLevelAfter) {
        String id = fn + " " + leftVolume + " " + rightVolume + " " + leftDelay + " " + rightDelay + " " + noiseLevelBefore + " " + noiseLevelAfter;
        // already in cache? reuse the existing instance
        StereoFileBuffer cached = (StereoFileBuffer)cache.get(id);
        if (cached != null) {
            return cached;
        }

        // no, create new
        StereoFileBuffer buffer = new StereoFileBuffer(fn, leftVolume, rightVolume, leftDelay, rightDelay, noiseLevelBefore, noiseLevelAfter);
        cache.put(id, buffer);
        return buffer;
    }

    /** Load audio file, converting it to 16-bit signed little-endian mono
        at the working sample rate (SoundManager.srate), and decode it into
        the float buffer. Sets ok=true on success; on failure the buffer is
        left empty and ok stays false.
        @param fn Audio file name.
    */
    public void loadAudio(String fn) {
        File fileIn = new File(fn);
        byte[] audioBytes = null;
        float originalrate = 0;
        AudioInputStream audioInputStream = null;
        try {
            // -- open stream to file and get info
            audioInputStream = AudioSystem.getAudioInputStream(fileIn);
            bytesPerFrame = audioInputStream.getFormat().getFrameSize();
            bitsPerSample = audioInputStream.getFormat().getSampleSizeInBits();
            nChannels = audioInputStream.getFormat().getChannels();
            srate = (float)audioInputStream.getFormat().getSampleRate();
            nFrames = audioInputStream.getFrameLength();
            // NOTE(review): assumes the frame length is known (not
            // AudioSystem.NOT_SPECIFIED == -1) and fits in an int number of
            // bytes; a streamed/huge file would break here -- confirm for the
            // audio assets used.
            numBytes = (int)nFrames * bytesPerFrame;

            // --- convert to 16 bits, mono, signed, little endian using tritonus
            AudioFormat format2 = new AudioFormat(
                    srate,
                    16,
                    1,
                    true,
                    false);

            originalrate = srate;

            org.tritonus.sampled.convert.PCM2PCMConversionProvider tritonusConverter = new org.tritonus.sampled.convert.PCM2PCMConversionProvider();
            audioInputStream = tritonusConverter.getAudioInputStream(format2, audioInputStream);

            // adjust bookkeeping for the new 16-bit mono representation
            double sizechange = 16.0 / bitsPerSample / nChannels;
            bitsPerSample = 16;
            nChannels = 1;
            bytesPerFrame = (int)(bytesPerFrame * sizechange);
            numBytes = (int)(numBytes * sizechange);

            // --- convert to working sample rate using tritonus
            AudioFormat format3 = new AudioFormat(
                    SoundManager.srate,
                    bitsPerSample,
                    nChannels,
                    true,
                    false);
            org.tritonus.sampled.convert.SampleRateConversionProvider tritonusConverter2 = new org.tritonus.sampled.convert.SampleRateConversionProvider();
            audioInputStream = tritonusConverter2.getAudioInputStream(format3, audioInputStream);
            sizechange = 1.0 * SoundManager.srate / srate;
            srate = (float)audioInputStream.getFormat().getSampleRate();
            bytesPerFrame = (int)(bytesPerFrame * sizechange);
            numBytes = (int)(numBytes * sizechange);

            // --- read the whole converted stream.
            // BUGFIX: the old loop called read(audioBytes) repeatedly, so every
            // partial read overwrote the data already read at offset 0; now we
            // accumulate at the proper offset until the buffer is full or EOF.
            audioBytes = new byte[numBytes];
            int offset = 0;
            int n;
            while (offset < numBytes
                   && (n = audioInputStream.read(audioBytes, offset, numBytes - offset)) != -1) {
                offset += n;
            }
            ok = true;
        } catch (Exception e) {
            if (e instanceof FileNotFoundException) {
                System.err.println("Can't find audio file " + fn);
            } else {
                e.printStackTrace();
            }
            bufsz = 0;
            buf = new float[] {};
            return;
        } finally {
            // BUGFIX: close the stream on the error path too (the old code
            // only closed it after a fully successful read).
            if (audioInputStream != null) {
                try {
                    audioInputStream.close();
                } catch (IOException ignored) {
                    // best effort; nothing useful to do on close failure
                }
            }
        }

        bufsz = numBytes/2; // two bytes per 16-bit sample

        buf = new float[bufsz];
        byteToFloat(buf, audioBytes, bufsz, false);

        System.out.println("loaded " + fn + " (" + originalrate + " Hz)");
    }

    /** Convert a byte array of 16-bit signed PCM samples to a float array
        scaled to [-1, 1).
        @param dbuf User provided float array to return result in.
        @param bbuf User provided byte array to convert.
        @param bufsz Number of float samples to write.
        @param bigEndian true if samples are high-byte first.
    */
    static final public void byteToFloat(float [] dbuf, byte[] bbuf, int bufsz, boolean bigEndian) {
        int ib = 0;
        if (bigEndian) {
            for (int i = 0; i < bufsz; i++) {
                // BUGFIX: the old code added the low byte as a SIGNED value
                // (no & 0xff correction like the little-endian branch had),
                // corrupting every sample whose low byte was >= 0x80.
                short y = (short)(((bbuf[ib] & 0xff) << 8) | (bbuf[ib+1] & 0xff));
                ib += 2;
                dbuf[i] = y/32768.f;
            }
        } else {
            for (int i = 0; i < bufsz; i++) {
                // low byte first; mask to treat the low byte as unsigned
                short y = (short)(((bbuf[ib+1] & 0xff) << 8) | (bbuf[ib] & 0xff));
                ib += 2;
                dbuf[i] = y/32768.f;
            }
        }
    }

    // not used any more
    /** Mix all channels down to one by averaging each frame. */
    public void convertToMono() {
        if (!ok) return;
        int channels = nChannels;
        int bufsz2 = bufsz/channels;
        float[] buf2 = new float[bufsz2];
        for (int i = 0; i < bufsz2; i++) {
            // mix channels of frame i
            float y = 0;
            for (int j = 0; j < channels; j++) {
                y += buf[i*channels+j];
            }
            buf2[i] = y/channels;
        }
        // BUGFIX: the old code set nChannels = 1 first and then divided
        // bytesPerFrame and numBytes by nChannels (i.e. by 1); divide by the
        // original channel count instead.
        bytesPerFrame /= channels;
        numBytes /= channels;
        nChannels = 1;
        bufsz = bufsz2;
        buf = buf2;
    }

    /** Drop leading samples up to the first sample above threshold.
        NOTE(review): compares the signed sample value, not its absolute
        value, so a purely negative-going onset is not detected -- confirm
        this is intended.
        @param threshold level above which a sample counts as signal.
    */
    public void trim_begin(float threshold) {
        if (!ok) return;

        int begin = -1;
        for (int i = 0; i < bufsz; i++) {
            if (buf[i] > threshold) {
                begin = i;
                break;
            }
        }
        // BUGFIX: if no sample exceeds the threshold the old code left
        // buf == null and grew bufsz by one; leave the buffer unchanged.
        if (begin == -1) return;

        float[] buf2 = new float[bufsz - begin];
        System.arraycopy(buf, begin, buf2, 0, bufsz - begin);
        buf = buf2;
        bufsz -= begin;
        nFrames -= begin;
    }

    /** Drop trailing samples after the last sample above threshold.
        NOTE(review): like trim_begin, compares the signed sample value only.
        @param threshold level above which a sample counts as signal.
    */
    public void trim_end(float threshold) {
        if (!ok) return;

        int end = -1;
        for (int i = bufsz - 1; i >= 0; i--) {
            if (buf[i] > threshold) {
                end = i;
                break;
            }
        }
        // BUGFIX: if no sample exceeds the threshold the old code left
        // buf == null and set bufsz to 0 via end+1 == 0 with a null buffer;
        // leave the buffer unchanged instead.
        if (end == -1) return;

        float[] buf2 = new float[end + 1];
        System.arraycopy(buf, 0, buf2, 0, end + 1);
        buf = buf2;
        bufsz = end + 1;
        nFrames = end + 1;
    }

    /** Find the peak absolute level and scale the buffer so it peaks at 1.
        Scaling is skipped when the peak is already >= 1 or is below 0.01
        (to avoid amplifying what is essentially silence/noise). */
    public void normalize() {
        // compute peak absolute level
        maxlevel = 0;
        for (int i = 0; i < bufsz; i++) {
            float level = Math.abs(buf[i]);
            if (level > maxlevel) maxlevel = level;
        }

        if (maxlevel < 1 && maxlevel > 0.01f) {
            for (int i = 0; i < bufsz; i++) {
                buf[i] /= maxlevel;
            }
            maxlevel = 1;
        }
    }

    /** Convert the (mono) buffer to interleaved stereo, applying per-ear
        gain and delay. The output is padded by the larger delay so the
        delayed signal fits.
        NOTE(review): assumes the buffer is mono (nChannels == 1), which
        loadAudio guarantees -- confirm before calling from elsewhere.
        @param leftVolume gain for the left channel.
        @param rightVolume gain for the right channel.
        @param leftDelay left-channel delay in seconds.
        @param rightDelay right-channel delay in seconds.
    */
    public void spatialize(float leftVolume, float rightVolume, float leftDelay, float rightDelay) {
        if (!ok) return;
        // delays in samples
        int LD = Math.round(srate * leftDelay);
        int RD = Math.round(srate * rightDelay);
        int MaxD = Math.max(LD, RD);
        //
        int bufsz2 = (bufsz + MaxD)*2;
        float[] buf2 = new float[bufsz2];
        // BUGFIX: iterate over the padded length (bufsz + MaxD) so the delayed
        // tail of the signal is kept; the old loop stopped at bufsz and
        // silently truncated the last MaxD delayed samples.
        for (int i = 0; i < bufsz + MaxD; i += nChannels) {
            float Ly = (i-LD < 0 || i-LD > bufsz-1) ? 0 : buf[i-LD]*leftVolume;
            float Ry = (i-RD < 0 || i-RD > bufsz-1) ? 0 : buf[i-RD]*rightVolume;
            // NOTE(review): even index gets the right channel, odd the left --
            // confirm against the player's expected interleave order.
            buf2[i*2] = Ry;
            buf2[i*2+1] = Ly;
        }
        //
        nChannels = 2;
        bufsz = bufsz2;
        buf = buf2;
        bytesPerFrame *= 2;
        nFrames += MaxD;
        // BUGFIX: two bytes per 16-bit sample of the NEW buffer; the old
        // formula (numBytes + MaxD)*2 under-counted the delay padding.
        numBytes = bufsz2 * 2;
    }
}



