/**
 * Class: TinEar
 *
 * Purpose: Manual audio experiments with the Beads library — live-input
 * recording to disk, multi-tap delay, granular pitch-shifting, and onset
 * detection on the microphone signal.
 *
 * Created: Feb 2, 2010 @ 1:01:03 PM
 * @author Ken Scott
 */
package org.openfantasia.musica.test;

import net.beadsproject.beads.analysis.featureextractors.FFT;
import net.beadsproject.beads.analysis.featureextractors.PeakDetector;
import net.beadsproject.beads.analysis.featureextractors.PowerSpectrum;
import net.beadsproject.beads.analysis.featureextractors.SpectralDifference;
import net.beadsproject.beads.analysis.segmenters.ShortFrameSegmenter;
import net.beadsproject.beads.core.AudioContext;
import net.beadsproject.beads.core.Bead;
import net.beadsproject.beads.data.Sample;
import net.beadsproject.beads.ugens.*;

import org.openfantasia.musica.Musica;

import javax.swing.*;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.IOException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Date;

public class TinEar {

	/**
	 * Formatter for timestamped output filenames. DateTimeFormatter is
	 * thread-safe, so it is safe to cache as a constant.
	 * NOTE(review): the pattern puts day before month (yyyy-dd-MM) — this looks
	 * unintentional, but the literal pattern is preserved so existing filename
	 * ordering on disk is not disturbed. Confirm before changing.
	 */
	private static final DateTimeFormatter FILE_STAMP =
			DateTimeFormatter.ofPattern("yyyy-dd-MM-HH-mm-ss");

	public static void main(String[] args) throws Exception {
		final Musica mc = new Musica();
		test1(mc);
	}

	/**
	 * Records the microphone input (mixed with a pitch-shifted granular copy of
	 * itself) until the window is closed, then writes the recorded take to an
	 * AIFF file.
	 *
	 * @param mc the audio engine wrapper supplying the shared AudioContext
	 * @throws Exception if audio setup fails
	 */
	public static void test1(final Musica mc) throws Exception {

		// read input from microphone
		RTInput input = mc.newRTInput();
		mc.getOut().addInput(input);

		// pitch-shifted (2x) granular copy of the live input, mixed into the output
		final Sample s = new Sample(mc.audioContext.getAudioFormat(), 1000);
		final GranularSamplePlayer sp = new GranularSamplePlayer(mc.audioContext, s);
		sp.addInput(input);
		sp.setPitchEnvelope(new Envelope(mc.audioContext, (float) 2.0));
		Gain gain = mc.newGain(1, 1.0);
		gain.addInput(sp);
		mc.getOut().addInput(gain);

		// set up a recorder capturing the gain stage into s2 (grows as needed)
		final Sample s2 = new Sample(mc.audioContext.getAudioFormat(), 1000);
		final Recorder r = new Recorder(mc.audioContext, s2, Recorder.Mode.INFINITE);
		r.setResizingParameters(0, 1000);
		r.addInput(gain);
		mc.getOut().addDependent(r);

		// set up a user-input trigger: closing the window stops audio and saves the take
		final WindowAdapter trigger = new WindowAdapter() {
			@Override
			public void windowClosing(WindowEvent e) {
				// stop the audio context and the recorder
				// (don't clip the sample — let's hear the silence too)
				mc.audioContext.stop();
				r.pause(true);

				// Output the RECORDED sample to a file. This must be s2 — the
				// recorder's target — not s, which is only the player's (empty)
				// source sample and is never written to by anything.
				try {
					s2.write("Y:\\samples\\direct\\AUDIO-"
							+ FILE_STAMP.format(LocalDateTime.now()) + ".aif");
				}
				catch (IOException e1) {
					// best-effort save: report the failure, still let the app exit
					e1.printStackTrace();
				}
				// no second stop() needed — the context was already stopped above
			}
		};

		JFrame frame = new JFrame("Recording Session Example");
		frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
		frame.add(new JLabel("recording... close to quit."));
		frame.addWindowListener(trigger);
		frame.pack();
		frame.setVisible(true);

		mc.start();
	}

	/**
	 * Multi-tap delay on the live input: 49 echo taps at 100 ms spacing, each
	 * progressively quieter (0.95 / tap index).
	 *
	 * @param mc the audio engine wrapper supplying the shared AudioContext
	 * @throws Exception if audio setup fails
	 */
	public static void test2(final Musica mc) throws Exception {
		RTInput input = new RTInput(mc.audioContext);
		mc.audioContext.out.addInput(input);

		// 10-second delay line fed by the live input
		TapIn tin = new TapIn(mc.audioContext, 10000f);
		tin.addInput(input);

		int max = 50;
		for (int i = 1; i < max; i++) {
			float delay = (float) i * 100f; // tap i echoes i*100 ms later
			TapOut tout = new TapOut(mc.audioContext, tin, delay);
			// later taps are quieter: gain falls off as 0.95/i
			Gain g = new Gain(mc.audioContext, 1, 0.95f / (float) i);
			g.addInput(tout);
			mc.getOut().addInput(g);
		}
		mc.audioContext.start();
	}

	/**
	 * Pitch-shifts the live input: a zero-delay tap feeds a granular player
	 * with a 2x pitch envelope at half gain. The dry signal is deliberately
	 * not routed to the output, so only the shifted signal is heard.
	 *
	 * @param mc the audio engine wrapper supplying the shared AudioContext
	 * @throws Exception if audio setup fails
	 */
	public static void test3(final Musica mc) throws Exception {
		RTInput input = new RTInput(mc.audioContext);
		// dry input intentionally NOT added to the output — wet signal only

		TapIn tin = new TapIn(mc.audioContext, 10000f);
		tin.addInput(input);

		TapOut tout = new TapOut(mc.audioContext, tin, 0);
		final GranularSamplePlayer sp = new GranularSamplePlayer(mc.audioContext, 1);
		sp.addInput(tout);
		sp.setPitchEnvelope(new Envelope(mc.audioContext, (float) 2.0));
		Gain gain = mc.newGain(1, 0.5);
		gain.addInput(sp);
		mc.getOut().addInput(gain);

		mc.audioContext.start();
	}

	/**
	 * Onset detection on the live input: short frames -> FFT -> power spectrum
	 * -> spectral difference -> peak detector. Prints a '.' for each onset.
	 *
	 * @param mc the audio engine wrapper supplying the shared AudioContext
	 * @throws Exception if audio setup fails
	 */
	public static void test4(final Musica mc) throws Exception {
		AudioContext ac = mc.audioContext;
		RTInput input = new RTInput(ac);
		ac.out.addInput(input);

		// set up a short-frame segmenter, driven as a dependent of the output
		ShortFrameSegmenter sfs = new ShortFrameSegmenter(ac);
		sfs.addInput(ac.out);
		ac.out.addDependent(sfs);
		int chunkSize = 512;
		sfs.setChunkSize(chunkSize);
		sfs.setHopSize(chunkSize / 2); // 50% frame overlap

		// set up the FFT and power-spectrum stages
		FFT fft = new FFT();
		sfs.addListener(fft);
		PowerSpectrum ps = new PowerSpectrum();
		fft.addListener(ps);

		// set up spectral difference as the onset detection function
		SpectralDifference sd = new SpectralDifference(ac.getSampleRate());
		//sd.setFreqWindow(80.f,1100.f);
		ps.addListener(sd);

		PeakDetector od = new PeakDetector();
		od.setThreshold(0.2f);
		od.setAlpha(.9f);
		sd.addListener(od);
		od.addMessageListener(new Bead() {
			@Override
			protected void messageReceived(Bead message) {
				System.out.print("."); // one dot per detected onset
			}
		});

		ac.start();
	}
}
