package jsoundcapturever2;

import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.geom.Line2D;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.util.Vector;

import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioFormat.Encoding;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;
import javax.sound.sampled.TargetDataLine;
import javax.sound.sampled.UnsupportedAudioFileException;
import javax.swing.JPanel;

/**
 * Simple audio recorder/player built on the Java Sound API
 * (javax.sound.sampled). Supports capturing from the default input line,
 * playing captured or file-loaded audio back, rendering a waveform panel,
 * and extracting PCM amplitude data from WAV bytes.
 */
public class MyRecorder {

	// Buffer size (in bytes) used when opening the playback line.
	static final int BUF_SIZE = 16384;
	// Raw bytes of the last file read by extractAmplitudeFromFile().
	private byte[] arrFile;
	// private byte[] audioBytes;
	// private float[] audioData;
	private FileOutputStream fos;
	private ByteArrayInputStream bis;
	// Most recently captured or loaded audio stream; shared across worker threads.
	private AudioInputStream audioInputStream;
	// Current capture/playback format, rebuilt by updateFormat().
	private AudioFormat format;
	// duration: total clip length in seconds; seconds: current play/record
	// position in seconds (updated by the SamplingGraph polling thread).
	private double duration, seconds;
	File file;
	String fileName = "untitled";
	// Line2D segments of the rendered waveform (raw Vector kept as-is).
	Vector lines = new Vector();
	/** Format Controls **/
	Encoding encoding;
	float samplingRate;
	int sampleBitSize;
	// boolean isSigned
	boolean isBigEndian;
	int channels;
	// Encoding selectors accepted by setFormat().
	static final int ENCODING_ULAW = 1;
	static final int ENCODING_ALAW = 2;
	static final int ENCODING_LINEAR = 3;

	/** **/
	Capture capture;
	Playback playback;
	SamplingGraph samplingGraph;
	// When true, a SamplingGraph panel is created and driven during record/play.
	boolean isShowWaveForm;

	/**
	 * Creates a recorder configured with the default audio format
	 * (44.1 kHz, 16-bit, signed PCM, stereo, little-endian).
	 *
	 * @param isShowWaveForm when {@code true}, a waveform panel is created and
	 *            can be obtained via {@link #getSamplingGraph()}
	 */
	public MyRecorder(final boolean isShowWaveForm) {
		setDefaultFormat();
		this.isShowWaveForm = isShowWaveForm;
		samplingGraph = isShowWaveForm ? new SamplingGraph() : null;
		playback = new Playback();
		capture = new Capture();
	}

	/**
	 * Records for a fixed amount of time, then stops.
	 *
	 * @param duration recording time in milliseconds (note: this parameter
	 *            shadows the {@code duration} field, which holds seconds)
	 * @throws Exception if recording cannot start or the wait is interrupted
	 */
	public void startRecord(final int duration) throws Exception {
		startRecord();
		try {
			Thread.sleep(duration);
		} finally {
			// FIX: previously an interrupted sleep propagated before stop() ran,
			// leaving the capture line and graph thread running forever.
			capture.stop();
			if (isShowWaveForm) {
				samplingGraph.stop();
			}
		}
	}

	/**
	 * Starts capturing audio on a fresh background thread, and the waveform
	 * position tracker when enabled.
	 *
	 * @throws Exception never thrown directly; declared for API compatibility
	 */
	public void startRecord() throws Exception {
		capture = new Capture();
		capture.start();
		if (!isShowWaveForm) {
			return;
		}
		samplingGraph.start();
	}

	/**
	 * Stops an in-progress capture (no-op when nothing is recording) and,
	 * when enabled, the waveform position tracker.
	 */
	public void stopRecord() {
		if (capture == null) {
			return;
		}
		capture.stop();
		capture = null;
		if (isShowWaveForm) {
			samplingGraph.stop();
		}
	}

	/**
	 * Plays back the most recently captured or loaded audio on a fresh
	 * background thread, and starts the waveform tracker when enabled.
	 */
	public void playRecorded() {
		playback = new Playback();
		playback.start();
		if (!isShowWaveForm) {
			return;
		}
		samplingGraph.start();
	}

	/**
	 * Applies the default format: 44.1 kHz, 16-bit, signed linear PCM,
	 * little-endian, two channels. Failures (which cannot occur for these
	 * constants) are logged rather than propagated.
	 */
	private void setDefaultFormat() {
		try {
			setFormat(ENCODING_LINEAR, 44100, 16, true, false, 2);
		} catch (final Exception e) {
			e.printStackTrace();
		}
	}

	// public byte[] getAudioBytes() {
	// return audioBytes;
	// }

	/**
	 * Returns the waveform panel for embedding in a UI.
	 *
	 * @return the sampling-graph panel
	 * @throws Exception when the recorder was constructed without waveform display
	 */
	public JPanel getSamplingGraph() throws Exception {
		if (!isShowWaveForm) {
			throw new Exception("Wave Form not Displaying...");
		}
		return samplingGraph;
	}

	/** @return length of the captured/loaded clip in seconds (0 until known) */
	public double getDuration() {
		return duration;
	}

	/**
	 * Configures the audio format used for capture and playback.
	 *
	 * @param ENCODING one of {@link #ENCODING_LINEAR}, {@link #ENCODING_ALAW},
	 *            {@link #ENCODING_ULAW}
	 * @param samplingRate sample rate in Hz, e.g. 44100
	 * @param sampleBitSize bits per sample, e.g. 8 or 16
	 * @param isSigned whether linear PCM samples are signed (ignored for A-law/U-law)
	 * @param isBigEndian byte order for multi-byte samples
	 * @param channels number of channels (1 = mono, 2 = stereo)
	 * @throws Exception if the encoding selector or a numeric parameter is invalid
	 */
	public void setFormat(final int ENCODING, final float samplingRate, final int sampleBitSize, final boolean isSigned,
			final boolean isBigEndian, final int channels) throws Exception {
		// FIX: implements the pending parameter-validation TODO; garbage values
		// previously produced a nonsensical AudioFormat that failed much later.
		if (samplingRate <= 0 || sampleBitSize <= 0 || channels <= 0) {
			throw new Exception("Invalid format parameters: samplingRate=" + samplingRate + ", sampleBitSize="
					+ sampleBitSize + ", channels=" + channels);
		}
		this.samplingRate = samplingRate;
		this.sampleBitSize = sampleBitSize;
		this.isBigEndian = isBigEndian;
		this.channels = channels;

		/** select encoding format **/
		switch (ENCODING) {
		case ENCODING_LINEAR:
			encoding = isSigned ? AudioFormat.Encoding.PCM_SIGNED : AudioFormat.Encoding.PCM_UNSIGNED;
			break;
		case ENCODING_ALAW:
			encoding = AudioFormat.Encoding.ALAW;
			break;
		case ENCODING_ULAW:
			encoding = AudioFormat.Encoding.ULAW;
			break;
		default:
			// FIX: the bare "throw new Exception()" carried no diagnostic at all.
			throw new Exception("Unknown encoding selector: " + ENCODING);
		}
		updateFormat();
	}

	/** Rebuilds {@link #format} from the current format-control fields. */
	private void updateFormat() {
		// Frame size in bytes = bytes per sample times channel count.
		final int frameSize = channels * (sampleBitSize / 8);
		format = new AudioFormat(encoding, samplingRate, sampleBitSize, channels, frameSize, samplingRate, isBigEndian);
	}

	/**
	 * Rebuilds the {@link AudioFormat} from the current format-control fields
	 * and returns it.
	 *
	 * @return the freshly constructed format
	 */
	public AudioFormat getFormat() {
		updateFormat();
		return format;
	}

	/**
	 * Reads data from the input channel and writes to the output stream.
	 * Runs on its own thread; stop() signals the loop to finish, after which
	 * the captured bytes are wrapped in an AudioInputStream for playback.
	 */
	class Capture implements Runnable {

		TargetDataLine line;
		Thread thread;

		/** Spawns the capture worker thread. */
		public void start() {
			thread = new Thread(this);
			thread.setName("Capture");
			thread.start();
		}

		/** Signals the capture loop to terminate; run() then closes the line. */
		public void stop() {
			thread = null;
		}

		/** Clears the worker reference (if a message was given) and logs it. */
		private void shutDown(final String message) {
			if (message != null && thread != null) {
				thread = null;
			}
			System.out.println(message);
		}

		@Override
		public void run() {

			duration = 0;
			audioInputStream = null;

			line = getTargetDataLineForRecord();
			// FIX: the helper returns null when no compatible target line exists
			// or it cannot be opened; previously this fell straight through to a
			// NullPointerException on line.getBufferSize().
			if (line == null) {
				shutDown("Unable to obtain a target data line for format " + format);
				return;
			}

			// play back the captured audio data
			final ByteArrayOutputStream out = new ByteArrayOutputStream();
			final int frameSizeInBytes = format.getFrameSize();
			final int bufferLengthInFrames = line.getBufferSize() / 8;
			final int bufferLengthInBytes = bufferLengthInFrames * frameSizeInBytes;
			final byte[] data = new byte[bufferLengthInBytes];
			int numBytesRead;

			line.start();

			// Read until stop() nulls the thread reference or the line signals EOF.
			while (thread != null) {
				if ((numBytesRead = line.read(data, 0, bufferLengthInBytes)) == -1) {
					break;
				}
				System.out.println("capturing..");
				out.write(data, 0, numBytesRead);
			}

			// we reached the end of the stream. stop and close the line.
			line.stop();
			line.close();
			line = null;

			// stop and close the output stream
			try {
				out.flush();
				out.close();
			} catch (final IOException ex) {
				ex.printStackTrace();
			}

			// load bytes into the audio input stream for playback
			final byte audioBytes[] = out.toByteArray();
			final ByteArrayInputStream bais = new ByteArrayInputStream(audioBytes);
			audioInputStream = new AudioInputStream(bais, format, audioBytes.length / frameSizeInBytes);

			// Clip length in seconds, published via the shared duration field.
			final long milliseconds = (long) ((audioInputStream.getFrameLength() * 1000) / format.getFrameRate());
			duration = milliseconds / 1000.0;
			System.out.println(duration);
			try {
				audioInputStream.reset();
			} catch (final Exception ex) {
				ex.printStackTrace();
				return;
			}
			if (isShowWaveForm) {
				samplingGraph.createWaveForm(audioBytes);
			}
		}
	} // End class Capture

	/**
	 * Write data to the OutputChannel.
	 * Converts the shared audioInputStream to the configured format and
	 * streams it to a SourceDataLine on a dedicated thread.
	 */
	public class Playback implements Runnable {

		SourceDataLine line;
		Thread thread;

		/** Spawns the playback worker thread. */
		public void start() {
			thread = new Thread(this);
			thread.setName("Playback");
			thread.start();
		}

		/** Asks the playback loop to terminate early (skips line.drain()). */
		public void stop() {
			thread = null;
		}

		/** Clears the worker reference and logs the given message. */
		private void shutDown(final String message) {
			if (thread != null) {
				thread = null;
			}
			System.out.println(message);
		}

		@Override
		public void run() {

			// reload the file if loaded by file
			if (file != null) {
				createAudioInputStreamFromFile(file);
			}

			// make sure we have something to play
			if (audioInputStream == null) {
				shutDown("No loaded audio to play back");
				return;
			}
			// reset to the beginnning of the stream
			try {
				audioInputStream.reset();
			} catch (final Exception e) {
				shutDown("Unable to reset the stream\n" + e);
				return;
			}

			// get an AudioInputStream of the desired format for playback.
			// FIX: AudioSystem.getAudioInputStream throws IllegalArgumentException
			// when the conversion is unsupported — it never returns null, so the
			// old null check was dead code; catch the exception instead.
			final AudioFormat format = getFormat();
			final AudioInputStream playbackInputStream;
			try {
				playbackInputStream = AudioSystem.getAudioInputStream(format, audioInputStream);
			} catch (final IllegalArgumentException e) {
				shutDown("Unable to convert stream of format " + audioInputStream + " to format " + format);
				return;
			}

			line = getSourceDataLineForPlayback();
			// FIX: the helper returns null when no compatible line is available;
			// previously this caused a NullPointerException below.
			if (line == null) {
				shutDown("Unable to obtain a source data line for format " + format);
				return;
			}
			// play back the captured audio data

			final int frameSizeInBytes = format.getFrameSize();
			final int bufferLengthInFrames = line.getBufferSize() / 8;
			final int bufferLengthInBytes = bufferLengthInFrames * frameSizeInBytes;
			final byte[] audioBuffer = new byte[bufferLengthInBytes];
			int numBytesRead = 0;

			// start the source data line
			line.start();

			while (thread != null) {
				try {
					if ((numBytesRead = playbackInputStream.read(audioBuffer)) == -1) {
						break;
					}
					int numBytesRemaining = numBytesRead;
					// line.write may accept fewer bytes than offered; loop until done.
					while (numBytesRemaining > 0) {
						numBytesRemaining -= line.write(audioBuffer, 0, numBytesRemaining);
					}
				} catch (final Exception e) {
					shutDown("Error during playback: " + e);
					break;
				}
			}
			// we reached the end of the stream. let the data play out, then
			// stop and close the line.
			if (thread != null) {
				line.drain();
			}
			line.stop();
			line.close();
			line = null;
			shutDown(null);
		}
	} // End class Playback

	/**
	 * Render a WaveForm.
	 *
	 * Swing panel that draws the captured/loaded signal as vertical line
	 * segments (built by createWaveForm) plus a status strip, and runs a
	 * background thread that polls the active line to track the current
	 * play/record position.
	 */
	class SamplingGraph extends JPanel implements Runnable {

		// Position-tracking worker; null when the graph is idle.
		private Thread thread;
		// private Font font10 = new Font("serif", Font.PLAIN, 10);
		private final Font font12 = new Font("serif", Font.PLAIN, 12);
		Color jfcBlue = new Color(204, 204, 255);
		Color pink = new Color(255, 175, 175);

		public SamplingGraph() {
			setBackground(new Color(20, 20, 20));
		}

		/**
		 * Rebuilds the waveform line segments from PCM data and repaints.
		 *
		 * @param audioBytes raw PCM bytes of the clip; when null, the bytes
		 *            are read from the shared audioInputStream instead
		 */
		@SuppressWarnings("unchecked")
		public void createWaveForm(byte[] audioBytes) {

			lines.removeAllElements(); // clear the old vector

			final AudioFormat format = audioInputStream.getFormat();
			if (audioBytes == null) {
				try {
					// NOTE(review): a single read() is not guaranteed to fill the
					// whole array — a partial read leaves trailing zero samples.
					audioBytes = new byte[(int) (audioInputStream.getFrameLength() * format.getFrameSize())];
					audioInputStream.read(audioBytes);
				} catch (final Exception ex) {
					return;
				}
			}

			final Dimension d = getSize();
			final int w = d.width;
			// Reserve 15 px at the bottom for the status strip.
			final int h = d.height - 15;
			final int[] audioData = extractDataFromAudioInputStream();
			// compacting of frames into one pixel
			// NOTE(review): integer division by w throws ArithmeticException when
			// the panel has zero width (e.g. before layout) — confirm the panel
			// is always sized before this runs.
			final int frames_per_pixel = audioBytes.length / format.getFrameSize() / w;
			byte my_byte = 0;
			double y_last = 0;
			final int numChannels = format.getChannels();
			for (double x = 0; x < w && audioData != null; x++) {
				// Index of the first sample backing this pixel column.
				final int idx = (int) (frames_per_pixel * numChannels * x);

				if (format.getSampleSizeInBits() == 8) {
					// if 8-bit sampling
					my_byte = (byte) audioData[idx];
				} else {
					// if 16-bit sampling: scale the 16-bit sample down to a byte
					my_byte = (byte) (128 * audioData[idx] / 32768);
				}
				// Map amplitude [-128, 127] onto the panel height, origin at top.
				final double y_new = (h * (128 - my_byte) / 256);
				System.out.println(y_new);
				lines.add(new Line2D.Double(x, y_last, x, y_new));
				y_last = y_new;
			}

			repaint();
		}

		/**
		 * Paints the status strip and, when not capturing, the waveform plus a
		 * pink progress cursor. NOTE(review): overrides paint rather than the
		 * Swing-conventional paintComponent.
		 */
		@Override
		public void paint(final Graphics g) {

			final Dimension d = getSize();
			final int w = d.width;
			final int h = d.height;
			final int INFOPAD = 15;

			final Graphics2D g2 = (Graphics2D) g;
			g2.setBackground(getBackground());
			g2.clearRect(0, 0, w, h);
			g2.setColor(Color.white);
			g2.fillRect(0, h - INFOPAD, w, INFOPAD);

			// While a capture is running, only the elapsed time is shown.
			if (null != capture && capture.thread != null) {
				g2.setColor(Color.black);
				g2.setFont(font12);
				g2.drawString("Length: " + String.valueOf(seconds), 3, h - 4);
			} else {
				g2.setColor(Color.black);
				g2.setFont(font12);
				g2.drawString("File: " + fileName + "  Length: " + String.valueOf(duration) + "  Position: " + String.valueOf(seconds), 3,
						h - 4);

				if (audioInputStream != null) {
					// .. render sampling graph ..
					g2.setColor(jfcBlue);
					for (int i = 1; i < lines.size(); i++) {
						g2.draw((Line2D) lines.get(i));
					}

					// .. draw current position ..
					if (seconds != 0) {
						final double loc = seconds / duration * w;
						g2.setColor(pink);
						g2.setStroke(new BasicStroke(3));
						g2.draw(new Line2D.Double(loc, 0, loc, h - INFOPAD - 2));
					}
				}
			}
		}

		/** Starts the position-tracking thread and resets the position. */
		public void start() {
			thread = new Thread(this);
			thread.setName("SamplingGraph");
			thread.start();
			seconds = 0;
		}

		/** Interrupts and discards the position-tracking thread. */
		public void stop() {
			if (thread != null) {
				thread.interrupt();
			}
			thread = null;
		}

		/**
		 * Polls the active playback/capture line every 100 ms, updating the
		 * shared position counter and repainting, until stop() is called.
		 */
		@Override
		public void run() {
			seconds = 0;
			while (thread != null) {
				if ((playback.line != null) && (playback.line.isOpen())) {

					final long milliseconds = (playback.line.getMicrosecondPosition() / 1000);
					seconds = milliseconds / 1000.0;
				} else if (null != capture && (capture.line != null) && (capture.line.isActive())) {

					final long milliseconds = (capture.line.getMicrosecondPosition() / 1000);
					seconds = milliseconds / 1000.0;
				}

				try {
					Thread.sleep(100);
				} catch (final Exception e) {
					break;
				}

				repaint();

				// NOTE(review): busy-waits in 10 ms steps while neither line is
				// active/open; stop()'s interrupt breaks this inner loop only.
				while (null != capture && (capture.line != null && !capture.line.isActive())
						|| (playback.line != null && !playback.line.isOpen())) {
					try {
						Thread.sleep(10);
					} catch (final Exception e) {
						break;
					}
				}
			}
			seconds = 0;
			repaint();
		}
	} // End class SamplingGraph

	// **********************************//
	/**
	 * Opens a source data line matching the current format for playback.
	 *
	 * @return an opened {@link SourceDataLine}, or {@code null} when no
	 *         compatible line exists or the line could not be opened
	 */
	private SourceDataLine getSourceDataLineForPlayback() {
		// Describe the line we need and verify the mixer can supply it.
		final DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
		if (!AudioSystem.isLineSupported(info)) {
			return null;
		}
		// get and open the source data line for playback.
		try {
			final SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
			line.open(format, BUF_SIZE);
			return line;
		} catch (final LineUnavailableException ex) {
			return null;
		}
	}

	/**
	 * Opens a target data line matching the current format for capture.
	 *
	 * @return an opened {@link TargetDataLine}, or {@code null} when no
	 *         compatible line exists or the line could not be opened
	 */
	private TargetDataLine getTargetDataLineForRecord() {
		// Describe the line we need and verify the mixer can supply it.
		final AudioFormat format = getFormat();
		final DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
		if (!AudioSystem.isLineSupported(info)) {
			return null;
		}
		// get and open the target data line for capture.
		try {
			final TargetDataLine line = (TargetDataLine) AudioSystem.getLine(info);
			line.open(format, line.getBufferSize());
			return line;
		} catch (final Exception ex) {
			return null;
		}
	}

	/**
	 * Synthesizes a sine wave at the given frequency and plays it on a freshly
	 * opened source data line using the current format. Supports 1- or 2-channel,
	 * 8- or 16-bit little-endian PCM.
	 *
	 * @param frequency tone frequency in Hz
	 */
	public void playFreq(final double frequency) {
		System.out.println(format);
		final double sampleRate = format.getSampleRate();
		final int sampleSizeInBytes = format.getSampleSizeInBits() / 8;
		final int channels = format.getChannels();

		// 2^19 frames of audio data.
		final byte audioBuffer[] = new byte[(int) Math.pow(2.0, 19.0) * channels * sampleSizeInBytes];

		for (int i = 0; i < audioBuffer.length; i += sampleSizeInBytes * channels) {
			// i advances in bytes, so dividing by bytes-per-second yields seconds.
			final int wave = (int) (127.0 * Math.sin(2.0 * Math.PI * frequency * i / (sampleRate * sampleSizeInBytes * channels)));
			// wave = (wave > 0 ? 127 : -127);

			if (channels == 1) {
				if (sampleSizeInBytes == 1) {
					audioBuffer[i] = (byte) (wave);
				} else if (sampleSizeInBytes == 2) {
					// 16-bit little-endian: low byte first.
					audioBuffer[i] = (byte) (wave);
					audioBuffer[i + 1] = (byte) (wave >>> 8);
				}
			} else if (channels == 2) {
				if (sampleSizeInBytes == 1) {
					audioBuffer[i] = (byte) (wave);
					audioBuffer[i + 1] = (byte) (wave);
				} else if (sampleSizeInBytes == 2) {
					// Same sample duplicated into both stereo channels.
					audioBuffer[i] = (byte) (wave);
					audioBuffer[i + 1] = (byte) (wave >>> 8);
					audioBuffer[i + 2] = (byte) (wave);
					audioBuffer[i + 3] = (byte) (wave >>> 8);
				}
			}
		}

		// FIX: the line was neither null-checked nor started — a SourceDataLine
		// that was never start()ed never drains its buffer, so write() blocks
		// forever once the internal buffer fills; it was also never closed.
		// (Also removed the per-sample println, which spammed ~500K lines.)
		final SourceDataLine line = getSourceDataLineForPlayback();
		if (line == null) {
			System.err.println("playFreq: no source data line available for " + format);
			return;
		}
		line.start();
		line.write(audioBuffer, 0, audioBuffer.length);
		line.drain();
		line.stop();
		line.close();
	}

	/**
	 * Loads an audio file into the shared audioInputStream and adopts its
	 * format and duration. No-op when the argument is null or not a file.
	 *
	 * @param file the audio file to load
	 */
	public void createAudioInputStreamFromFile(final File file) {
		if (file == null || !file.isFile()) {
			return;
		}
		try {
			this.file = file;
			audioInputStream = AudioSystem.getAudioInputStream(file);
			fileName = file.getName();
			final long milliseconds = (long) ((audioInputStream.getFrameLength() * 1000) / audioInputStream.getFormat().getFrameRate());
			duration = milliseconds / 1000.0;
			format = audioInputStream.getFormat();
		} catch (final Exception ex) {
			// FIX: failures were silently swallowed by an empty catch, leaving
			// stale state behind with no diagnostics at all.
			System.err.println("Unable to load audio file " + file + ": " + ex);
			ex.printStackTrace();
		}
	}

	/**
	 * Reads a WAV file fully into memory and extracts its amplitude samples.
	 *
	 * @param wavFile the file to read
	 * @return PCM amplitude data, or null when neither this read nor any
	 *         previous one produced bytes
	 */
	public int[] extractAmplitudeFromFile(final File wavFile) {
		try {
			// FIX: the old FileInputStream was never closed (descriptor leak) and
			// a single read() call is not guaranteed to fill the array (partial
			// read); Files.readAllBytes does both correctly.
			arrFile = Files.readAllBytes(wavFile.toPath());
		} catch (final Exception e) {
			System.out.println("SomeException : " + e.toString());
		}
		return extractAmplitudeFromFileByteArray(arrFile);
	}

	/**
	 * Wraps an in-memory WAV byte array in a stream and extracts its
	 * amplitude samples.
	 *
	 * @param arrFile complete WAV file contents
	 * @return PCM amplitude data
	 */
	public int[] extractAmplitudeFromFileByteArray(final byte[] arrFile) {
		bis = new ByteArrayInputStream(arrFile);
		return extractAmplitudeFromFileByteArrayInputStream(bis);
	}

	/**
	 * for extracting amplitude array the format we are using :16bit, 22khz, 1
	 * channel, littleEndian,
	 *
	 * @param bis stream positioned at the start of a complete WAV file
	 * @return PCM audioData, or an empty array when the stream cannot be decoded
	 */
	public int[] extractAmplitudeFromFileByteArrayInputStream(final ByteArrayInputStream bis) {
		try {
			audioInputStream = AudioSystem.getAudioInputStream(bis);
		} catch (final UnsupportedAudioFileException e) {
			System.out.println("unsupported file type, during extract amplitude");
			e.printStackTrace();
			// FIX: execution previously continued with a null (or stale) stream,
			// crashing with an NPE or processing the wrong data.
			return new int[0];
		} catch (final IOException e) {
			System.out.println("IOException during extracting amplitude");
			e.printStackTrace();
			return new int[0];
		}
		final float milliseconds = (long) ((audioInputStream.getFrameLength() * 1000) / audioInputStream.getFormat().getFrameRate());
		duration = milliseconds / 1000.0;
		return extractDataFromAudioInputStream();
	}

	/**
	 * Drains the shared audioInputStream into a byte buffer, records the clip
	 * duration, and converts the bytes to per-sample amplitudes.
	 *
	 * @return PCM amplitude data (null for unsupported sample sizes)
	 */
	public int[] extractDataFromAudioInputStream() {
		final AudioFormat format = audioInputStream.getFormat();
		final byte[] audioBytes = new byte[(int) (audioInputStream.getFrameLength() * format.getFrameSize())];
		// calculate durationSec
		final float milliseconds = (long) ((audioInputStream.getFrameLength() * 1000) / audioInputStream.getFormat().getFrameRate());
		duration = milliseconds / 1000.0;
		System.out.println("The current signal has duration " + duration + " Sec");
		try {
			// FIX: a single read() may return fewer bytes than requested; loop
			// until the buffer is full or the stream ends so no samples are lost.
			int offset = 0;
			while (offset < audioBytes.length) {
				final int n = audioInputStream.read(audioBytes, offset, audioBytes.length - offset);
				if (n == -1) {
					break;
				}
				offset += n;
			}
		} catch (final IOException e) {
			System.out.println("IOException during reading audioBytes");
			e.printStackTrace();
		}
		return extractDataFromAmplitudeByteArray(format, audioBytes);
	}

	/**
	 * Converts raw PCM bytes into per-sample integer amplitudes.
	 * Handles 16-bit (either endianness, signed) and 8-bit (signed or
	 * unsigned) PCM.
	 *
	 * @param format describes sample size, endianness, and encoding
	 * @param audioBytes raw PCM frame data
	 * @return one int per sample, or null for unsupported sample sizes
	 */
	public int[] extractDataFromAmplitudeByteArray(final AudioFormat format, final byte[] audioBytes) {
		int[] audioData = null;
		final int bits = format.getSampleSizeInBits();
		if (bits == 16) {
			final int sampleCount = audioBytes.length / 2;
			audioData = new int[sampleCount];
			final boolean bigEndian = format.isBigEndian();
			for (int s = 0; s < sampleCount; s++) {
				final int first = audioBytes[2 * s];
				final int second = audioBytes[2 * s + 1];
				// Byte order decides which of the pair is the high-order byte.
				final int msb = bigEndian ? first : second;
				final int lsb = bigEndian ? second : first;
				audioData[s] = (msb << 8) | (lsb & 255);
			}
		} else if (bits == 8) {
			audioData = new int[audioBytes.length];
			final boolean signed = format.getEncoding().toString().startsWith("PCM_SIGN");
			for (int s = 0; s < audioBytes.length; s++) {
				// Unsigned 8-bit samples are re-centered around zero.
				audioData[s] = signed ? audioBytes[s] : audioBytes[s] - 128;
			}
		}
		return audioData;
	}

	/**
	 * Save to file.
	 *
	 * @param name target path without extension; ".wav" is appended and a
	 *            numeric suffix is added when the file already exists
	 * @param fileType the file type
	 * @param audioInputStream the stream to persist; rewound before writing
	 */
	public void saveToFile(final String name, final AudioFileFormat.Type fileType, final AudioInputStream audioInputStream) {
		if (audioInputStream == null) {
			return;
		}
		// reset to the beginnning of the captured data
		try {
			audioInputStream.reset();
		} catch (final Exception e) {
			System.err.println("Unable to rewind stream before saving: " + e);
			return;
		}
		File myFile = new File(name + ".wav");
		// FIX: the old code called mkdir() on "name" itself, creating a spurious
		// DIRECTORY with the target's base name; ensure the PARENT directory
		// exists instead so the write below can succeed.
		final File parent = myFile.getParentFile();
		if (parent != null && !parent.exists()) {
			parent.mkdirs();
		}
		// Pick a non-clashing name: name.wav, name0.wav, name1.wav, ...
		int i = 0;
		while (myFile.exists()) {
			final String temp = String.format(name + "%d", i++);
			myFile = new File(temp + ".wav");
		}
		try {
			// FIX: AudioSystem.write returns the byte count and never -1, so the
			// old "== -1" check was dead; real failures arrive as exceptions and
			// were previously swallowed by an empty catch.
			AudioSystem.write(audioInputStream, fileType, myFile);
		} catch (final Exception ex) {
			System.err.println("Unable to save audio to " + myFile + ": " + ex);
			ex.printStackTrace();
		}
		System.out.println(myFile.getAbsolutePath());
		// JOptionPane.showMessageDialog(null, "File Saved !", "Success",
		// JOptionPane.INFORMATION_MESSAGE);
	}

	/**
	 * saving the file's bytearray
	 *
	 * @param fileName
	 *            the name of file to save the received byteArray of File
	 * @param arrFile
	 *            the bytes to write
	 */
	public void saveFileByteArray(final String fileName, final byte[] arrFile) {
		// FIX: try-with-resources guarantees the stream is closed even when
		// write() throws; previously a failed write leaked the descriptor, and
		// the success message was printed even after a failure.
		try (FileOutputStream out = new FileOutputStream(fileName)) {
			out.write(arrFile);
			System.out.println("WAV Audio data saved to " + fileName);
		} catch (final Exception ex) {
			System.err.println("Error during saving wave file " + fileName + " to disk" + ex.toString());
		}
	}
}
