#include "odump.h"

#include <ctype.h>
#include <stdarg.h>
#include <stdio.h>
#include <string.h>

/*
 * Encode every byte read from infd as a 3-digit octal number on outfd.
 *
 * Numbers on the same line are separated by opts->delim; after opts->length
 * numbers a newline is emitted instead of the delimiter (opts->length == 0
 * means one unbroken line).
 *
 * Returns OK on success, FATAL_ERROR on an I/O failure.
 */
int odump_encode(int infd, int outfd, struct odump_opts* opts) {
	FILE* in = fdopen(infd, "r");
	FILE* out = fdopen(outfd, "w");
	int cc;        // value returned by fgetc() (may be EOF)
	int count = 0; // octal numbers already dumped on the current line
	int exit_status = OK;

	if (in == NULL || out == NULL) {
		fprintf(stderr, "Error de escritura en el stream de salida\n");
		return FATAL_ERROR;
	}

	while (!exit_status && (cc = fgetc(in)) != EOF) {
		unsigned char c = (unsigned char) cc; // current byte
		if (opts->length != 0 && count >= opts->length) {
			// line is full: break it instead of printing the delimiter
			if (fputc('\n', out) == EOF) {
				fprintf(stderr, "Error de escritura en el stream de salida\n");
				exit_status = FATAL_ERROR;
			}
			count = 0;
		} else if (count != 0) {
			// not the first number on the line: separate it from the previous one
			if (fprintf(out, "%s", opts->delim) < 0) {
				fprintf(stderr, "Error de escritura en el stream de salida\n");
				exit_status = FATAL_ERROR;
			}
		}
		if (fprintf(out, "%03o", c) < 0) { // write the encoded byte
			fprintf(stderr, "Error de escritura en el stream de salida\n");
			exit_status = FATAL_ERROR;
		}

		count++;
	}

	fflush(out);
	return exit_status;
}

/*
 * Decode a stream of 3-digit octal numbers read from infd back into raw
 * bytes written to outfd. Numbers are expected to be separated either by a
 * single '\n' or by opts->delim (the format produced by odump_encode()).
 *
 * Returns OK on success, FATAL_ERROR on an I/O failure, ABORT when the
 * input is not a valid octal dump.
 */
int odump_decode(int infd, int outfd, struct odump_opts* opts) {
	FILE* in = fdopen(infd, "r");
	FILE* out = fdopen(outfd, "w");
	int exit_status = OK;
	char oc[4];  // one 3-digit octal number plus the '\0' terminator

	if (in == NULL || out == NULL) {
		fprintf(stderr, "Error de escritura en el stream de salida\n");
		return FATAL_ERROR;
	}

	while (!exit_status && fgets(oc, sizeof(oc), in)) {
		if (isdigit((unsigned char) oc[0]) &&
			isdigit((unsigned char) oc[1]) &&
			isdigit((unsigned char) oc[2])) {
			// "%o" stores an unsigned int; the previous code scanned through
			// an (int*) cast of a 1-byte unsigned char, overwriting adjacent
			// stack bytes (undefined behavior). Use a real unsigned int.
			unsigned int o = 0;

			sscanf(oc, "%o", &o); // convert the octal digits to a byte value

			if (fprintf(out, "%c", (unsigned char) o) < 0) { // write the decoded byte
				fprintf(stderr, "Error de escritura en el stream de salida\n");
				exit_status = FATAL_ERROR;
			}

			// consume the separator: a lone '\n', or opts->delim (whose first
			// byte was already eaten by fgetc, hence the -1). Cast to long so
			// an empty delimiter yields a well-defined -1 instead of a size_t
			// underflow.
			if (!(fgetc(in) == '\n'))
				if (!feof(in))
					fseek(in, (long) strlen(opts->delim) - 1, SEEK_CUR);
		}
		else {
			fprintf(stderr, "Entrada invalida: (%s). Se esperaba un nro octal\n", oc);
			exit_status = ABORT;
		}
	}

	fflush(out);
	return exit_status;
}
