/*
 *  treehash.c
 *
 * Set of functions for creating/manipulating/comparing SHA1 hash
 * tree for use with files, mostly for P2P applications.  
 * Built on mini_db for storage purposes.  These functions
 * are by no means standardized, and provide interoperability only among themselves.
 *
 *
 * To compute the root hash just run treehash_sample, then treehash_promote until
 * it returns non-zero.  To obtain the root hash just mdb_get name "hash-0-0".
 *
 * Because mini_db is the storage media, all items are stored by name and value.
 * This includes hashes, and even options.
 * Hash are named "hash-#depth_off-#item_off" and store SHA1 hashes. 
 * The only setting used is in treehash_sample, which is "samples", its value is a
 * number, however it is still a string.
 * 
 * Options:
 *    "filename" - Exact path passed to treehash_sample
 *    "filesize" - off_t of filename's filesize
 *    "sample"   -  Size of any given sample.
 *    "curdepth" - Includes the value of the last computed depth (NOTE: is offset, root is 0)
 *    "method"   - Hash scheme used (only SHA1 supported)
 *    "bad"      - Placed in questionable hash during treehash_cmp
 *    "hash-0-0" - Root hash
 *    "hash-1-0" - Left hash used to generate root
 *    "hash-1-1" - Right hash used to generate root
 *    "hash-2-0" - Left hash used to generate "hash-1-0"
 *   .........And so.......
 *
 *
 * Imagine a tree, depths are formed by sample, or the depth ABOVE them!
 *
 *   Depth:  |         Tree:
 * ----------+------------------------------
 *     0     |                  0
 *     1     |             0_/     \_1
 *     2     |         0_/   \_1 2_/  \_3
 *     3     |       0/\1  2/\3  4/\5  6/\7
 *
 * If you want to ESTIMATE how many items are at any given depth, 
 * just use this formula:
 *            items = 2^depth
 *		
 *  Check out http://open-content.net/specs/draft-jchapweske-thex-02.html
 *  for a SIMILAR application.  Please note it is not the same, and these
 *  function will produce different hashes!
 *
 * Functions:
 * 	treehash_promote:  
 * 		Args:     st_hashtree data and option to delete previous hash
 *			   depth.
 * 		Returns:  zero when not root, when root has been calculated
 *			   non-zero is returned.
 *     treehash_sample:  
 * 		Args:     st_hashtree data and filename
 * 		Returns:  Calculated depth of tree.
 *
 * Also:
 *     A st_treehash may be
 *     manipulated by mini_db
 *     mdb functions.
 *
 * Copyright (c) 2006 Karlan Thomas Mitchell<karlanmitchell@gmail.com>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *         This product includes software developed by
           Karlan Thomas Mitchell<karlanmitchell@gmail.com>
 * 4. Neither Karlan Thomas Mitchell nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY KARLAN THOMAS MITCHELL AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/*
!!!! FLOATING POINT CALCULATION IS REQUIRED !!!!!!!!!!!
!!!! IF YOU KNOW ANOTHER FORMULA, PLEASE INFORM ME! !!!


Formula for calculating depth:

	depth = roundup(log10(hash_samples) / log10(2));



*/




#include "treehash.h"



/* Look up the stored hash for the tree node at (depth, item).
 * Returns the stored value, or NULL when no such node exists. */
char * treehash_gethash(struct st_treehash * hash_data, int depth, int item){
	char key[50];

	snprintf(key, sizeof(key), "hash-%d-%d", depth, item);
	return treehash_get(hash_data, key);
	}

/* Store a hash for the tree node at (depth, item), using the tree's
 * configured digest length.  Returns whatever treehash_set_hash returns. */
int treehash_sethash(struct st_treehash * hash_data, int depth, int item, char * hash){
	char key[50];
	int digest_len = treehash_get_int(hash_data, "digestlen");

	snprintf(key, sizeof(key), "hash-%d-%d", depth, item);
	return treehash_set_hash(hash_data, key, hash, digest_len);
	}

/* Store a leaf hash at (depth, item) together with the byte range it covers,
 * recorded as "range-<depth>-<item>" = "<offset>-<offset+sample_size>".
 * Always returns 0. */
int treehash_setleafhash(struct st_treehash * hash_data, int depth, int item, char * hash, off_t offset, size_t sample_size){
	char hash_id[50];
	char rangebuf[50];
	snprintf(hash_id, sizeof(hash_id), "hash-%d-%d", depth, item);
	treehash_set_hash(hash_data, hash_id, hash, treehash_get_int(hash_data, "digestlen"));

	snprintf(hash_id, sizeof(hash_id), "range-%d-%d", depth, item);
	/* off_t has no portable printf specifier, so cast both values to
	 * long long.  The old code printed the range end with "%d", which is
	 * undefined behavior and truncated offsets past 2GB on LP64 systems. */
	snprintf(rangebuf, sizeof(rangebuf), "%lld-%lld",
	         (long long)offset, (long long)(offset + (off_t)sample_size));
	treehash_set(hash_data, hash_id, rangebuf);
	return 0;
	}

/* Count how many consecutive items exist at a given depth by probing
 * item 0, 1, 2, ... until a lookup misses. */
size_t treehash_items(struct st_treehash * hash_data, int depth){
	size_t count;

	for(count = 0; treehash_gethash(hash_data, depth, count) != NULL; count++)
		;
	return count;
	}

/* Overwrite bytes [start, end] (inclusive) of filename with '?'.
 * Intended as a treehash_badrun callback for blanking corrupted ranges.
 * Returns 0 on success, -1 on any I/O failure. */
int badzero(const char * filename, off_t start, off_t end){
	printf("zeroing: %s %lld - %lld\n", filename, (long long)start, (long long)end);
	FILE *  fzero = fopen(filename, "r+");
	if(fzero == NULL)
		return -1;

	/* NOTE(review): fseek takes a long, so offsets beyond LONG_MAX are
	 * truncated by this cast; fseeko would fix that on POSIX systems. */
	if(fseek(fzero, (long)start, SEEK_SET) != 0){
		fclose(fzero);
		return -1;
		}

	off_t cur;
	for(cur = start; cur <= end; cur++){
		if(fwrite("?", 1, 1, fzero) != 1){
			fclose(fzero);
			return -1;
			}
		}

	/* fclose flushes stdio buffers; a failure here means the writes
	 * may not have reached the file. */
	if(fclose(fzero) != 0)
		return -1;
	return 0;
	}

/*
treehash_badrun:
	Runs badfunc(filename, start, end) for every range recorded in the
	"bad" option (see header comments).  Returns 0 when done or when no
	bad list exists; -1 on allocation failure.
*/
int treehash_badrun(struct st_treehash * hash_data,  int(*badfunc)(const char *, off_t, off_t)  ){
	char * bad_pntr = treehash_get(hash_data, "bad");
	char * filename = treehash_get(hash_data, "filename");
	if(bad_pntr == NULL)
		return 0;

	/* strtok_r mutates its input, so tokenize a private copy. */
	size_t bad_len    = strlen(bad_pntr);
	char * bad_blocks = malloc(bad_len + 1);
	if(bad_blocks == NULL)
		return -1;
	strcpy(bad_blocks, bad_pntr);

	/* The file size is loop-invariant; hoisted out of the loop below. */
	off_t max_off = getfilesize(filename) - 1;

	char * strtok_buf;
	off_t start;
	off_t end;

	bad_pntr = strtok_r(bad_blocks, ", ", &strtok_buf);
	while(bad_pntr != NULL){
		decode_range(bad_pntr, max_off, &start, &end);

		badfunc(filename, start, end);

		bad_pntr = strtok_r(NULL, ", ", &strtok_buf);
		}

	free(bad_blocks);
	return 0;
	}

/* Compare two hash trees.  "hash" is the trusted tree, "questionable" the
 * tree under test.  Mismatched blocks have their byte range (taken from the
 * trusted tree) appended to questionable's "bad" option, space-separated.
 * Returns 0 when the trees match, 1 on any mismatch or when the trees were
 * built with incompatible parameters. */
int treehash_cmp(struct st_treehash * hash, struct st_treehash * questionable){
	int depth_hash         = treehash_get_int(hash, "maxdepth");
	int depth_questionable = treehash_get_int(questionable, "maxdepth");
	int return_val = 0;
	int cur_depth;
	char buffer[1024];
	char * hash_trusted;
	char * hash_quest;
	int hash_c;

	/* Trees hashed with different parameters can never match; bail early. */
	if(strcasecmp(mdb_print(hash, "sample"), mdb_print(questionable, "sample")) != 0){
		printf("FATAL:  It is impossible for these hashes to match!\n");
		printf("REASON: Different sample sizes!\n");
		return 1;
	}

	if(strcasecmp(mdb_print(hash, "filesize"), mdb_print(questionable, "filesize")) != 0){
		printf("FATAL: It is impossible for these hashes to match!\n");
		printf("REASON: Different file sizes!\n");
		return 1;
	}

	if(strcasecmp(mdb_print(hash, "method"), mdb_print(questionable, "method")) != 0){
		printf("FATAL: It is impossible for these hashes to match!\n");
		printf("REASON: Different digest methods!\n");
		return 1;
	}

	/* Compare at the deepest depth both trees still have, so we keep the
	 * best resolution and don't have to throw away everything. */
	int min_depth = depth_hash;
	if(depth_questionable < min_depth)
		min_depth = depth_questionable;

	/* Promotion is destructive, so operate on private copies. */
	struct st_treehash t_hash;
	struct st_treehash t_questionable;

	treehash_init(&t_hash);
	treehash_init(&t_questionable);

	treehash_copy(&t_hash, hash);
	treehash_copy(&t_questionable, questionable);

	/* If the trees are at different resolutions, promote the deeper one(s)
	 * up to min_depth so the comparison is level-to-level. */
	if(depth_hash != depth_questionable){
		cur_depth = treehash_get_int(&t_hash, "curdepth");
		while(cur_depth > min_depth){
			treehash_promote(&t_hash, 1);
			cur_depth--;
		}

		cur_depth = treehash_get_int(&t_questionable, "curdepth");
		while(cur_depth > min_depth){
			treehash_promote(&t_questionable, 1);
			cur_depth--;
		}
		}

	for(hash_c = 0 ;  ; hash_c++){
		hash_trusted = treehash_gethash(&t_hash, min_depth, hash_c);
		hash_quest   = treehash_gethash(&t_questionable, min_depth, hash_c);

		/* Both exhausted: every item has been compared. */
		if(hash_trusted == NULL &&
                   hash_quest == NULL)
			break;

		/* One tree has more items at this depth than the other. */
		if(hash_trusted == NULL ||
                   hash_quest == NULL){
			return_val = 1;
			break;
			}

		if(strcasecmp(hash_trusted, hash_quest) != 0){
			return_val = 1;

			/* Record the trusted tree's byte range for this block in
			 * the questionable tree's "bad" list. */
			snprintf(buffer, sizeof(buffer), "range-%d-%d", min_depth, hash_c);

			mdb_cat(questionable, "bad", mdb_get(&t_hash, buffer));
			mdb_cat(questionable, "bad", " ");

 			printf("****: %s/%s(%d-%d)\n", hash_quest, hash_trusted, min_depth, hash_c);
			}
 		else
 			printf("hash: %s\n", hash_quest);
	}

	treehash_free(&t_hash);
	treehash_free(&t_questionable);

	return return_val;
	}

/* Collapse the current depth of the tree by hashing each adjacent pair of
 * hashes into one parent at depth-1; an unpaired trailing hash is carried up
 * unchanged.  Parent "range-..." entries are merged from the children's.
 * When delete_previous is non-zero the consumed child entries are removed
 * and "maxdepth" is adjusted.  Returns 0 when the root is already present
 * (nothing to promote), otherwise the new current depth (depth - 1). */
int treehash_promote(struct st_treehash * hash_data, int delete_previous){
	char * hash_a;
	char * hash_b;
	int    depth = mdb_get_int(hash_data, "curdepth");
	char hasha_id[50];
	char hashb_id[50];
	char hasha_rid[50];
	char hashb_rid[50];
	int cur_sample = 0;

	/* NOTE(review): if "method" matches none of these, digest_type stays 0
	 * and pairs are combined with an uninitialized digest -- callers should
	 * only promote trees built by treehash_sample.  TODO confirm. */
	int digest_type = 0;

	if     (strcasecmp(treehash_print(hash_data, "method"), "SHA1") == 0)
		digest_type = SHA1_DIGEST;
	else if(strcasecmp(treehash_print(hash_data, "method"), "MD5") == 0)
		digest_type = MD5_DIGEST;
	else if(strcasecmp(treehash_print(hash_data, "method"), "MD4") == 0)
		digest_type = MD4_DIGEST;

	if(depth == 0){
		/* Root hash already present; nothing to promote. */
		return 0;
		}
	printf("Promoting: %d\n", depth);

	mdb_set_int(hash_data, "curdepth", depth - 1);
	while(1){
		hash_a = NULL;
		hash_b = NULL;

		snprintf(hasha_id, 50, "hash-%d-%d", depth, cur_sample);
		snprintf(hasha_rid, 50, "range-%d-%d", depth, cur_sample);
		hash_a = treehash_gethash(hash_data, depth, cur_sample);
		cur_sample++;
		if(hash_a == NULL)
			break;

		snprintf(hashb_id, 50, "hash-%d-%d", depth, cur_sample);
		snprintf(hashb_rid, 50, "range-%d-%d", depth, cur_sample);
		hash_b = treehash_gethash(hash_data, depth, cur_sample);
		cur_sample++;

		if(hash_b != NULL){
			char hash_id[50];
			char new_hash[100];
			char new_digest[20];
			char new_range[50];
			char * range_a;
			char * range_b;

			/* Parent range spans from a's start to b's end. */
			snprintf(hash_id, 50, "range-%d-%d", depth - 1, roundup((cur_sample + 1) / 2) - 1);

			range_a = mdb_get(hash_data, hasha_rid);
			range_b = mdb_get(hash_data, hashb_rid);
			snprintf(new_range, 50, "%lld-%lld", atoll(range_a), atoll(&*(strchr(range_b, '-') + 1)) );

			treehash_set(hash_data, hash_id, new_range);

			snprintf(hash_id, 50, "hash-%d-%d", depth - 1, roundup((cur_sample + 1) / 2) - 1);
			/* Parent = digest of the two child hashes concatenated.
			 * BUGFIX: the old code used "%c%c" with the hash_a pointer
			 * twice -- undefined behavior, and hash_b was ignored.
			 * NOTE(review): assumes stored hashes are NUL-terminated
			 * strings; raw digests with embedded NULs would be
			 * truncated by "%s" -- confirm against treehash_set_hash. */
			snprintf(new_hash, 100, "%s%s", hash_a, hash_b);

			if(digest_type == SHA1_DIGEST)
				sha1_buffer((unsigned char *)new_hash, new_digest);
			else if(digest_type == MD5_DIGEST)
				md5_buffer((unsigned char *)new_hash, new_digest);
			else if(digest_type == MD4_DIGEST)
				md4_buffer((unsigned char *)new_hash, new_digest);

			treehash_sethash(hash_data, depth - 1, roundup((cur_sample + 1) / 2) - 1, new_digest);
		}
		else{
			/* Odd item out: carry the extra hash (and its range) up
			 * unchanged until the depth has an even item count. */
			char hash_id[50];
			snprintf(hash_id, 50, "range-%d-%d", depth - 1, roundup((cur_sample + 1) / 2) - 1);
			treehash_set(hash_data, hash_id, mdb_get(hash_data, hasha_rid));

			treehash_sethash(hash_data, depth - 1, roundup((cur_sample + 1) / 2) - 1, hash_a);
		}

		if(delete_previous){
			if(depth == mdb_get_int(hash_data, "maxdepth"))
				mdb_set_int(hash_data, "maxdepth", depth - 1);

			mdb_remove(hash_data, hasha_id);
			mdb_remove(hash_data, hashb_id);
			mdb_remove(hash_data, hasha_rid);
			mdb_remove(hash_data, hashb_rid);
		}

	}

	return depth - 1;
}

/* Build the leaf depth of the hash tree: split filename into samples of at
 * most "sample" bytes (clamped to [1, MAX_SAMPLE], default DEFAULT_SAMPLE),
 * digest each sample with digest_type (SHA1/MD5/MD4), and record the options
 * described in the header comment.  Returns the calculated leaf depth, or
 * -1 on an invalid digest_type or an empty/unreadable file. */
int treehash_sample(struct st_treehash * hash_data, char * filename, int digest_type){
	char digest[42];

	off_t filesize = getfilesize(filename);
	int sample_size = mdb_get_int(hash_data, "sample");
	int total_samples;

	/* An empty file would clamp sample_size to 0 below, causing a
	 * divide-by-zero; log10(0) in the depth formula is also undefined.
	 * NOTE(review): assumes getfilesize reports <= 0 for missing files --
	 * confirm against its implementation. */
	if(filesize <= 0){
		printf("FATAL: cannot sample an empty or unreadable file!\n");
		return -1;
	}

	if(sample_size == 0)
 		sample_size = DEFAULT_SAMPLE;
	if(sample_size > filesize){
		sample_size = filesize;
		}
	if(sample_size > MAX_SAMPLE){
		sample_size = MAX_SAMPLE;
		}
	total_samples = (int)roundup((double)filesize / (double)sample_size);

	/* depth = ceil(log2(total_samples)); see the formula note above. */
	int depth  = roundup(log10(total_samples) / log10(2));

	off_t cur_offset = 0;
	int cur_sample = 0;

	if(digest_type == SHA1_DIGEST){
		treehash_set(hash_data, "method", "SHA1");
		treehash_set_int(hash_data, "digestlen", SHA1_DIGEST_LEN);
		}
	else if(digest_type == MD5_DIGEST){
		treehash_set(hash_data, "method", "MD5");
		treehash_set_int(hash_data, "digestlen", MD5_DIGEST_LEN);
		}
	else if(digest_type == MD4_DIGEST){
		treehash_set(hash_data, "method", "MD4");
		treehash_set_int(hash_data, "digestlen", MD4_DIGEST_LEN);
		}
	else{
		printf("FATAL: no valid digest_type chosen!\n");
		return -1;
	}

	treehash_set_off_t(hash_data, "filesize", filesize);
	treehash_set(hash_data, "filename", filename);

	treehash_set_int(hash_data, "sample", sample_size);
	treehash_set_int(hash_data, "total_samples", total_samples);
	treehash_set_int(hash_data, "curdepth", depth);
	treehash_set_int(hash_data, "maxdepth", depth);

	while(cur_offset < filesize){
		/* The final sample may be shorter than sample_size. */
		if(cur_offset + sample_size >= filesize)
			sample_size = filesize - cur_offset;

		if(digest_type == SHA1_DIGEST)
			sha1_file_range(filename, digest, cur_offset, cur_offset + sample_size );
		else if(digest_type == MD5_DIGEST)
			md5_file_range(filename, digest, cur_offset, cur_offset + sample_size );
		else if(digest_type == MD4_DIGEST)
			md4_file_range(filename, digest, cur_offset, cur_offset + sample_size );

		/* sample_size - 1 makes the stored range end inclusive. */
		treehash_setleafhash(hash_data, depth, cur_sample, digest, cur_offset, sample_size - 1);

		cur_sample++;
		cur_offset += sample_size;
	}

	return depth;
}
