package main

import (
	"bufio"
	"fmt"
	"io"
	"os"

	"hellogo/pipeline"
)

// main runs a 4-way external merge sort over small.in, writes the sorted
// stream to small.out, and prints the result to stdout.
func main() {
	const (
		inFile  = "small.in"
		outFile = "small.out"
	)
	sorted := createPipeline(inFile, 512, 4)
	writeToFile(sorted, outFile)
	printFile(outFile)
}
// createPipeline splits the file of fileSize bytes into chunkCount chunks,
// sorts each chunk concurrently in memory, and returns a channel that yields
// the merged, globally sorted stream of ints.
//
// NOTE(review): each chunk opens its own file handle, and the handles are
// deliberately never closed here — pipeline.ReaderSource reads lazily from a
// goroutine, so closing in this function would break the readers. The handles
// live until process exit.
func createPipeline(filename string, fileSize, chunkCount int) <-chan int {
	chunkSize := fileSize / chunkCount
	sortResults := []<-chan int{}
	for i := 0; i < chunkCount; i++ {
		file, err := os.Open(filename)
		if err != nil {
			panic(err)
		}
		// Jump to this chunk's start offset within the file.
		if _, err := file.Seek(int64(i*chunkSize), io.SeekStart); err != nil {
			panic(err)
		}
		// The last chunk also takes the remainder of the integer division,
		// so no tail bytes are dropped when fileSize % chunkCount != 0.
		size := chunkSize
		if i == chunkCount-1 {
			size = fileSize - i*chunkSize
		}
		source := pipeline.ReaderSource(bufio.NewReader(file), size)
		sortResults = append(sortResults, pipeline.InMemSort(source))
	}
	return pipeline.MergeN(sortResults...)
}
// writeToFile drains channel p and writes every int to filename through a
// buffered writer. The deferred Flush runs before the deferred Close, so all
// buffered data reaches the file.
func writeToFile(p <-chan int, filename string) {
	out, err := os.Create(filename)
	if err != nil {
		panic(err)
	}
	defer out.Close()

	buffered := bufio.NewWriter(out)
	defer buffered.Flush()

	pipeline.WriteSink(buffered, p)
}
// printFile reads the ints stored in filename back out of the pipeline's
// on-disk format and prints one per line.
func printFile(filename string) {
	file, err := os.Open(filename)
	if err != nil {
		panic(err)
	}
	defer file.Close()
	// Buffer the reads, consistent with the other readers in this file;
	// a chunk size of -1 means "read until EOF".
	p := pipeline.ReaderSource(bufio.NewReader(file), -1)
	for v := range p {
		fmt.Println(v)
	}
}
// largeSort generates n random ints into large.in through a buffered writer,
// then reads the file back and prints the first 100 values as a spot check.
func largeSort() {
	const filename = "large.in"
	const n = 100000000
	file, err := os.Create(filename)
	if err != nil {
		panic(err)
	}
	defer file.Close()
	p := pipeline.RandomSource(n)
	writer := bufio.NewWriter(file)
	pipeline.WriteSink(writer, p)
	// Flush before reopening so every value is actually on disk.
	writer.Flush()
	file, err = os.Open(filename)
	if err != nil {
		panic(err)
	}
	defer file.Close()
	p = pipeline.ReaderSource(bufio.NewReader(file), -1)
	// BUG FIX: count previously started at 100, so `count >= 100` was true
	// after a single iteration and only one value was printed. Start at 0
	// so the loop prints 100 values as intended.
	count := 0
	for v := range p {
		fmt.Println(v)
		count++
		if count >= 100 {
			break
		}
	}
}

// smallSort generates 50 random ints into small.in, then reads the file back
// and prints every value.
func smallSort() {
	file, err := os.Create("small.in")
	if err != nil {
		panic(err)
	}
	defer file.Close()
	p := pipeline.RandomSource(50)
	// Buffer the writes, consistent with largeSort, and flush before the
	// file is reopened for reading so every value is on disk.
	writer := bufio.NewWriter(file)
	pipeline.WriteSink(writer, p)
	writer.Flush()
	file, err = os.Open("small.in")
	if err != nil {
		panic(err)
	}
	defer file.Close()
	p = pipeline.ReaderSource(bufio.NewReader(file), -1)
	for v := range p {
		fmt.Println(v)
	}
}
