// Copyright 2013 The golog2bq Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build appengine

package log2bq

import (
	"appengine"
	"appengine/file"
	"appengine/log"
	"compress/gzip"
	"fmt"

	"code.google.com/p/google-api-go-client/bigquery/v2"
)

// Logger holds the BigQuery and Cloud Storage settings used to export App
// Engine request logs. You don't need this unless you plan on writing custom
// HTTP handlers.
type Logger struct {
	DatasetID        string // BigQuery dataset receiving the log table
	ProjectID        string // Google Cloud project that owns the dataset and jobs
	WriteDisposition string // BigQuery load-job write disposition (passed through verbatim)
	GSBucketName     string // Cloud Storage bucket for the intermediate gzip'd CSV (used by Log2gs)
}

// InitLogger initializes the globla Logger used by the builtin HTTP handlers.
func InitLogger(datasetID, projectID, writeDisposition, gsBucketName string) {
	globalLogger = &Logger{
		DatasetID:        datasetID,
		ProjectID:        projectID,
		WriteDisposition: writeDisposition,
		GSBucketName:     gsBucketName,
	}
}

// newLog2bqConfig builds the description of a BigQuery load job that ingests
// CSV log data (from the optional srcURI Cloud Storage objects, or from
// attached media when none are given) into a table named after the app ID.
func (l *Logger) newLog2bqConfig(c appengine.Context, srcURI ...string) *bigquery.Job {
	// Column layout of the CSV rows; presumably mirrors what log2csv emits.
	schema := &bigquery.TableSchema{
		Fields: []*bigquery.TableFieldSchema{
			{Name: "AppID", Type: "STRING"},
			{Name: "VersionID", Type: "STRING"},
			{Name: "RequestID", Type: "STRING"},
			{Name: "IP", Type: "STRING"},
			{Name: "Nickname", Type: "STRING"},
			{Name: "AppEngineRelease", Type: "STRING"},
			{Name: "StartTime", Type: "TIMESTAMP"},
			{Name: "EndTime", Type: "TIMESTAMP"},
			{Name: "Latency", Type: "INTEGER"},
			{Name: "MCycles", Type: "INTEGER"},
			{Name: "Method", Type: "STRING"},
			{Name: "Resource", Type: "STRING"},
			{Name: "HTTPVersion", Type: "STRING"},
			{Name: "Status", Type: "INTEGER"},
			{Name: "ResponseSize", Type: "INTEGER"},
			{Name: "Referrer", Type: "STRING"},
			{Name: "UserAgent", Type: "STRING"},
			{Name: "URLMapEntry", Type: "STRING"},
			{Name: "Combined", Type: "STRING"},
			{Name: "Host", Type: "STRING"},
			{Name: "Cost", Type: "FLOAT"},
			{Name: "TaskQueueName", Type: "STRING"},
			{Name: "TaskName", Type: "STRING"},
			{Name: "WasLoadingRequest", Type: "BOOLEAN"},
			{Name: "PendingTime", Type: "INTEGER"},
			{Name: "Finished", Type: "BOOLEAN"},
		},
	}
	return &bigquery.Job{
		Configuration: &bigquery.JobConfiguration{
			Load: &bigquery.JobConfigurationLoad{
				DestinationTable: &bigquery.TableReference{
					DatasetId: l.DatasetID,
					ProjectId: l.ProjectID,
					TableId:   fixTableName(appengine.AppID(c)),
				},
				SourceUris:       srcURI,
				WriteDisposition: l.WriteDisposition,
				Schema:           schema,
			},
		},
	}
}

// Log2gs will gzip the CSV results of the log.Query and save it to Google Cloud
// Storage. It returns the absolute name of the created object; callers
// typically feed that name to Gs2bq. Returns ErrInvalidGSBucket when the
// Logger has no bucket configured.
func (l *Logger) Log2gs(c appengine.Context, q *log.Query) (string, error) {
	if l.GSBucketName == "" {
		return "", ErrInvalidGSBucket
	}
	o := &file.CreateOptions{
		MIMEType:   "application/gzip",
		BucketName: l.GSBucketName,
	}
	// Include the query's time window in the object name when one is set, so
	// repeated exports of different windows don't overwrite each other.
	var n string
	if q.StartTime.IsZero() {
		n = fmt.Sprintf("%s.log.csv.gz", appengine.AppID(c))
	} else {
		n = fmt.Sprintf("%s_%d-%d.log.csv.gz", appengine.AppID(c), q.StartTime.Unix(), q.EndTime.Unix())
	}
	f, fn, err := file.Create(c, n, o)
	if err != nil {
		return fn, err
	}
	gw := gzip.NewWriter(f)
	// On failure, still close both writers (gzip first, then the file) so the
	// GCS writer isn't leaked; the original error takes precedence.
	if err := log2csv(gw, c, q); err != nil {
		gw.Close()
		f.Close()
		return fn, err
	}
	// gw.Close flushes the gzip trailer and must happen before f.Close.
	if err := gw.Close(); err != nil {
		f.Close()
		return fn, err
	}
	return fn, f.Close()
}

// Gs2bq will insert a BigQuery job to ingest log data previously saved to
// Google Cloud Storage.
func (l *Logger) Gs2bq(c appengine.Context, fn string) error {
	transport, err := GAETransport(c, bigquery.BigqueryScope)
	if err != nil {
		return err
	}
	httpClient, err := transport.Client()
	if err != nil {
		return err
	}
	service, err := bigquery.New(httpClient)
	if err != nil {
		return err
	}
	// Load the previously written Cloud Storage object into BigQuery.
	_, err = service.Jobs.Insert(l.ProjectID, l.newLog2bqConfig(c, gsNameFix(fn))).Do()
	return err
}

// Log2bq will gzip the CSV results of the log.Query and ingest it directly to
// BigQuery by attaching the data as the load job's media.
func (l *Logger) Log2bq(c appengine.Context, q *log.Query) error {
	transport, err := GAETransport(c, bigquery.BigqueryScope)
	if err != nil {
		return err
	}
	httpClient, err := transport.Client()
	if err != nil {
		return err
	}
	service, err := bigquery.New(httpClient)
	if err != nil {
		return err
	}
	// Buffer the CSV log rows so they can be uploaded with the job.
	data := newLogData()
	if err := log2csv(data, c, q); err != nil {
		return err
	}
	if err := data.doneWriting(); err != nil {
		return err
	}
	// No source URIs: the job reads the attached media instead.
	call := service.Jobs.Insert(l.ProjectID, l.newLog2bqConfig(c))
	call.Media(data)
	_, err = call.Do()
	return err
}
