// Copyright 2016-2023, Pulumi Corporation.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package provider

import (
	"bytes"
	"encoding/json"
	"fmt"
	"os"
	"regexp"
	"strings"

	"github.com/pulumi/pulumi-terraform-bridge/v3/pkg/tfbridge"
	"github.com/pulumi/pulumi-terraform-bridge/v3/pkg/tfgen"
)

// editRules appends this provider's custom documentation edits to the
// bridge-supplied defaults. Presumably the bridge applies edits in slice
// order, so broad text substitutions come first and file-targeted fixes
// later — TODO confirm ordering guarantee against the tfbridge docs.
func editRules(defaults []tfbridge.DocsEdit) []tfbridge.DocsEdit {
	return append(defaults,
		fixUpCloudFrontPublicKey,
		fixUpEcsServiceName,
		fixUpBucketReplicationConfig,
		fixUpWebAclExample,
		fixUpWebAclRuleGroupAssociationExample,
		// This fixes up strings such as:
		//
		//	name        = "terraform-kinesis-firehose-os",
		//
		// Replacing the above with:
		//
		//	name        = "pulumi-kinesis-firehose-os"
		//
		simpleReplace(
			`"terraform-`,
			`"pulumi-`),
		simpleReplace(
			"If omitted, Terraform will assign a random, unique name.",
			"If omitted, the provider will assign a random, unique name."),
		// Quote plantimestamp() so it reads as a literal, not a function call.
		simpleReplace(
			"plantimestamp()",
			`"plantimestamp()"`,
		),
		simpleReplace("Read more about sensitive data in state.\n\n", ""),
		// De-brand "Terraform resource"/"Terraform data source" at the start
		// of lines, preserving the leading indentation via ${1}.
		reReplace(`(?m)^(\s*)Terraform resource `, "${1}Resource "),
		reReplace(`(?m)^(\s*)Terraform data source `, "${1}Data source "),

		applyReplacementsDotJSON(),

		targetedReplace("iam_role_policies_exclusive.html.markdown", "Terraform", "Pulumi"),
		addResourceNote("iam_role_policies_exclusive.html.markdown",
			"To reliably detect drift between customer managed inline policies listed in this resource and"+
				" actual policies attached to the role in the cloud, you currently need to run Pulumi"+
				" with `pulumi up --refresh`."+
				" See [#4766](https://github.com/pulumi/pulumi-aws/issues/4766) for tracking making"+
				" this work with regular `pulumi up` invocations."),

		targetedReplace("iam_role_policy_attachments_exclusive.html.markdown", "Terraform", "Pulumi"),
		addResourceNote("iam_role_policy_attachments_exclusive.html.markdown",
			"To reliably detect drift between customer managed policies listed in this resource and actual"+
				" policies attached to the role in the cloud, you currently need to run Pulumi with"+
				" `pulumi up --refresh`."+
				" See [#4766](https://github.com/pulumi/pulumi-aws/issues/4766)"+
				" for tracking making this work with regular `pulumi up`"),

		targetedReplace("iam_policy_attachment.html.markdown", "Terraform", "Pulumi"),
	)
}

// addResourceNote returns an edit that inserts markdownNote as a blockquoted
// NOTE immediately below the "# Resource: <name>" header of resourceFile.
func addResourceNote(resourceFile string, markdownNote string) tfbridge.DocsEdit {
	// Compile once when the rule is built rather than on every Edit call.
	header := regexp.MustCompile(`[#] Resource: [\w-]+`)
	return tfbridge.DocsEdit{
		Path: resourceFile,
		Edit: func(_ string, content []byte) ([]byte, error) {
			return header.ReplaceAllFunc(content, func(matching []byte) []byte {
				// Was "> **NOTE:**: %s" — the extra colon rendered a doubled
				// colon in the generated docs.
				note := fmt.Sprintf("\n\n> **NOTE:** %s", markdownNote)
				// Build the result in a fresh slice: matching aliases content,
				// so appending to it in place could clobber the bytes that
				// follow the match.
				out := make([]byte, 0, len(matching)+len(note))
				out = append(out, matching...)
				return append(out, note...)
			}), nil
		},
	}
}

// simpleReplace returns an edit that substitutes every occurrence of from
// with to across all docs files.
func simpleReplace(from, to string) tfbridge.DocsEdit {
	oldText := []byte(from)
	newText := []byte(to)
	return tfbridge.DocsEdit{
		Path: "*",
		Edit: func(_ string, content []byte) ([]byte, error) {
			return bytes.ReplaceAll(content, oldText, newText), nil
		},
	}
}

// targetedReplace returns an edit that substitutes from with to in a single
// docs file, erroring if the text is missing so the edit cannot silently rot.
func targetedReplace(filePath, from, to string) tfbridge.DocsEdit {
	fromBytes := []byte(from)
	toBytes := []byte(to)
	return tfbridge.DocsEdit{
		Path: filePath,
		Edit: func(_ string, content []byte) ([]byte, error) {
			if !bytes.Contains(content, fromBytes) {
				// Hard error to ensure we keep this content up to date
				return nil, fmt.Errorf("could not find text in upstream %s, "+
					"please verify replace content in doc_edits.go: %s", filePath, string(fromBytes))
			}
			return bytes.ReplaceAll(content, fromBytes, toBytes), nil
		},
	}
}

// fixUpCloudFrontPublicKey extends the upstream docs for the CloudFront
// public key `name` argument with guidance to prefer Pulumi autonaming, since
// a user-set name can cause AWS dependency errors when the key's id is
// referenced by another resource (e.g. KeyGroup).
var fixUpCloudFrontPublicKey = targetedReplace("cloudfront_public_key.html.markdown",
	"* `name` - (Optional) The name for the public key. By default generated by this provider.",
	"* `name` - (Optional) The name for the public key. By default generated by this provider. "+
		"Note: Do not set if using the key's id in another resource (e.g. KeyGroup) "+
		"since it will result in a dependency error from AWS. "+
		"Instead, it is recommended to use Pulumi autonaming by leaving this property unset (default behavior) "+
		"or set the `namePrefix` property to allow the provider to autoname the resource.\n")

// fixUpEcsServiceName amends the ecs_service resource docs so that the
// `force_new_deployment` and `triggers` arguments cross-reference each other
// (they must be configured together). Errors hard if the upstream text drifts
// so the edit is kept current.
var fixUpEcsServiceName = tfbridge.DocsEdit{
	Path: "ecs_service.html.markdown",
	Edit: func(path string, content []byte) ([]byte, error) {
		// The bridge cannot differentiate between resource and data source doc files.
		// This check makes sure we're not checking the data source file.
		if bytes.Contains(content, []byte("The ECS Service data source")) {
			return content, nil
		}
		// from/to pairs applied in order to the resource doc text.
		type replace struct {
			from []byte
			to   []byte
		}
		replaces := []replace{
			{
				from: []byte(
					"* `force_new_deployment` - (Optional) Enable to force a new task deployment of the service. " +
						"This can be used to update tasks to use a newer Docker image with same image/tag combination " +
						"(e.g., `myimage:latest`), roll Fargate tasks onto a newer platform version, or immediately deploy " +
						"`ordered_placement_strategy` and `placement_constraints` updates.",
				),
				to: []byte("* `force_new_deployment` - (Optional) Enable to force a new task deployment of the service. " +
					"This can be used to update tasks to use a newer Docker image with same image/tag combination " +
					"(e.g., `myimage:latest`), roll Fargate tasks onto a newer platform version, or immediately deploy " +
					"`ordered_placement_strategy` and `placement_constraints` updates.\n" +
					"When using the forceNewDeployment property you also need to configure the triggers property.\n",
				),
			},
			{
				from: []byte(
					"* `triggers` - (Optional) Map of arbitrary keys and values that, when changed, will trigger " +
						"an in-place update (redeployment). Useful with `plantimestamp()`. See example above.\n",
				),
				to: []byte(
					"* `triggers` - (Optional) Map of arbitrary keys and values that, when changed, will trigger " +
						"an in-place update (redeployment). Useful with `plantimestamp()`. " +
						"When using the triggers property you also need to set the forceNewDeployment property to True.\n",
				),
			},
		}
		for _, text := range replaces {
			if bytes.Contains(content, text.from) {
				content = bytes.ReplaceAll(
					content,
					text.from,
					text.to)
			} else {
				// Hard error to ensure we keep this content up to date
				return nil, fmt.Errorf("could not find text in upstream %s, "+
					"please verify replace content in doc_edits.go: %s", path, string(text.from),
				)
			}
		}
		return content, nil
	},
}

// fixUpWebAclExample drops the upstream "Example Usage" section from the
// wafv2_web_acl docs — presumably because the upstream example does not
// translate cleanly; confirm against the generated output.
var fixUpWebAclExample = tfbridge.DocsEdit{
	Path: "wafv2_web_acl.html.markdown",
	Edit: func(path string, content []byte) ([]byte, error) {
		return tfgen.SkipSectionByHeaderContent(content, func(headerText string) bool {
			return strings.Contains(headerText, "Example Usage")
		})
	},
}

// fixUpWebAclRuleGroupAssociationExample removes two upstream example
// sections from the wafv2_web_acl_rule_group_association docs, matched by
// their header titles.
var fixUpWebAclRuleGroupAssociationExample = tfbridge.DocsEdit{
	Path: "wafv2_web_acl_rule_group_association.html.markdown",
	Edit: func(path string, content []byte) ([]byte, error) {
		return tfgen.SkipSectionByHeaderContent(content, func(headerText string) bool {
			return strings.Contains(headerText, "Custom Rule Group - With Rule Action Overrides") || strings.Contains(headerText, "Custom Rule Group - CloudFront Web ACL")
		})
	},
}

// fixUpBucketReplicationConfig appends three NOTE callouts (priority required
// for multiple destinations, V2 `filter` semantics, and the unsupported
// `existingObjectReplication` parameter) directly under the `rule` argument
// docs for s3_bucket_replication_configuration. toBytes uses camelCase
// property names for Pulumi docs; noteBytes is the upstream snake_case
// version, checked only to verify the upstream source still carries these
// notes before applying the edit.
var fixUpBucketReplicationConfig = tfbridge.DocsEdit{
	Path: "s3_bucket_replication_configuration.html.markdown",
	Edit: func(path string, content []byte) ([]byte, error) {
		fromBytes := []byte(
			"* `rule` - (Required) List of configuration blocks describing the rules managing the replication. " +
				"[See below](#rule).\n")
		toBytes := []byte(
			"* `rule` - (Required) List of configuration blocks describing the rules managing the replication. " +
				"[See below](#rule).\n" +
				"~> **NOTE:** Replication to multiple destination buckets requires that `priority` is specified " +
				"in the `rule` object. If the corresponding rule requires no filter, an empty configuration block " +
				"`filter {}` must be specified." +
				"\n\n~> **NOTE:** Amazon S3's latest version of the replication configuration is V2, " +
				"which includes the `filter` attribute for replication rules.\n\n" +
				"~> **NOTE:** The `existingObjectReplication` parameter is not supported by Amazon S3 at this time " +
				"and should not be included in your `rule` configurations. " +
				"Specifying this parameter will result in `MalformedXML` errors.\n" +
				"To replicate existing objects, please refer to the " +
				"[Replicating existing objects with S3 Batch Replication]" +
				"(https://docs.aws.amazon.com/AmazonS3/latest/userguide/s3-batch-replication-batch.html) " +
				"documentation in the Amazon S3 User Guide.\n",
		)
		noteBytes := []byte(
			"~> **NOTE:** Replication to multiple destination buckets requires that `priority` is specified " +
				"in the `rule` object. If the corresponding rule requires no filter, an empty configuration block " +
				"`filter {}` must be specified." +
				"\n\n~> **NOTE:** Amazon S3's latest version of the replication configuration is V2, " +
				"which includes the `filter` attribute for replication rules.\n\n" +
				"~> **NOTE:** The `existing_object_replication` parameter is not supported by Amazon S3 at this time " +
				"and should not be included in your `rule` configurations. " +
				"Specifying this parameter will result in `MalformedXML` errors.\n" +
				"To replicate existing objects, please refer to the [Replicating existing objects with S3 Batch Replication]" +
				"(https://docs.aws.amazon.com/AmazonS3/latest/userguide/s3-batch-replication-batch.html) " +
				"documentation in the Amazon S3 User Guide.\n",
		)

		// Require both the upstream notes and the `rule` bullet to still be
		// present before rewriting, so upstream drift is caught loudly.
		if bytes.Contains(content, noteBytes) && bytes.Contains(content, fromBytes) {
			content = bytes.ReplaceAll(
				content,
				fromBytes,
				toBytes)
		} else {
			// Hard error to ensure we keep this content up to date
			return nil, fmt.Errorf("could not find NOTE snippets in upstream %s, "+
				"please verify replace content in doc_edits.go: %s", path, string(noteBytes))
		}
		return content, nil
	},
}

// reReplace returns an edit that applies the regular expression from to all
// docs files, replacing each match with to (which may contain $-expansions).
func reReplace(from string, to string) tfbridge.DocsEdit {
	pattern := regexp.MustCompile(from)
	repl := []byte(to)
	return tfbridge.DocsEdit{
		Path: "*",
		Edit: func(_ string, content []byte) ([]byte, error) {
			return pattern.ReplaceAll(content, repl), nil
		},
	}
}

// Apply replacements from `replacements.json` (as search and replace) to read in docs.
// The file is loaded once when the edit rule is built; a post-tfgen hook then
// writes the (possibly extended) replacement set back out so newly elided
// text surfaces as TODO entries for manual review.
func applyReplacementsDotJSON() tfbridge.DocsEdit {
	filePath := "./provider/replacements.json"
	replacements := make(replacementFile)
	replacements.mustReadJSONFile(filePath)

	fmt.Printf("Gathered %d replacements\n", len(replacements))
	// applied counts docs files that had at least one replacement entry, not
	// individual substitutions.
	var applied int

	// Print the number of replacements actually applied, then write out the new
	// `replacements.json` with new TODOs for elided text.
	PostTfgenHook = append(PostTfgenHook, func() {
		fmt.Printf("Applied %d replacements\n", applied)
		var b bytes.Buffer
		m := json.NewEncoder(&b)
		// Keep URLs and markdown snippets readable in the emitted JSON.
		m.SetEscapeHTML(false)
		m.SetIndent("", "  ")
		err := m.Encode(replacements)
		if err != nil {
			panic(err)
		}
		err = os.WriteFile(filePath, b.Bytes(), 0600)
		if err != nil {
			panic(err)
		}
	})

	edit := func(path string, content []byte) ([]byte, error) {
		replacementPath, ok := replacements[path]
		if !ok {
			return content, nil
		}
		applied++
		for _, r := range replacementPath {
			// If no-one has fixed up the TODO, don't replace it. That way the
			// text will show up as elided instead of including the TODO in
			// user facing docs.
			if r.New == elidedText.ReplaceAllLiteralString(r.Old, "TODO") {
				continue
			}

			old, new := []byte(r.Old), []byte(r.New)

			content = bytes.ReplaceAll(content, old, new)
		}

		// Record any remaining "terraform" mentions as fresh TODO entries so
		// they appear in the rewritten replacements.json.
		replacements.checkForTODOs(path, content)

		return content, nil
	}

	return tfbridge.DocsEdit{
		Path: "*",
		Edit: edit,
	}
}

var PostTfgenHook []func()

// replacementFile maps a docs file path to the ordered list of textual
// replacements applied to it, mirroring the layout of replacements.json.
type replacementFile map[string][]replacement

// replacement is a single search-and-replace pair for a docs file.
type replacement struct {
	Old     string `json:"old"`
	New     string `json:"new"`
	// wasUsed is unexported, so it is never serialized to replacements.json.
	// Set by checkForTODOs when recording fresh TODO entries — presumably
	// meant to track which entries actually fired; confirm intended use.
	wasUsed bool
}

// elidedText matches the word "terraform" (capitalized or not) so leftover
// mentions in generated docs can be turned into TODO entries.
var elidedText = regexp.MustCompile("[tT]erraform")

// checkForTODOs scans content for occurrences of "terraform"/"Terraform" that
// survived replacement and records one TODO replacement per affected line, so
// the regenerated replacements.json surfaces them for manual fix-up.
func (r replacementFile) checkForTODOs(path string, content []byte) {
	var end int
	for _, m := range elidedText.FindAllIndex(content, -1) {
		// If we see ```terraform, we skip that. i >= 3 so a code fence at the
		// very start of the file is also skipped (i > 3 missed that case).
		if i := m[0]; i >= 3 && (content[i-1] == '`' &&
			content[i-2] == '`' && content[i-3] == '`') {
			continue
		}
		if m[0] < end {
			// This match is on the same line as the previous
			// match, so we don't need to add it twice.
			continue
		}
		var start int
		start, end = findLine(content, m[0])
		line := string(content[start:end])

		r.addReplacement(path, replacement{
			Old:     line,
			New:     elidedText.ReplaceAllLiteralString(line, "TODO"),
			wasUsed: true,
		})
	}
}

// Checks if file under path already has a replacement specified for a string old.
func (r replacementFile) hasReplacement(path string, old string) bool {
	existing := r[path]
	for i := range existing {
		if existing[i].Old == old {
			return true
		}
	}
	return false
}

// addReplacement records repl for path unless an entry with the same Old
// string already exists. Replacements are applied sequentially, so a second
// entry with the same old string would never fire — skip it.
func (r replacementFile) addReplacement(path string, repl replacement) {
	if !r.hasReplacement(path, repl.Old) {
		r[path] = append(r[path], repl)
	}
}

// mustReadJSONFile loads replacement entries from the JSON file at filePath
// and merges them into r, panicking on any read or decode failure (Must
// convention: only called during rule construction).
func (r replacementFile) mustReadJSONFile(filePath string) {
	raw, err := os.ReadFile(filePath)
	if err != nil {
		panic(err)
	}
	parsed := replacementFile{}
	if err := json.Unmarshal(raw, &parsed); err != nil {
		panic(err)
	}
	// Merge through addReplacement so duplicate Old strings are dropped.
	for path, entries := range parsed {
		for _, entry := range entries {
			r.addReplacement(path, entry)
		}
	}
}

// findLine returns the [start, end) byte offsets of the line containing
// offset i in src. The span includes the line's trailing newline when one is
// present; for a final line with no trailing newline, end is len(src).
func findLine(src []byte, i int) (int, int) {
	start, end := i, i
	// Walk back to the first byte after the previous newline (or offset 0).
	for start > 0 && src[start-1] != '\n' {
		start--
	}
	// Walk forward to the next newline (or end of input).
	for end < len(src) && src[end] != '\n' {
		end++
	}
	if end < len(src) {
		// Include the newline itself. The previous unconditional end+1
		// returned len(src)+1 when the last line had no trailing newline,
		// making the caller's src[start:end] slice panic.
		end++
	}
	return start, end
}

// maybeReadFile reads filePath and returns its contents, or nil when the file
// cannot be read. This is useful for overriding documentation files at
// generation time only.
func maybeReadFile(filePath string) []byte {
	if data, err := os.ReadFile(filePath); err == nil {
		return data
	}
	return nil
}
