/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
import {
    Parser,
    parseDocument,
    DomUtils,
    createDocumentStream,
    parseFeed,
    Tokenizer
} from "./htmlParser2.js";

// Sample HTML document fed to the parseDocument()/createDocumentStream()
// benchmarks. NOTE: the `result.length != 223` checks below depend on this
// exact markup (including the misnested <em>/<div> and the <p> inside <h1>),
// so do not edit the literal.
const html = `
    <!DOCTYPE html>
    <html lang="en">
    <head>
        <meta charset="UTF-8">
        <meta http-equiv="X-UA-Compatible" content="IE=edge">
        <meta name="viewport" content="width=device-width, initial-scale=1.0">
        <title>Document</title>
    </head>
    <style>
        .tagh1{
            background-color: aquamarine;
            color:'blue';
        }
        .one-div{
            line-height: 30px;
        }
    </style>
    <body>
        <h1 class="tagh1">
            kkkk
            <p>hhhhh</p>
        </h1>
        <div style="color:red; height:100px;" class="one-div">cshi</div>
        <img src="https:baidu.com" alt="wwww"/>
        <p>wjdwekfe</p>
        <em>dsjfw
        <div>dksfmjk</div>
        owqkdo</em>
    </body>
    </html>
    `

// Sample RSS 2.0 feed fed to the parseFeed() benchmark. NOTE: the
// `result.length != 646` check below depends on this exact content,
// so do not edit the literal.
const rssFeed = `<?xml version="1.0"?>
    <!-- http://cyber.law.harvard.edu/rss/examples/rss2sample.xml -->
    <rss version="2.0">
       <channel>
          <title>Liftoff News</title>
          <link>http://liftoff.msfc.nasa.gov/</link>
          <description>Liftoff to Space Exploration.</description>
          <language>en-us</language>
          <pubDate>Tue, 10 Jun 2003 04:00:00 GMT</pubDate>
    
          <lastBuildDate>Tue, 10 Jun 2003 09:41:01 GMT</lastBuildDate>
          <docs>http://blogs.law.harvard.edu/tech/rss</docs>
          <generator>Weblog Editor 2.0</generator>
          <managingEditor>editor@example.com</managingEditor>
          <webMaster>webmaster@example.com</webMaster>
          <item>
    
             <title>Star City</title>
             <link>http://liftoff.msfc.nasa.gov/news/2003/news-starcity.asp</link>
             <description>How do Americans get ready to work with Russians aboard the International Space Station? They take a crash course in culture, language and protocol at Russia's &lt;a href="http://howe.iki.rssi.ru/GCTC/gctc_e.htm"&gt;Star City&lt;/a&gt;.</description>
             <pubDate>Tue, 03 Jun 2003 09:39:21 GMT</pubDate>
             <guid>http://liftoff.msfc.nasa.gov/2003/06/03.html#item573</guid>
    
          </item>
       </channel>
    </rss>`

/**
 * Builds an htmlparser2 handler whose `on*` hooks are generated lazily by a
 * Proxy and funnelled through one dispatcher. Dispatch rules (the `$event`
 * name is the hook name without its "on" prefix):
 *  - "onerror"      -> callback(error)
 *  - "onend", "onopentag", "ontext"
 *                   -> callback(null, { $event, startIndex, endIndex, data })
 *                      (for "ontext" `data` is the text string itself,
 *                      for the others it is the raw argument array)
 *  - "onreset"      -> clears the internal event buffer
 *  - "onparserinit" -> captures the parser so its indices can be read later
 *  - "onclosetag"   -> ignored
 *  - anything else  -> buffered in `events` after an index sanity check
 *
 * @param {(error: any, event?: object) => void} callback receives forwarded events
 * @returns {object} Proxy usable as an htmlparser2 handler
 */
function getEventCollector(callback) {
    const events = [];
    let parser;
    function handle(event, data) {
        switch (event) {
            case "onerror": {
                callback(data[0]);
                break;
            }
            case "onend": {
                callback(null, {
                    $event: event.slice(2),
                    startIndex: parser.startIndex,
                    endIndex: parser.endIndex,
                    data,
                });
                break;
            }
            case "onreset": {
                events.length = 0;
                break;
            }
            case "onparserinit": {
                // The parser passes itself first so startIndex/endIndex can
                // be sampled on every subsequent event.
                parser = data[0];
                break;
            }

            case "onopentag": {
                callback(null, {
                    $event: event.slice(2),
                    startIndex: parser.startIndex,
                    endIndex: parser.endIndex,
                    data,
                });
                break;
            }

            case "ontext": {
                callback(null, {
                    $event: event.slice(2),
                    startIndex: parser.startIndex,
                    endIndex: parser.endIndex,
                    data: data[0],
                })
                break;
            }

            case "onclosetag": {
                if (data[0] === "script") {
                    // console.info("htmlparser2--That's it?!");
                }
                break;
            }
            default: {
                // NOTE(review): this merge branch is unreachable — "ontext"
                // is consumed by its own case above. Kept for fidelity with
                // the upstream htmlparser2 event-collector helper.
                const last = events[events.length - 1];
                if (event === "ontext" && last && last.$event === "text") {
                    (last.data[0]) += data[0];
                    last.endIndex = parser.endIndex;
                    break;
                }

                // onattribute reports (name, value, quote); drop a trailing
                // `undefined` quote so buffered events stay compact.
                if (event === "onattribute" && data[2] === undefined) {
                    data.pop();
                }

                // Negated form also trips on NaN indices, not just start > end.
                if (!(parser.startIndex <= parser.endIndex)) {
                    throw new Error(
                        `Invalid start/end index ${parser.startIndex} > ${parser.endIndex}`,
                    );
                }

                events.push({
                    $event: event.slice(2),
                    startIndex: parser.startIndex,
                    endIndex: parser.endIndex,
                    data,
                });
            }
        }
    }

    // Any property access (e.g. handler.oncomment) yields a function that
    // forwards its arguments into handle() — no hooks need to be predeclared.
    return new Proxy(
        {},
        {
            get:
                (_, event) =>
                    (...data) =>
                        handle(event, data),
        },
    );
}

const forNumber = 200;
const bigNumber = 100000;

/**
 * Benchmark: construction cost of `bigNumber` Parser instances sharing one
 * collector handler; afterwards a single write()/end() pass verifies the
 * expected text events were produced.
 */
function Parser_test() {
    let result = '';
    const texts = [];
    const handler = getEventCollector((error, actual) => {
        if (error) {
            result += "解析失败：" + JSON.stringify(error);
            return;
        }
        if (actual.$event === "text") {
            texts.push(actual.data);
        } else if (actual.$event === "end") {
            result = JSON.stringify(texts);
        }
    });
    let parser;
    const begin = Date.now();
    for (let round = 0; round < bigNumber; round++) {
        parser = new Parser(handler);
    }
    const finish = Date.now();
    // Only the last instance is exercised; the loop above measures ctor cost.
    parser.write("china <script type='text/javascript'>const foo = '<<bar>>';</script>");
    parser.end();

    if (result !== `["china ","const foo = '<<bar>>';"]`) {
        throw new Error('Inconsistent with expected results');
    }
    print(`htmlParser2_Parser: ${finish - begin} ms`);
}

/**
 * Benchmark: 2000 complete write()/end() parse cycles, each on a freshly
 * constructed Parser sharing one collector handler.
 */
function Parser_write_end_test() {
    let result = '';
    const texts = [];
    const handler = getEventCollector((error, actual) => {
        if (error) {
            result += "解析失败：" + JSON.stringify(error);
            return;
        }
        if (actual.$event === "text") {
            texts.push(actual.data);
        } else if (actual.$event === "end") {
            result = JSON.stringify(texts);
        }
    });
    const begin = Date.now();
    for (let round = 0; round < 2000; round++) {
        const parser = new Parser(handler);
        parser.write("china <script type='text/javascript'>const foo = '<<bar>>';</script>");
        parser.end();
    }
    const finish = Date.now();
    if (!result) {
        throw new Error('Inconsistent with expected results');
    }
    print(`htmlParser2_Parser_Write_end: ${finish - begin} ms`);
}

/**
 * Benchmark: `forNumber` parseComplete() calls (reset + write + end in one
 * shot) on a single Parser instance.
 */
function parseComplete_test() {
    let result = '';
    const texts = [];
    const handler = getEventCollector((error, actual) => {
        if (error) {
            result += "解析失败：" + JSON.stringify(error);
            return;
        }
        if (actual.$event === "text") {
            texts.push(actual.data);
        } else if (actual.$event === "end") {
            result = JSON.stringify(texts);
        }
    });
    const parser = new Parser(handler);
    const begin = Date.now();
    for (let round = 0; round < forNumber; round++) {
        parser.parseComplete("Xyz <script type='text/javascript'>const foo = '<<bar>>';</script>");
    }
    const finish = Date.now();
    if (!result) {
        throw new Error('Inconsistent with expected results');
    }
    print(`htmlParser2_ParserComplete: ${finish - begin} ms`);
}

/**
 * Benchmark: `forNumber` parseDocument() calls over the sample HTML, then a
 * DomUtils inspection of the <style> element; the accumulated report must be
 * exactly 223 characters.
 */
function parseDocument_test() {
    let dom;
    const begin = Date.now();
    for (let round = 0; round < forNumber; round++) {
        dom = parseDocument(html);
    }
    const finish = Date.now();
    const styleNodes = DomUtils.getElementsByTagName('style', dom);
    const first = styleNodes[0];
    let result = "text:" + DomUtils.textContent(styleNodes) + "\r\n";
    result += "isTag:" + DomUtils.isTag(first) + "\r\n";
    result += "isCDATA:" + DomUtils.isCDATA(first) + "\r\n";
    result += "isText:" + DomUtils.isText(first) + "\r\n";
    result += "isComment:" + DomUtils.isComment(first) + "\r\n";

    if (result.length !== 223) {
        throw new Error('Inconsistent with expected results');
    }
    print(`htmlParser2_ParseDocument: ${finish - begin} ms`);
}

/**
 * Benchmark: `forNumber` parseFeed() calls over the sample RSS feed; the
 * serialized feed must be exactly 646 characters.
 */
function parseFeed_test() {
    let feed;
    const begin = Date.now();
    for (let round = 0; round < forNumber; round++) {
        feed = parseFeed(rssFeed);
    }
    const finish = Date.now();
    const result = feed ? JSON.stringify(feed) : "feed is null";
    if (result.length !== 646) {
        throw new Error('Inconsistent with expected results');
    }
    print(`htmlParser2_ParseFeed: ${finish - begin} ms`);
    // print(`${result.length}`)
}

/**
 * Benchmark: construction cost of `bigNumber` streaming document parsers;
 * only the last one is fed the sample HTML, so its completion callback runs
 * once and the report must be exactly 223 characters.
 */
function createDocumentStream_test() {
    let result = "";
    const onDocumentDone = (error, dom) => {
        if (error) {
            result = JSON.stringify(error);
            return;
        }
        const styleNodes = DomUtils.getElementsByTagName('style', dom);
        const first = styleNodes[0];
        result += "text:" + DomUtils.textContent(styleNodes) + "\r\n";
        result += "isTag:" + DomUtils.isTag(first) + "\r\n";
        result += "isCDATA:" + DomUtils.isCDATA(first) + "\r\n";
        result += "isText:" + DomUtils.isText(first) + "\r\n";
        result += "isComment:" + DomUtils.isComment(first) + "\r\n";
    };
    let parser;
    const begin = Date.now();
    for (let round = 0; round < bigNumber; round++) {
        parser = createDocumentStream(onDocumentDone);
    }
    const finish = Date.now();
    parser.write(html);
    parser.end();
    if (result.length !== 223) {
        throw new Error('Inconsistent with expected results');
    }
    print(`htmlParser2_CreateDocumentStream: ${finish - begin} ms`);
    // print(`${result.length}`)
}

/**
 * Benchmark: construction cost of `bigNumber` Tokenizer instances; a final
 * write()/end() pass over a small document must produce exactly 46
 * characters of ontext position logging.
 */
function Tokenizer_test() {
    let result = "";
    const noop = () => {};
    // Only ontext records anything; every other hook is a shared no-op.
    const callbacks = {
        onattribdata: noop,
        onattribentity: noop,
        onattribend: noop,
        onattribname: noop,
        oncdata: noop,
        onclosetag: noop,
        oncomment: noop,
        ondeclaration: noop,
        onend: noop,
        onopentagend: noop,
        onopentagname: noop,
        onprocessinginstruction: noop,
        onselfclosingtag: noop,
        ontext(start, endIndex) {
            result += `start:${start}\r\nendIndex:${endIndex}\r\n`;
        },
        ontextentity: noop,
    };
    let tokenizer;
    const begin = Date.now();
    for (let round = 0; round < bigNumber; round++) {
        tokenizer = new Tokenizer({
            xmlMode: true,
            decodeEntities: true,
        }, callbacks);
    }
    const finish = Date.now();
    tokenizer.write('<html><head><title>My Title</title></head><body><h1>Hello World!</h1></body></html');
    tokenizer.end();
    if (result.length !== 46) {
        throw new Error('Inconsistent with expected results');
    }
    print(`htmlParser2_Tokenizer: ${finish - begin} ms`);
    // print(`${result.length}`)
}

/**
 * Benchmark: `forNumber` write()/end() cycles on one Tokenizer instance;
 * the ontext hook must have logged at least once.
 */
function Tokenizer_write_end_test() {
    let result = "";
    const noop = () => {};
    // Only ontext records anything; every other hook is a shared no-op.
    const callbacks = {
        onattribdata: noop,
        onattribentity: noop,
        onattribend: noop,
        onattribname: noop,
        oncdata: noop,
        onclosetag: noop,
        oncomment: noop,
        ondeclaration: noop,
        onend: noop,
        onopentagend: noop,
        onopentagname: noop,
        onprocessinginstruction: noop,
        onselfclosingtag: noop,
        ontext(start, endIndex) {
            result += `start:${start}\r\nendIndex:${endIndex}\r\n`;
        },
        ontextentity: noop,
    };
    const tokenizer = new Tokenizer({
        xmlMode: true,
        decodeEntities: true,
    }, callbacks);

    const begin = Date.now();
    for (let round = 0; round < forNumber; round++) {
        tokenizer.write('<html><head><title>My Title</title></head><body><h1>Hello World!</h1></body></html');
        tokenizer.end();
    }
    const finish = Date.now();
    if (!result) {
        throw new Error('Inconsistent with expected results');
    }
    print(`htmlParser2_Tokenizer_write_end: ${finish - begin} ms`);
}

/**
 * Benchmark: `bigNumber` pause()/resume() toggles on an already-written
 * Parser; end() afterwards must still yield the expected text events.
 */
function Parser_pause_resume_test() {
    let result = '';
    const texts = [];
    const handler = getEventCollector((error, actual) => {
        if (error) {
            result += "解析失败：" + JSON.stringify(error);
            return;
        }
        if (actual.$event === "text") {
            texts.push(actual.data);
        } else if (actual.$event === "end") {
            result = JSON.stringify(texts);
        }
    });
    const parser = new Parser(handler);
    parser.write("china <script type='text/javascript'>const foo = '<<bar>>';</script>");
    const begin = Date.now();
    for (let round = 0; round < bigNumber; round++) {
        parser.pause();
        parser.resume();
    }
    const finish = Date.now();
    parser.end();
    if (result !== `["china ","const foo = '<<bar>>';"]`) {
        throw new Error('Inconsistent with expected results');
    }
    print(`htmlParser2_Parser_Pause_resume: ${finish - begin} ms`);
    // print(`${result}`)
}

/**
 * Benchmark: `bigNumber` reset() calls on an already-written Parser;
 * end() afterwards must still yield the expected text events.
 */
function Parser_reset_test() {
    let result = '';
    const texts = [];
    const handler = getEventCollector((error, actual) => {
        if (error) {
            result += "解析失败：" + JSON.stringify(error);
            return;
        }
        if (actual.$event === "text") {
            texts.push(actual.data);
        } else if (actual.$event === "end") {
            result = JSON.stringify(texts);
        }
    });
    const parser = new Parser(handler);
    parser.write("china <script type='text/javascript'>const foo = '<<bar>>';</script>");
    const begin = Date.now();
    for (let round = 0; round < bigNumber; round++) {
        parser.reset();
    }
    const finish = Date.now();
    parser.end();
    if (result !== `["china ","const foo = '<<bar>>';"]`) {
        throw new Error('Inconsistent with expected results');
    }
    print(`htmlParser2_Parser_Reset: ${finish - begin} ms`);
    // print(`${result}`)
}

/**
 * Benchmark: `bigNumber` pause()/resume() toggles on an already-written
 * Tokenizer; end() afterwards must produce exactly 46 characters of ontext
 * position logging.
 */
function Tokenizer_pause_resume_test() {
    let result = "";
    const noop = () => {};
    // Only ontext records anything; every other hook is a shared no-op.
    const callbacks = {
        onattribdata: noop,
        onattribentity: noop,
        onattribend: noop,
        onattribname: noop,
        oncdata: noop,
        onclosetag: noop,
        oncomment: noop,
        ondeclaration: noop,
        onend: noop,
        onopentagend: noop,
        onopentagname: noop,
        onprocessinginstruction: noop,
        onselfclosingtag: noop,
        ontext(start, endIndex) {
            result += `start:${start}\r\nendIndex:${endIndex}\r\n`;
        },
        ontextentity: noop,
    };
    const tokenizer = new Tokenizer({
        xmlMode: true,
        decodeEntities: true,
    }, callbacks);

    tokenizer.write('<html><head><title>My Title</title></head><body><h1>Hello World!</h1></body></html');
    const begin = Date.now();
    for (let round = 0; round < bigNumber; round++) {
        tokenizer.pause();
        tokenizer.resume();
    }
    const finish = Date.now();
    tokenizer.end();
    if (result.length !== 46) {
        throw new Error('Inconsistent with expected results');
    }
    print(`htmlParser2_Tokenizer_pause_resume: ${finish - begin} ms`);
    // print(`${result.length}`)
}

/**
 * Benchmark: `bigNumber` reset() calls on an already-written Tokenizer;
 * end() afterwards must produce exactly 46 characters of ontext position
 * logging.
 */
function Tokenizer_reset_test() {
    let result = "";
    const noop = () => {};
    // Only ontext records anything; every other hook is a shared no-op.
    const callbacks = {
        onattribdata: noop,
        onattribentity: noop,
        onattribend: noop,
        onattribname: noop,
        oncdata: noop,
        onclosetag: noop,
        oncomment: noop,
        ondeclaration: noop,
        onend: noop,
        onopentagend: noop,
        onopentagname: noop,
        onprocessinginstruction: noop,
        onselfclosingtag: noop,
        ontext(start, endIndex) {
            result += `start:${start}\r\nendIndex:${endIndex}\r\n`;
        },
        ontextentity: noop,
    };
    const tokenizer = new Tokenizer({
        xmlMode: true,
        decodeEntities: true,
    }, callbacks);

    tokenizer.write('<html><head><title>My Title</title></head><body><h1>Hello World!</h1></body></html');
    const begin = Date.now();
    for (let round = 0; round < bigNumber; round++) {
        tokenizer.reset();
    }
    const finish = Date.now();
    tokenizer.end();
    if (result.length !== 46) {
        throw new Error('Inconsistent with expected results');
    }
    print(`htmlParser2_Tokenizer_Reset: ${finish - begin} ms`);
    // print(`${result.length}`)
}

// All benchmarks in execution order: run once to pre-heat the JIT, wait for
// compilation to settle, then run the measured pass.
const benchmarks = [
    Parser_test,
    Parser_write_end_test,
    parseComplete_test,
    parseDocument_test,
    parseFeed_test,
    createDocumentStream_test,
    Tokenizer_test,
    Tokenizer_write_end_test,
    Parser_pause_resume_test,
    Parser_reset_test,
    Tokenizer_pause_resume_test,
    Tokenizer_reset_test,
];

const loopCountForPreheat = 1;
for (let round = 0; round < loopCountForPreheat; round++) {
    for (const bench of benchmarks) {
        bench();
    }
}

ArkTools.waitAllJitCompileFinish();

for (const bench of benchmarks) {
    bench();
}