# Interface stub: the real implementation is supplied by the judge/runtime.
# Do not implement this class; it only documents the API `crawl` relies on.
class HtmlParser(object):
    def getUrls(self, url):
        """
        Return the list of URLs found on the page at `url`.

        :type url: str
        :rtype: List[str]
        """

import collections
class Solution(object):
    def crawl(self, startUrl, htmlParser):
        """
        Crawl all pages reachable from startUrl that live on the same
        hostname, returning them in the order they were crawled.

        Uses a level-by-level BFS. Bug fixed: the visited set previously
        contained only startUrl and was never extended, so cyclic links
        between same-host pages (A -> B -> A) re-entered the frontier on
        every level — an infinite loop — and pages reachable via multiple
        paths were emitted more than once. URLs are now marked visited at
        discovery time.

        :type startUrl: str
        :type htmlParser: HtmlParser
        :rtype: List[str]
        """
        # Hostname is the third '/'-separated component of "http://host/path".
        hostname = startUrl.split('/')[2]
        visited = {startUrl}
        out = []
        frontier = {startUrl}
        while frontier:
            next_frontier = set()
            for url in frontier:
                out.append(url)
                for link in htmlParser.getUrls(url):
                    # Mark visited as soon as a link is discovered so no
                    # URL can ever be queued (or emitted) twice.
                    if link not in visited and link.split('/')[2] == hostname:
                        visited.add(link)
                        next_frontier.add(link)
            frontier = next_frontier
        return out
