import json

import requests
from bs4 import BeautifulSoup
# Parse a saved category page (c.html) into a nested category tree and
# write it to d.json as UTF-8 JSON.
#
# Structure produced (mirrors the page's nesting):
#   [ {"name": <top category>, "sub": [
#       {"name": <nav item>, "sub": [
#         {"name": <group>, "sub": [ {"name": <leaf>}, ... ]},
#       ]},
#   ]}, ... ]
with open('c.html', encoding='utf-8') as f:
    content = f.read()

soup = BeautifulSoup(content, 'lxml')
tree = soup.find(class_='category-body clearfix')

categories = []
for li_item in tree.find_all(class_='category-item'):
    # One "category-content" block per nav <li>, matched by position below.
    ul_contents = li_item.find_all(class_='category-content')

    top_div = li_item.find_all('div')[0]
    top_name = top_div.find_all('a')[0].get_text().strip()
    print(top_name)

    nav_subs = []
    nav_item_lis = top_div.find_all('ul')[0].find_all('li')
    # enumerate() instead of list.index(): index() returns the FIRST equal
    # element, which mispairs nav items with their content block whenever
    # two nav entries compare equal (and is O(n) per iteration).
    for idx, nav_item_li in enumerate(nav_item_lis):
        nav_name = nav_item_li.find('a').get_text().strip()
        print(nav_name)

        li_content_items = ul_contents[idx].find_all('li')
        groups = []
        # NOTE(review): only the first two content items are exported —
        # TODO confirm this limit is intentional.
        for i in range(2):
            div_wrap = li_content_items[i].find(class_='category-wrap')
            group_name = div_wrap.find_all('a')[0].get_text().strip()
            print(group_name)

            leaves = []
            desc = div_wrap.find_all(class_='category-text-desc')[0]
            for a_desc_item in desc.find_all('a'):
                leaf_name = a_desc_item.get_text().strip()
                print(leaf_name)
                leaves.append({'name': leaf_name})

            groups.append({'name': group_name, 'sub': leaves})

        nav_subs.append({'name': nav_name, 'sub': groups})

    categories.append({'name': top_name, 'sub': nav_subs})

# json.dump guarantees valid JSON (quotes/backslashes in names are escaped,
# commas and brackets are always balanced), unlike the previous hand-built
# string, which emitted broken output for empty sub-lists or quoted names.
with open('d.json', 'w', encoding='utf-8') as out:
    json.dump(categories, out, ensure_ascii=False, indent=2)