import datetime
import random
import re
from urllib.error import HTTPError, URLError
from urllib.parse import urljoin
from urllib.request import urlopen

from bs4 import BeautifulSoup
# Seed the PRNG.  random.seed() with no argument already seeds from system
# time / OS entropy; passing a datetime object (as the original did) raises
# TypeError on Python 3.11+, where seeds are restricted to
# None/int/float/str/bytes/bytearray.
random.seed()

# URLs of pages seen so far (declared `global` inside getLinks).
pages = set()
def getLinks(Url):
    """Fetch *Url* and return all ``<a>`` tags whose href matches the
    notice-content pattern of the target site.

    Returns an empty list when the page cannot be fetched, so a dead link
    ends the crawl gracefully instead of crashing it.
    """
    global pages  # kept from the original; never written here — TODO confirm intent
    # The original source had a dangling, empty `try:` here (a SyntaxError);
    # it was clearly meant to guard the network fetch, so do that.
    try:
        html = urlopen(Url)
    except (HTTPError, URLError):
        return []
    # Name the parser explicitly: bare BeautifulSoup(html) emits a warning
    # and picks whichever parser happens to be installed.
    bsObj = BeautifulSoup(html, "html.parser")
    # Raw string, dots escaped: the original used "index.php\/..." where
    # `\/` is an invalid string escape and the `.` matched any character.
    return bsObj.findAll(
        'a', href=re.compile(r"index\.php/notice/c_notice/content/"))


BASE_URL = "http://localhost/54/ksyc1200/"

links = getLinks(BASE_URL)
s = 0  # number of article pages visited so far
while len(links) > 0:
    # Follow a randomly chosen matching link from the current page.
    newArticle = links[random.randint(0, len(links) - 1)].attrs['href']
    print(newArticle)
    # BUG FIX: the original did `s += s`, which leaves s at 0 forever,
    # so the 10-page cap below could never trigger.
    s += 1
    if s > 10:
        break
    # The scraped hrefs are site-relative ("index.php/..."); resolve them
    # against the base so urlopen receives an absolute URL.
    links = getLinks(urljoin(BASE_URL, newArticle))
