import tokenize
import token
from io import StringIO

# Full-width (Chinese) punctuation mapped to its ASCII counterpart.
标点符号字典 = dict([
    ('（', '('),
    ('）', ')'),
    ('，', ','),
    ('“', '"'),
    ('”', '"'),
    ('‘', "'"),
    ('’', "'"),
    ('：', ':'),
    ('；', ';'),
    ('。', '.'),
    ('【', '['),
    ('】', ']'),
])

# The full-width characters themselves, in insertion order.
中文标点符号 = [*标点符号字典]

def 替换标点符号(source, script=1, 标点映射=None):
    """Replace full-width (Chinese) punctuation in Python source with ASCII.

    Tokenizes *source* and substitutes any ERRORTOKEN whose text is a known
    full-width punctuation character with its ASCII equivalent, then rebuilds
    the source text.

    Parameters:
        source: the source code, as ``str`` or UTF-8 encoded ``bytes``.
        script: kept for backward compatibility; when truthy, ``bytes`` input
            is decoded as UTF-8. A ``str`` is now accepted either way
            (previously ``str`` with the default ``script=1`` raised
            ``AttributeError`` on ``.decode``).
        标点映射: optional mapping of full-width characters to replacements;
            defaults to the module-level ``标点符号字典``.

    Returns:
        The rewritten source as ``str`` (via ``tokenize.untokenize``).

    NOTE(review): relies on the tokenizer emitting ERRORTOKEN for stray
    full-width punctuation; Python >= 3.12's rewritten tokenizer may raise
    an error instead of emitting ERRORTOKEN — confirm on the target version.
    """
    if 标点映射 is None:
        标点映射 = 标点符号字典
    # Only bytes need decoding; str passes through untouched.
    if script and isinstance(source, bytes):
        source = source.decode('UTF-8')
    tokens = tokenize.generate_tokens(StringIO(source).readline)
    tokenlist = [
        # Membership in the mapping equals membership in 中文标点符号
        # (that list is exactly the mapping's keys), but is O(1).
        (ttype, 标点映射[tstring], tbegin, tend, tline)
        if ttype == token.ERRORTOKEN and tstring in 标点映射
        else (ttype, tstring, tbegin, tend, tline)
        for ttype, tstring, tbegin, tend, tline in tokens
    ]
    return tokenize.untokenize(tokenlist)
