{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/proxy.py","language":"python","identifier":"GetIPPOOLS","parameters":"(num)","argument_list":"","return_statement":"return IPPOOL","docstring":"#\u81ea\u5df1\u83b7\u53d6\u7684ip\n    IPPOOLS1=urllib.request.urlopen(\"http:\/\/127.0.0.1:8000\/?types=0&count=20&country=%E5%9B%BD%E5%86%85\").read().decode(\"utf-8\",'ignore')\n    IPPOOLS2=re.findall('\\\"(\\d+\\.\\d+\\.\\d+\\.\\d+\\\"\\,\\s*\\d+)',IPPOOLS1)\n    IPPOOL=[i.replace('\", ',':') for i in IPPOOLS2]","docstring_summary":"#\u81ea\u5df1\u83b7\u53d6\u7684ip\n    IPPOOLS1=urllib.request.urlopen(\"http:\/\/127.0.0.1:8000\/?types=0&count=20&country=%E5%9B%BD%E5%86%85\").read().decode(\"utf-8\",'ignore')\n    IPPOOLS2=re.findall('\\\"(\\d+\\.\\d+\\.\\d+\\.\\d+\\\"\\,\\s*\\d+)',IPPOOLS1)\n    IPPOOL=[i.replace('\", ',':') for i in IPPOOLS2]","docstring_tokens":["#\u81ea\u5df1\u83b7\u53d6\u7684ip","IPPOOLS1","=","urllib",".","request",".","urlopen","(","http",":","\/\/","127",".","0",".","0",".","1",":","8000","\/","?types","=","0&count","=","20&country","=","%E5%9B%BD%E5%86%85",")",".","read","()",".","decode","(","utf","-","8","ignore",")","IPPOOLS2","=","re",".","findall","(","\\","(","\\","d","+","\\",".","\\","d","+","\\",".","\\","d","+","\\",".","\\","d","+","\\","\\","\\","s","*","\\","d","+",")","IPPOOLS1",")","IPPOOL","=","[","i",".","replace","(",":",")","for","i","in","IPPOOLS2","]"],"function":"def GetIPPOOLS(num):\n    #\u5927\u8c61\u4ee3\u7406\u4e70\u7684ip,5\u514320000\u4e2a\uff0c\u6bcf\u5341\u4e2a\u5dee\u4e0d\u591a\u6709\u4e00\u4e2a\u80fd\u7528\n    IPPOOL=urllib.request.urlopen(\"http:\/\/tpv.daxiangdaili.com\/ip\/?tid=559480480576119&num=\"+str(num)+\"&operator=1&filter=on&protocol=http&category=2&delay=1\").read().decode(\"utf-8\",\"ignore\").split('\\r\\n')\n    '''\n    #\u81ea\u5df1\u83b7\u53d6\u7684ip\n    IPPOOLS1=urllib.request.urlopen(\"http:\/\/127.0.0.1:8000\/?types=0&count=20&country=%E5%9B%BD%E5%86%85\").read().decode(\"utf-8\",'ignore')\n    IPPOOLS2=re.findall('\\\"(\\d+\\.\\d+\\.\\d+\\.\\d+\\\"\\,\\s*\\d+)',IPPOOLS1)\n    IPPOOL=[i.replace('\", ',':') for i in IPPOOLS2]\n    '''\n    return IPPOOL","function_tokens":["def","GetIPPOOLS","(","num",")",":","#\u5927\u8c61\u4ee3\u7406\u4e70\u7684ip,5\u514320000\u4e2a\uff0c\u6bcf\u5341\u4e2a\u5dee\u4e0d\u591a\u6709\u4e00\u4e2a\u80fd\u7528","IPPOOL","=","urllib",".","request",".","urlopen","(","\"http:\/\/tpv.daxiangdaili.com\/ip\/?tid=559480480576119&num=\"","+","str","(","num",")","+","\"&operator=1&filter=on&protocol=http&category=2&delay=1\"",")",".","read","(",")",".","decode","(","\"utf-8\"",",","\"ignore\"",")",".","split","(","'\\r\\n'",")","return","IPPOOL"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/proxy.py#L17-L26"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/proxy.py","language":"python","identifier":"initIPPOOLS","parameters":"(rconn)","argument_list":"","return_statement":"","docstring":"\u628a\u6709\u6548\u7684IP\u5b58\u5165\tREDIS\u6570\u636e\u5e93","docstring_summary":"\u628a\u6709\u6548\u7684IP\u5b58\u5165\tREDIS\u6570\u636e\u5e93","docstring_tokens":["\u628a\u6709\u6548\u7684IP\u5b58\u5165","REDIS\u6570\u636e\u5e93"],"function":"def initIPPOOLS(rconn):\n    \"\"\"\u628a\u6709\u6548\u7684IP\u5b58\u5165\tREDIS\u6570\u636e\u5e93\"\"\"\n\n    ipNum=len(rconn.keys('IP*'))\n    if ipNum<IPPOOLNUM:\n        IPPOOLS=GetIPPOOLS(IPPOOLNUM)\n        for ipall in IPPOOLS:\n            try:\n                ip=ipall.split(':')[0]\n                port=ipall.split(':')[1]\n                telnetlib.Telnet(ip,port=port,timeout=2) #\u68c0\u9a8c\u4ee3\u7406ip\u662f\u5426\u6709\u6548\n            except:\n                logger.warning(\"The ip is not available !( IP:%s )\" % ipall)\n            else:\n                logger.warning(\"Get ip Success!( IP:%s )\" % ipall)\n                rconn.set(\"IP:%s:10\"%(ipall),ipall)     #10 is status\n    else:\n        logger.warning(\"The number of  the IP is %s!\" % str(ipNum))","function_tokens":["def","initIPPOOLS","(","rconn",")",":","ipNum","=","len","(","rconn",".","keys","(","'IP*'",")",")","if","ipNum","<","IPPOOLNUM",":","IPPOOLS","=","GetIPPOOLS","(","IPPOOLNUM",")","for","ipall","in","IPPOOLS",":","try",":","ip","=","ipall",".","split","(","':'",")","[","0","]","port","=","ipall",".","split","(","':'",")","[","1","]","telnetlib",".","Telnet","(","ip",",","port","=","port",",","timeout","=","2",")","#\u68c0\u9a8c\u4ee3\u7406ip\u662f\u5426\u6709\u6548","except",":","logger",".","warning","(","\"The ip is not available !( IP:%s )\"","%","ipall",")","else",":","logger",".","warning","(","\"Get ip Success!( IP:%s )\"","%","ipall",")","rconn",".","set","(","\"IP:%s:10\"","%","(","ipall",")",",","ipall",")","#10 is status","else",":","logger",".","warning","(","\"The number of  the IP is %s!\"","%","str","(","ipNum",")",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/proxy.py#L28-L45"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/proxy.py","language":"python","identifier":"updateIPPOOLS","parameters":"(rconn,ip,status,flag=0)","argument_list":"","return_statement":"","docstring":"update status","docstring_summary":"update status","docstring_tokens":["update","status"],"function":"def updateIPPOOLS(rconn,ip,status,flag=0): # 0\u4ee3\u8868\u5bf9status\u51cf\u4e00\uff0c-1\u4ee3\u8868\u51cf2\uff0c1\u4ee3\u8868\u52a01\n    if int(status) < 1:\n        removeIPPOOLS(rconn,ip,status)\n        return\n    '''update status'''\n    if flag == 1: #+status\n        if int(status) < 10:\n            rconn.delete('IP:'+ ip + ':' + status)\n            status = int(status) + 1\n            rconn.set(\"IP:%s:%s\"%(ip,str(status)),ip)\n    elif flag == -1:\n        rconn.delete('IP:'+ ip + ':' + status)\n        status = int(status) - 2\n        rconn.set(\"IP:%s:%s\"%(ip,str(status)),ip)\n    else:\n        rconn.delete('IP:'+ ip + ':' + status)\n        status = int(status) - 1\n        rconn.set(\"IP:%s:%s\"%(ip,str(status)),ip)","function_tokens":["def","updateIPPOOLS","(","rconn",",","ip",",","status",",","flag","=","0",")",":","# 0\u4ee3\u8868\u5bf9status\u51cf\u4e00\uff0c-1\u4ee3\u8868\u51cf2\uff0c1\u4ee3\u8868\u52a01","if","int","(","status",")","<","1",":","removeIPPOOLS","(","rconn",",","ip",",","status",")","return","if","flag","==","1",":","#+status","if","int","(","status",")","<","10",":","rconn",".","delete","(","'IP:'","+","ip","+","':'","+","status",")","status","=","int","(","status",")","+","1","rconn",".","set","(","\"IP:%s:%s\"","%","(","ip",",","str","(","status",")",")",",","ip",")","elif","flag","==","-","1",":","rconn",".","delete","(","'IP:'","+","ip","+","':'","+","status",")","status","=","int","(","status",")","-","2","rconn",".","set","(","\"IP:%s:%s\"","%","(","ip",",","str","(","status",")",")",",","ip",")","else",":","rconn",".","delete","(","'IP:'","+","ip","+","':'","+","status",")","status","=","int","(","status",")","-","1","rconn",".","set","(","\"IP:%s:%s\"","%","(","ip",",","str","(","status",")",")",",","ip",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/proxy.py#L47-L64"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/cookie.py","language":"python","identifier":"initCookie","parameters":"(rconn, spiderName)","argument_list":"","return_statement":"","docstring":"\u83b7\u53d6\u6240\u6709\u8d26\u53f7\u7684Cookies\uff0c\u5b58\u5165Redis\u3002\u5982\u679cRedis\u5df2\u6709\u8be5\u8d26\u53f7\u7684Cookie\uff0c\u5219\u4e0d\u518d\u83b7\u53d6\u3002","docstring_summary":"\u83b7\u53d6\u6240\u6709\u8d26\u53f7\u7684Cookies\uff0c\u5b58\u5165Redis\u3002\u5982\u679cRedis\u5df2\u6709\u8be5\u8d26\u53f7\u7684Cookie\uff0c\u5219\u4e0d\u518d\u83b7\u53d6\u3002","docstring_tokens":["\u83b7\u53d6\u6240\u6709\u8d26\u53f7\u7684Cookies\uff0c\u5b58\u5165Redis\u3002\u5982\u679cRedis\u5df2\u6709\u8be5\u8d26\u53f7\u7684Cookie\uff0c\u5219\u4e0d\u518d\u83b7\u53d6\u3002"],"function":"def initCookie(rconn, spiderName):\n    \"\"\" \u83b7\u53d6\u6240\u6709\u8d26\u53f7\u7684Cookies\uff0c\u5b58\u5165Redis\u3002\u5982\u679cRedis\u5df2\u6709\u8be5\u8d26\u53f7\u7684Cookie\uff0c\u5219\u4e0d\u518d\u83b7\u53d6\u3002 \"\"\"\n    for zhihu in myZhiHu:\n        if rconn.get(\"%s:Cookies:%s--%s\" % (spiderName, zhihu[0], zhihu[1])) is None:  # 'zhihuspider:Cookies:\u8d26\u53f7--\u5bc6\u7801'\uff0c\u4e3aNone\u5373\u4e0d\u5b58\u5728\u3002\n            cookie = getCookie(zhihu[0], zhihu[1],zhihu[2])\n            if len(cookie) > 0:\n                rconn.set(\"%s:Cookies:%s--%s\" % (spiderName, zhihu[0], zhihu[1]), cookie)\n    cookieNum = str(rconn.keys()).count(\"zhihuspider:Cookies\")\n    logger.warning(\"The num of the cookies is %s\" % cookieNum)\n    if cookieNum == 0:\n        logger.warning('Stopping...')\n        os.system(\"pause\")","function_tokens":["def","initCookie","(","rconn",",","spiderName",")",":","for","zhihu","in","myZhiHu",":","if","rconn",".","get","(","\"%s:Cookies:%s--%s\"","%","(","spiderName",",","zhihu","[","0","]",",","zhihu","[","1","]",")",")","is","None",":","# 'zhihuspider:Cookies:\u8d26\u53f7--\u5bc6\u7801'\uff0c\u4e3aNone\u5373\u4e0d\u5b58\u5728\u3002","cookie","=","getCookie","(","zhihu","[","0","]",",","zhihu","[","1","]",",","zhihu","[","2","]",")","if","len","(","cookie",")",">","0",":","rconn",".","set","(","\"%s:Cookies:%s--%s\"","%","(","spiderName",",","zhihu","[","0","]",",","zhihu","[","1","]",")",",","cookie",")","cookieNum","=","str","(","rconn",".","keys","(",")",")",".","count","(","\"zhihuspider:Cookies\"",")","logger",".","warning","(","\"The num of the cookies is %s\"","%","cookieNum",")","if","cookieNum","==","0",":","logger",".","warning","(","'Stopping...'",")","os",".","system","(","\"pause\"",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/cookie.py#L145-L156"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/cookie.py","language":"python","identifier":"updateCookie","parameters":"(accountText, rconn, spiderName, cookie)","argument_list":"","return_statement":"","docstring":"\u66f4\u65b0\u4e00\u4e2a\u8d26\u53f7\u7684Cookie","docstring_summary":"\u66f4\u65b0\u4e00\u4e2a\u8d26\u53f7\u7684Cookie","docstring_tokens":["\u66f4\u65b0\u4e00\u4e2a\u8d26\u53f7\u7684Cookie"],"function":"def updateCookie(accountText, rconn, spiderName, cookie):\n    \"\"\" \u66f4\u65b0\u4e00\u4e2a\u8d26\u53f7\u7684Cookie \"\"\"\n    account = accountText.split(\"--\")[0]\n    #pdb.set_trace()\n    new_cookie = UpdateCookie(account, cookie)\n    if len(new_cookie) > 0:\n        logger.warning(\"The cookie of %s has been updated successfully!\" % account)\n        rconn.set(\"%s:Cookies:%s\" % (spiderName, accountText), new_cookie)\n    else:\n        logger.warning(\"The cookie of %s updated failed! Remove it!\" % accountText)\n        removeCookie(accountText, rconn, spiderName)","function_tokens":["def","updateCookie","(","accountText",",","rconn",",","spiderName",",","cookie",")",":","account","=","accountText",".","split","(","\"--\"",")","[","0","]","#pdb.set_trace()","new_cookie","=","UpdateCookie","(","account",",","cookie",")","if","len","(","new_cookie",")",">","0",":","logger",".","warning","(","\"The cookie of %s has been updated successfully!\"","%","account",")","rconn",".","set","(","\"%s:Cookies:%s\"","%","(","spiderName",",","accountText",")",",","new_cookie",")","else",":","logger",".","warning","(","\"The cookie of %s updated failed! Remove it!\"","%","accountText",")","removeCookie","(","accountText",",","rconn",",","spiderName",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/cookie.py#L158-L168"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/cookie.py","language":"python","identifier":"removeCookie","parameters":"(accountText, rconn, spiderName)","argument_list":"","return_statement":"","docstring":"\u5220\u9664\u67d0\u4e2a\u8d26\u53f7\u7684Cookie","docstring_summary":"\u5220\u9664\u67d0\u4e2a\u8d26\u53f7\u7684Cookie","docstring_tokens":["\u5220\u9664\u67d0\u4e2a\u8d26\u53f7\u7684Cookie"],"function":"def removeCookie(accountText, rconn, spiderName):\n    \"\"\" \u5220\u9664\u67d0\u4e2a\u8d26\u53f7\u7684Cookie \"\"\"\n    rconn.delete(\"%s:Cookies:%s\" % (spiderName, accountText))\n    cookieNum = str(rconn.keys()).count(\"zhihuspider:Cookies\")\n    logger.warning(\"The num of the cookies left is %s\" % cookieNum)\n    if cookieNum == 0:\n        logger.warning(\"Stopping...\")\n        os.system(\"pause\")","function_tokens":["def","removeCookie","(","accountText",",","rconn",",","spiderName",")",":","rconn",".","delete","(","\"%s:Cookies:%s\"","%","(","spiderName",",","accountText",")",")","cookieNum","=","str","(","rconn",".","keys","(",")",")",".","count","(","\"zhihuspider:Cookies\"",")","logger",".","warning","(","\"The num of the cookies left is %s\"","%","cookieNum",")","if","cookieNum","==","0",":","logger",".","warning","(","\"Stopping...\"",")","os",".","system","(","\"pause\"",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/cookie.py#L170-L177"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/scheduler.py","language":"python","identifier":"Scheduler.__init__","parameters":"(self, server,\n                 persist=False,\n                 flush_on_start=False,\n                 queue_key=defaults.SCHEDULER_QUEUE_KEY,\n                 queue_cls=defaults.SCHEDULER_QUEUE_CLASS,\n                 dupefilter_key=defaults.SCHEDULER_DUPEFILTER_KEY,\n                 dupefilter_cls=defaults.SCHEDULER_DUPEFILTER_CLASS,\n                 idle_before_close=0,\n                 serializer=None)","argument_list":"","return_statement":"","docstring":"Initialize scheduler.\n\n        Parameters\n        ----------\n        server : Redis\n            The redis server instance.\n        persist : bool\n            Whether to flush requests when closing. Default is False.\n        flush_on_start : bool\n            Whether to flush requests on start. Default is False.\n        queue_key : str\n            Requests queue key.\n        queue_cls : str\n            Importable path to the queue class.\n        dupefilter_key : str\n            Duplicates filter key.\n        dupefilter_cls : str\n            Importable path to the dupefilter class.\n        idle_before_close : int\n            Timeout before giving up.","docstring_summary":"Initialize scheduler.","docstring_tokens":["Initialize","scheduler","."],"function":"def __init__(self, server,\n                 persist=False,\n                 flush_on_start=False,\n                 queue_key=defaults.SCHEDULER_QUEUE_KEY,\n                 queue_cls=defaults.SCHEDULER_QUEUE_CLASS,\n                 dupefilter_key=defaults.SCHEDULER_DUPEFILTER_KEY,\n                 dupefilter_cls=defaults.SCHEDULER_DUPEFILTER_CLASS,\n                 idle_before_close=0,\n                 serializer=None):\n        \"\"\"Initialize scheduler.\n\n        Parameters\n        ----------\n        server : Redis\n            The redis server instance.\n        persist : bool\n            Whether to flush requests when closing. Default is False.\n        flush_on_start : bool\n            Whether to flush requests on start. 
Default is False.\n        queue_key : str\n            Requests queue key.\n        queue_cls : str\n            Importable path to the queue class.\n        dupefilter_key : str\n            Duplicates filter key.\n        dupefilter_cls : str\n            Importable path to the dupefilter class.\n        idle_before_close : int\n            Timeout before giving up.\n\n        \"\"\"\n        if idle_before_close < 0:\n            raise TypeError(\"idle_before_close cannot be negative\")\n\n        self.server = server\n        self.persist = persist\n        self.flush_on_start = flush_on_start\n        self.queue_key = queue_key\n        self.queue_cls = queue_cls\n        self.dupefilter_cls = dupefilter_cls\n        self.dupefilter_key = dupefilter_key\n        self.idle_before_close = idle_before_close\n        self.serializer = serializer\n        self.stats = None","function_tokens":["def","__init__","(","self",",","server",",","persist","=","False",",","flush_on_start","=","False",",","queue_key","=","defaults",".","SCHEDULER_QUEUE_KEY",",","queue_cls","=","defaults",".","SCHEDULER_QUEUE_CLASS",",","dupefilter_key","=","defaults",".","SCHEDULER_DUPEFILTER_KEY",",","dupefilter_cls","=","defaults",".","SCHEDULER_DUPEFILTER_CLASS",",","idle_before_close","=","0",",","serializer","=","None",")",":","if","idle_before_close","<","0",":","raise","TypeError","(","\"idle_before_close cannot be negative\"",")","self",".","server","=","server","self",".","persist","=","persist","self",".","flush_on_start","=","flush_on_start","self",".","queue_key","=","queue_key","self",".","queue_cls","=","queue_cls","self",".","dupefilter_cls","=","dupefilter_cls","self",".","dupefilter_key","=","dupefilter_key","self",".","idle_before_close","=","idle_before_close","self",".","serializer","=","serializer","self",".","stats","=","None"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/scheduler.py#L34-L77"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/pipelines.py","language":"python","identifier":"RedisPipeline.__init__","parameters":"(self, server,\n                 key=defaults.PIPELINE_KEY,\n                 serialize_func=default_serialize)","argument_list":"","return_statement":"","docstring":"Initialize pipeline.\n\n        Parameters\n        ----------\n        server : StrictRedis\n            Redis client instance.\n        key : str\n            Redis key where to store items.\n        serialize_func : callable\n            Items serializer function.","docstring_summary":"Initialize pipeline.","docstring_tokens":["Initialize","pipeline","."],"function":"def __init__(self, server,\n                 key=defaults.PIPELINE_KEY,\n                 serialize_func=default_serialize):\n        \"\"\"Initialize pipeline.\n\n        Parameters\n        ----------\n        server : StrictRedis\n            Redis client instance.\n        key : str\n            Redis key where to store items.\n        serialize_func : callable\n            Items serializer function.\n\n        \"\"\"\n        self.server = server\n        self.key = key\n        self.serialize = serialize_func","function_tokens":["def","__init__","(","self",",","server",",","key","=","defaults",".","PIPELINE_KEY",",","serialize_func","=","default_serialize",")",":","self",".","server","=","server","self",".","key","=","key","self",".","serialize","=","serialize_func"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/pipelines.py#L23-L40"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/pipelines.py","language":"python","identifier":"RedisPipeline.item_key","parameters":"(self, item, spider)","argument_list":"","return_statement":"return self.key % {'spider': spider.name}","docstring":"Returns redis key based on given spider.\n\n        Override this function to use a different key depending on the item\n        and\/or spider.","docstring_summary":"Returns redis key based on given spider.","docstring_tokens":["Returns","redis","key","based","on","given","spider","."],"function":"def item_key(self, item, spider):\n        \"\"\"Returns redis key based on given spider.\n\n        Override this function to use a different key depending on the item\n        and\/or spider.\n\n        \"\"\"\n        return self.key % {'spider': spider.name}","function_tokens":["def","item_key","(","self",",","item",",","spider",")",":","return","self",".","key","%","{","'spider'",":","spider",".","name","}"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/pipelines.py#L69-L76"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/queue.py","language":"python","identifier":"Base.__init__","parameters":"(self, server, spider, key, serializer=None)","argument_list":"","return_statement":"","docstring":"Initialize per-spider redis queue.\n\n        Parameters\n        ----------\n        server : StrictRedis\n            Redis client instance.\n        spider : Spider\n            Scrapy spider instance.\n        key: str\n            Redis key where to put and get messages.\n        serializer : object\n            Serializer object with ``loads`` and ``dumps`` methods.","docstring_summary":"Initialize per-spider redis queue.","docstring_tokens":["Initialize","per","-","spider","redis","queue","."],"function":"def __init__(self, server, spider, key, serializer=None):\n        \"\"\"Initialize per-spider redis queue.\n\n        Parameters\n        ----------\n        server : StrictRedis\n            Redis client instance.\n        spider : Spider\n            Scrapy spider instance.\n        key: str\n            Redis key where to put and get messages.\n        serializer : object\n            Serializer object with ``loads`` and ``dumps`` methods.\n\n        \"\"\"\n        if serializer is None:\n            # Backward compatibility.\n            # TODO: deprecate pickle.\n            serializer = picklecompat\n        if not hasattr(serializer, 'loads'):\n            raise TypeError(\"serializer does not implement 'loads' function: %r\"\n                            % serializer)\n        if not hasattr(serializer, 'dumps'):\n            raise TypeError(\"serializer '%s' does not implement 'dumps' function: %r\"\n                            % serializer)\n\n        self.server = server\n        self.spider = spider\n        self.key = key % {'spider': spider.name}\n        self.serializer = serializer","function_tokens":["def","__init__","(","self",",","server",",","spider",",","key",",","serializer","=","None",")",":","if","serializer","is","None",":","# Backward compatibility.","# TODO: deprecate pickle.","serializer","=","picklecompat","if","not","hasattr","(","serializer",",","'loads'",")",":","raise","TypeError","(","\"serializer does not implement 'loads' function: %r\"","%","serializer",")","if","not","hasattr","(","serializer",",","'dumps'",")",":","raise","TypeError","(","\"serializer '%s' does not implement 'dumps' function: %r\"","%","serializer",")","self",".","server","=","server","self",".","spider","=","spider","self",".","key","=","key","%","{","'spider'",":","spider",".","name","}","self",".","serializer","=","serializer"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/queue.py#L9-L38"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/queue.py","language":"python","identifier":"Base._encode_request","parameters":"(self, request)","argument_list":"","return_statement":"return self.serializer.dumps(obj)","docstring":"Encode a request object","docstring_summary":"Encode a request object","docstring_tokens":["Encode","a","request","object"],"function":"def _encode_request(self, request):\n        \"\"\"Encode a request object\"\"\"\n        obj = request_to_dict(request, self.spider)\n        return self.serializer.dumps(obj)","function_tokens":["def","_encode_request","(","self",",","request",")",":","obj","=","request_to_dict","(","request",",","self",".","spider",")","return","self",".","serializer",".","dumps","(","obj",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/queue.py#L40-L43"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/queue.py","language":"python","identifier":"Base._decode_request","parameters":"(self, encoded_request)","argument_list":"","return_statement":"return request_from_dict(obj, self.spider)","docstring":"Decode an request previously encoded","docstring_summary":"Decode an request previously encoded","docstring_tokens":["Decode","an","request","previously","encoded"],"function":"def _decode_request(self, encoded_request):\n        \"\"\"Decode an request previously encoded\"\"\"\n        obj = self.serializer.loads(encoded_request)\n        return request_from_dict(obj, self.spider)","function_tokens":["def","_decode_request","(","self",",","encoded_request",")",":","obj","=","self",".","serializer",".","loads","(","encoded_request",")","return","request_from_dict","(","obj",",","self",".","spider",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/queue.py#L45-L48"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/queue.py","language":"python","identifier":"Base.__len__","parameters":"(self)","argument_list":"","return_statement":"","docstring":"Return the length of the queue","docstring_summary":"Return the length of the queue","docstring_tokens":["Return","the","length","of","the","queue"],"function":"def __len__(self):\n        \"\"\"Return the length of the queue\"\"\"\n        raise NotImplementedError","function_tokens":["def","__len__","(","self",")",":","raise","NotImplementedError"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/queue.py#L50-L52"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/queue.py","language":"python","identifier":"Base.push","parameters":"(self, request)","argument_list":"","return_statement":"","docstring":"Push a request","docstring_summary":"Push a request","docstring_tokens":["Push","a","request"],"function":"def push(self, request):\n        \"\"\"Push a request\"\"\"\n        raise NotImplementedError","function_tokens":["def","push","(","self",",","request",")",":","raise","NotImplementedError"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/queue.py#L54-L56"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/queue.py","language":"python","identifier":"Base.pop","parameters":"(self, timeout=0)","argument_list":"","return_statement":"","docstring":"Pop a request","docstring_summary":"Pop a request","docstring_tokens":["Pop","a","request"],"function":"def pop(self, timeout=0):\n        \"\"\"Pop a request\"\"\"\n        raise NotImplementedError","function_tokens":["def","pop","(","self",",","timeout","=","0",")",":","raise","NotImplementedError"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/queue.py#L58-L60"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/queue.py","language":"python","identifier":"Base.clear","parameters":"(self)","argument_list":"","return_statement":"","docstring":"Clear queue\/stack","docstring_summary":"Clear queue\/stack","docstring_tokens":["Clear","queue","\/","stack"],"function":"def clear(self):\n        \"\"\"Clear queue\/stack\"\"\"\n        self.server.delete(self.key)","function_tokens":["def","clear","(","self",")",":","self",".","server",".","delete","(","self",".","key",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/queue.py#L62-L64"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/queue.py","language":"python","identifier":"FifoQueue.__len__","parameters":"(self)","argument_list":"","return_statement":"return self.server.llen(self.key)","docstring":"Return the length of the queue","docstring_summary":"Return the length of the queue","docstring_tokens":["Return","the","length","of","the","queue"],"function":"def __len__(self):\n        \"\"\"Return the length of the queue\"\"\"\n        return self.server.llen(self.key)","function_tokens":["def","__len__","(","self",")",":","return","self",".","server",".","llen","(","self",".","key",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/queue.py#L70-L72"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/queue.py","language":"python","identifier":"FifoQueue.push","parameters":"(self, request)","argument_list":"","return_statement":"","docstring":"Push a request","docstring_summary":"Push a request","docstring_tokens":["Push","a","request"],"function":"def push(self, request):\n        \"\"\"Push a request\"\"\"\n        self.server.lpush(self.key, self._encode_request(request))","function_tokens":["def","push","(","self",",","request",")",":","self",".","server",".","lpush","(","self",".","key",",","self",".","_encode_request","(","request",")",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/queue.py#L74-L76"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/queue.py","language":"python","identifier":"FifoQueue.pop","parameters":"(self, timeout=0)","argument_list":"","return_statement":"","docstring":"Pop a request","docstring_summary":"Pop a request","docstring_tokens":["Pop","a","request"],"function":"def pop(self, timeout=0):\n        \"\"\"Pop a request\"\"\"\n        if timeout > 0:\n            data = self.server.brpop(self.key, timeout)\n            if isinstance(data, tuple):\n                data = data[1]\n        else:\n            data = self.server.rpop(self.key)\n        if data:\n            return self._decode_request(data)","function_tokens":["def","pop","(","self",",","timeout","=","0",")",":","if","timeout",">","0",":","data","=","self",".","server",".","brpop","(","self",".","key",",","timeout",")","if","isinstance","(","data",",","tuple",")",":","data","=","data","[","1","]","else",":","data","=","self",".","server",".","rpop","(","self",".","key",")","if","data",":","return","self",".","_decode_request","(","data",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/queue.py#L78-L87"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/queue.py","language":"python","identifier":"PriorityQueue.__len__","parameters":"(self)","argument_list":"","return_statement":"return self.server.zcard(self.key)","docstring":"Return the length of the queue","docstring_summary":"Return the length of the queue","docstring_tokens":["Return","the","length","of","the","queue"],"function":"def __len__(self):\n        \"\"\"Return the length of the queue\"\"\"\n        return self.server.zcard(self.key)","function_tokens":["def","__len__","(","self",")",":","return","self",".","server",".","zcard","(","self",".","key",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/queue.py#L93-L95"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/queue.py","language":"python","identifier":"PriorityQueue.push","parameters":"(self, request)","argument_list":"","return_statement":"","docstring":"Push a request","docstring_summary":"Push a request","docstring_tokens":["Push","a","request"],"function":"def push(self, request):\n        \"\"\"Push a request\"\"\"\n        data = self._encode_request(request)\n        score = -request.priority\n        # We don't use zadd method as the order of arguments change depending on\n        # whether the class is Redis or StrictRedis, and the option of using\n        # kwargs only accepts strings, not bytes.\n        self.server.execute_command('ZADD', self.key, score, data)","function_tokens":["def","push","(","self",",","request",")",":","data","=","self",".","_encode_request","(","request",")","score","=","-","request",".","priority","# We don't use zadd method as the order of arguments change depending on","# whether the class is Redis or StrictRedis, and the option of using","# kwargs only accepts strings, not bytes.","self",".","server",".","execute_command","(","'ZADD'",",","self",".","key",",","score",",","data",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/queue.py#L97-L104"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/queue.py","language":"python","identifier":"PriorityQueue.pop","parameters":"(self, timeout=0)","argument_list":"","return_statement":"","docstring":"Pop a request\n        timeout not support in this queue class","docstring_summary":"Pop a request\n        timeout not support in this queue class","docstring_tokens":["Pop","a","request","timeout","not","support","in","this","queue","class"],"function":"def pop(self, timeout=0):\n        \"\"\"\n        Pop a request\n        timeout not support in this queue class\n        \"\"\"\n        # use atomic range\/remove using multi\/exec\n        pipe = self.server.pipeline()\n        pipe.multi()\n        pipe.zrange(self.key, 0, 0).zremrangebyrank(self.key, 0, 0)\n        results, count = pipe.execute()\n        if results:\n            return self._decode_request(results[0])","function_tokens":["def","pop","(","self",",","timeout","=","0",")",":","# use atomic range\/remove using multi\/exec","pipe","=","self",".","server",".","pipeline","(",")","pipe",".","multi","(",")","pipe",".","zrange","(","self",".","key",",","0",",","0",")",".","zremrangebyrank","(","self",".","key",",","0",",","0",")","results",",","count","=","pipe",".","execute","(",")","if","results",":","return","self",".","_decode_request","(","results","[","0","]",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/queue.py#L106-L117"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/queue.py","language":"python","identifier":"LifoQueue.__len__","parameters":"(self)","argument_list":"","return_statement":"return self.server.llen(self.key)","docstring":"Return the length of the stack","docstring_summary":"Return the length of the stack","docstring_tokens":["Return","the","length","of","the","stack"],"function":"def __len__(self):\n        \"\"\"Return the length of the stack\"\"\"\n        return self.server.llen(self.key)","function_tokens":["def","__len__","(","self",")",":","return","self",".","server",".","llen","(","self",".","key",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/queue.py#L123-L125"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/queue.py","language":"python","identifier":"LifoQueue.push","parameters":"(self, request)","argument_list":"","return_statement":"","docstring":"Push a request","docstring_summary":"Push a request","docstring_tokens":["Push","a","request"],"function":"def push(self, request):\n        \"\"\"Push a request\"\"\"\n        self.server.lpush(self.key, self._encode_request(request))","function_tokens":["def","push","(","self",",","request",")",":","self",".","server",".","lpush","(","self",".","key",",","self",".","_encode_request","(","request",")",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/queue.py#L127-L129"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/queue.py","language":"python","identifier":"LifoQueue.pop","parameters":"(self, timeout=0)","argument_list":"","return_statement":"","docstring":"Pop a request","docstring_summary":"Pop a request","docstring_tokens":["Pop","a","request"],"function":"def pop(self, timeout=0):\n        \"\"\"Pop a request\"\"\"\n        if timeout > 0:\n            data = self.server.blpop(self.key, timeout)\n            if isinstance(data, tuple):\n                data = data[1]\n        else:\n            data = self.server.lpop(self.key)\n\n        if data:\n            return self._decode_request(data)","function_tokens":["def","pop","(","self",",","timeout","=","0",")",":","if","timeout",">","0",":","data","=","self",".","server",".","blpop","(","self",".","key",",","timeout",")","if","isinstance","(","data",",","tuple",")",":","data","=","data","[","1","]","else",":","data","=","self",".","server",".","lpop","(","self",".","key",")","if","data",":","return","self",".","_decode_request","(","data",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/queue.py#L131-L141"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/connection.py","language":"python","identifier":"get_redis_from_settings","parameters":"(settings)","argument_list":"","return_statement":"return get_redis(**params)","docstring":"Returns a redis client instance from given Scrapy settings object.\n\n    This function uses ``get_client`` to instantiate the client and uses\n    ``defaults.REDIS_PARAMS`` global as defaults values for the parameters. You\n    can override them using the ``REDIS_PARAMS`` setting.\n\n    Parameters\n    ----------\n    settings : Settings\n        A scrapy settings object. See the supported settings below.\n\n    Returns\n    -------\n    server\n        Redis client instance.\n\n    Other Parameters\n    ----------------\n    REDIS_URL : str, optional\n        Server connection URL.\n    REDIS_HOST : str, optional\n        Server host.\n    REDIS_PORT : str, optional\n        Server port.\n    REDIS_ENCODING : str, optional\n        Data encoding.\n    REDIS_PARAMS : dict, optional\n        Additional client parameters.","docstring_summary":"Returns a redis client instance from given Scrapy settings object.","docstring_tokens":["Returns","a","redis","client","instance","from","given","Scrapy","settings","object","."],"function":"def get_redis_from_settings(settings):\n    \"\"\"Returns a redis client instance from given Scrapy settings object.\n\n    This function uses ``get_client`` to instantiate the client and uses\n    ``defaults.REDIS_PARAMS`` global as defaults values for the parameters. You\n    can override them using the ``REDIS_PARAMS`` setting.\n\n    Parameters\n    ----------\n    settings : Settings\n        A scrapy settings object. See the supported settings below.\n\n    Returns\n    -------\n    server\n        Redis client instance.\n\n    Other Parameters\n    ----------------\n    REDIS_URL : str, optional\n        Server connection URL.\n    REDIS_HOST : str, optional\n        Server host.\n    REDIS_PORT : str, optional\n        Server port.\n    REDIS_ENCODING : str, optional\n        Data encoding.\n    REDIS_PARAMS : dict, optional\n        Additional client parameters.\n\n    \"\"\"\n    params = defaults.REDIS_PARAMS.copy()\n    params.update(settings.getdict('REDIS_PARAMS'))\n    # XXX: Deprecate REDIS_* settings.\n    for source, dest in SETTINGS_PARAMS_MAP.items():\n        val = settings.get(source)\n        if val:\n            params[dest] = val\n\n    # Allow ``redis_cls`` to be a path to a class.\n    if isinstance(params.get('redis_cls'), six.string_types):\n        params['redis_cls'] = load_object(params['redis_cls'])\n\n    return get_redis(**params)","function_tokens":["def","get_redis_from_settings","(","settings",")",":","params","=","defaults",".","REDIS_PARAMS",".","copy","(",")","params",".","update","(","settings",".","getdict","(","'REDIS_PARAMS'",")",")","# XXX: Deprecate REDIS_* settings.","for","source",",","dest","in","SETTINGS_PARAMS_MAP",".","items","(",")",":","val","=","settings",".","get","(","source",")","if","val",":","params","[","dest","]","=","val","# Allow ``redis_cls`` to be a path to a 
class.","if","isinstance","(","params",".","get","(","'redis_cls'",")",",","six",".","string_types",")",":","params","[","'redis_cls'","]","=","load_object","(","params","[","'redis_cls'","]",")","return","get_redis","(","*","*","params",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/connection.py#L17-L60"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/connection.py","language":"python","identifier":"get_redis","parameters":"(**kwargs)","argument_list":"","return_statement":"","docstring":"Returns a redis client instance.\n\n    Parameters\n    ----------\n    redis_cls : class, optional\n        Defaults to ``redis.StrictRedis``.\n    url : str, optional\n        If given, ``redis_cls.from_url`` is used to instantiate the class.\n    **kwargs\n        Extra parameters to be passed to the ``redis_cls`` class.\n\n    Returns\n    -------\n    server\n        Redis client instance.","docstring_summary":"Returns a redis client instance.","docstring_tokens":["Returns","a","redis","client","instance","."],"function":"def get_redis(**kwargs):\n    \"\"\"Returns a redis client instance.\n\n    Parameters\n    ----------\n    redis_cls : class, optional\n        Defaults to ``redis.StrictRedis``.\n    url : str, optional\n        If given, ``redis_cls.from_url`` is used to instantiate the class.\n    **kwargs\n        Extra parameters to be passed to the ``redis_cls`` class.\n\n    Returns\n    -------\n    server\n        Redis client instance.\n\n    \"\"\"\n    redis_cls = kwargs.pop('redis_cls', defaults.REDIS_CLS)\n    url = kwargs.pop('url', None)\n    if url:\n        return redis_cls.from_url(url, **kwargs)\n    else:\n        return redis_cls(**kwargs)","function_tokens":["def","get_redis","(","*","*","kwargs",")",":","redis_cls","=","kwargs",".","pop","(","'redis_cls'",",","defaults",".","REDIS_CLS",")","url","=","kwargs",".","pop","(","'url'",",","None",")","if","url",":","return","redis_cls",".","from_url","(","url",",","*","*","kwargs",")","else",":","return","redis_cls","(","*","*","kwargs",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/connection.py#L67-L90"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/spiders.py","language":"python","identifier":"RedisMixin.start_requests","parameters":"(self)","argument_list":"","return_statement":"return self.next_requests()","docstring":"Returns a batch of start requests from redis.","docstring_summary":"Returns a batch of start requests from redis.","docstring_tokens":["Returns","a","batch","of","start","requests","from","redis","."],"function":"def start_requests(self):\n        \"\"\"Returns a batch of start requests from redis.\"\"\"\n        return self.next_requests()","function_tokens":["def","start_requests","(","self",")",":","return","self",".","next_requests","(",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/spiders.py#L18-L20"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/spiders.py","language":"python","identifier":"RedisMixin.setup_redis","parameters":"(self, crawler=None)","argument_list":"","return_statement":"","docstring":"Setup redis connection and idle signal.\n\n        This should be called after the spider has set its crawler object.","docstring_summary":"Setup redis connection and idle signal.","docstring_tokens":["Setup","redis","connection","and","idle","signal","."],"function":"def setup_redis(self, crawler=None):\n        \"\"\"Setup redis connection and idle signal.\n\n        This should be called after the spider has set its crawler object.\n        \"\"\"\n        if self.server is not None:\n            return\n\n        if crawler is None:\n            # We allow optional crawler argument to keep backwards\n            # compatibility.\n            # XXX: Raise a deprecation warning.\n            crawler = getattr(self, 'crawler', None)\n\n        if crawler is None:\n            raise ValueError(\"crawler is required\")\n\n        settings = crawler.settings\n\n        if self.redis_key is None:\n            self.redis_key = settings.get(\n                'REDIS_START_URLS_KEY', defaults.START_URLS_KEY,\n            )\n\n        self.redis_key = self.redis_key % {'name': self.name}\n\n        if not self.redis_key.strip():\n            raise ValueError(\"redis_key must not be empty\")\n\n        if self.redis_batch_size is None:\n            # TODO: Deprecate this setting (REDIS_START_URLS_BATCH_SIZE).\n            self.redis_batch_size = settings.getint(\n                'REDIS_START_URLS_BATCH_SIZE',\n                settings.getint('CONCURRENT_REQUESTS'),\n            )\n\n        try:\n            self.redis_batch_size = int(self.redis_batch_size)\n        except (TypeError, ValueError):\n            raise ValueError(\"redis_batch_size must be an integer\")\n\n        if self.redis_encoding is None:\n            self.redis_encoding = settings.get('REDIS_ENCODING', defaults.REDIS_ENCODING)\n\n        self.logger.info(\"Reading start URLs from redis key '%(redis_key)s' \"\n                         \"(batch size: %(redis_batch_size)s, encoding: %(redis_encoding)s\",\n                         self.__dict__)\n\n        self.server = connection.from_settings(crawler.settings)\n        # The idle signal is called when the spider has no requests left,\n        # that's when we will schedule new requests from redis queue\n        crawler.signals.connect(self.spider_idle, signal=signals.spider_idle)","function_tokens":["def","setup_redis","(","self",",","crawler","=","None",")",":","if","self",".","server","is","not","None",":","return","if","crawler","is","None",":","# We allow optional crawler argument to keep backwards","# compatibility.","# XXX: Raise a deprecation warning.","crawler","=","getattr","(","self",",","'crawler'",",","None",")","if","crawler","is","None",":","raise","ValueError","(","\"crawler is required\"",")","settings","=","crawler",".","settings","if","self",".","redis_key","is","None",":","self",".","redis_key","=","settings",".","get","(","'REDIS_START_URLS_KEY'",",","defaults",".","START_URLS_KEY",",",")","self",".","redis_key","=","self",".","redis_key","%","{","'name'",":","self",".","name","}","if","not","self",".","redis_key",".","strip","(",")",":","raise","ValueError","(","\"redis_key must not be empty\"",")","if","self",".","redis_batch_size","is","None",":","# TODO: Deprecate this setting 
(REDIS_START_URLS_BATCH_SIZE).","self",".","redis_batch_size","=","settings",".","getint","(","'REDIS_START_URLS_BATCH_SIZE'",",","settings",".","getint","(","'CONCURRENT_REQUESTS'",")",",",")","try",":","self",".","redis_batch_size","=","int","(","self",".","redis_batch_size",")","except","(","TypeError",",","ValueError",")",":","raise","ValueError","(","\"redis_batch_size must be an integer\"",")","if","self",".","redis_encoding","is","None",":","self",".","redis_encoding","=","settings",".","get","(","'REDIS_ENCODING'",",","defaults",".","REDIS_ENCODING",")","self",".","logger",".","info","(","\"Reading start URLs from redis key '%(redis_key)s' \"","\"(batch size: %(redis_batch_size)s, encoding: %(redis_encoding)s\"",",","self",".","__dict__",")","self",".","server","=","connection",".","from_settings","(","crawler",".","settings",")","# The idle signal is called when the spider has no requests left,","# that's when we will schedule new requests from redis queue","crawler",".","signals",".","connect","(","self",".","spider_idle",",","signal","=","signals",".","spider_idle",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/spiders.py#L22-L73"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/spiders.py","language":"python","identifier":"RedisMixin.next_requests","parameters":"(self)","argument_list":"","return_statement":"","docstring":"Returns a request to be scheduled or none.","docstring_summary":"Returns a request to be scheduled or none.","docstring_tokens":["Returns","a","request","to","be","scheduled","or","none","."],"function":"def next_requests(self):\n        \"\"\"Returns a request to be scheduled or none.\"\"\"\n        use_set = self.settings.getbool('REDIS_START_URLS_AS_SET', defaults.START_URLS_AS_SET)\n        fetch_one = self.server.spop if use_set else self.server.lpop\n        # XXX: Do we need to use a timeout here?\n        found = 0\n        # TODO: Use redis pipeline execution.\n        while found < self.redis_batch_size:\n            data = fetch_one(self.redis_key)\n            if not data:\n                # Queue empty.\n                break\n            req = self.make_request_from_data(data)\n            if req:\n                yield req\n                found += 1\n            else:\n                self.logger.debug(\"Request not made from data: %r\", data)\n\n        if found:\n            self.logger.debug(\"Read %s requests from '%s'\", found, self.redis_key)","function_tokens":["def","next_requests","(","self",")",":","use_set","=","self",".","settings",".","getbool","(","'REDIS_START_URLS_AS_SET'",",","defaults",".","START_URLS_AS_SET",")","fetch_one","=","self",".","server",".","spop","if","use_set","else","self",".","server",".","lpop","# XXX: Do we need to use a timeout here?","found","=","0","# TODO: Use redis pipeline execution.","while","found","<","self",".","redis_batch_size",":","data","=","fetch_one","(","self",".","redis_key",")","if","not","data",":","# Queue empty.","break","req","=","self",".","make_request_from_data","(","data",")","if","req",":","yield","req","found","+=","1","else",":","self",".","logger",".","debug","(","\"Request not made from data: %r\"",",","data",")","if","found",":","self",".","logger",".","debug","(","\"Read %s requests from '%s'\"",",","found",",","self",".","redis_key",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/spiders.py#L75-L95"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/spiders.py","language":"python","identifier":"RedisMixin.make_request_from_data","parameters":"(self, data)","argument_list":"","return_statement":"return self.make_requests_from_url(url)","docstring":"Returns a Request instance from data coming from Redis.\n\n        By default, ``data`` is an encoded URL. You can override this method to\n        provide your own message decoding.\n\n        Parameters\n        ----------\n        data : bytes\n            Message from redis.","docstring_summary":"Returns a Request instance from data coming from Redis.","docstring_tokens":["Returns","a","Request","instance","from","data","coming","from","Redis","."],"function":"def make_request_from_data(self, data):\n        \"\"\"Returns a Request instance from data coming from Redis.\n\n        By default, ``data`` is an encoded URL. You can override this method to\n        provide your own message decoding.\n\n        Parameters\n        ----------\n        data : bytes\n            Message from redis.\n\n        \"\"\"\n        url = bytes_to_str(data, self.redis_encoding)\n        return self.make_requests_from_url(url)","function_tokens":["def","make_request_from_data","(","self",",","data",")",":","url","=","bytes_to_str","(","data",",","self",".","redis_encoding",")","return","self",".","make_requests_from_url","(","url",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/spiders.py#L97-L110"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/spiders.py","language":"python","identifier":"RedisMixin.schedule_next_requests","parameters":"(self)","argument_list":"","return_statement":"","docstring":"Schedules a request if available","docstring_summary":"Schedules a request if available","docstring_tokens":["Schedules","a","request","if","available"],"function":"def schedule_next_requests(self):\n        \"\"\"Schedules a request if available\"\"\"\n        # TODO: While there is capacity, schedule a batch of redis requests.\n        for req in self.next_requests():\n            self.crawler.engine.crawl(req, spider=self)","function_tokens":["def","schedule_next_requests","(","self",")",":","# TODO: While there is capacity, schedule a batch of redis requests.","for","req","in","self",".","next_requests","(",")",":","self",".","crawler",".","engine",".","crawl","(","req",",","spider","=","self",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/spiders.py#L112-L116"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/spiders.py","language":"python","identifier":"RedisMixin.spider_idle","parameters":"(self)","argument_list":"","return_statement":"","docstring":"Schedules a request if available, otherwise waits.","docstring_summary":"Schedules a request if available, otherwise waits.","docstring_tokens":["Schedules","a","request","if","available","otherwise","waits","."],"function":"def spider_idle(self):\n        \"\"\"Schedules a request if available, otherwise waits.\"\"\"\n        # XXX: Handle a sentinel to close the spider.\n        self.schedule_next_requests()\n        raise DontCloseSpider","function_tokens":["def","spider_idle","(","self",")",":","# XXX: Handle a sentinel to close the spider.","self",".","schedule_next_requests","(",")","raise","DontCloseSpider"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/spiders.py#L118-L122"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/utils.py","language":"python","identifier":"bytes_to_str","parameters":"(s, encoding='utf-8')","argument_list":"","return_statement":"return s","docstring":"Returns a str if a bytes object is given.","docstring_summary":"Returns a str if a bytes object is given.","docstring_tokens":["Returns","a","str","if","a","bytes","object","is","given","."],"function":"def bytes_to_str(s, encoding='utf-8'):\n    \"\"\"Returns a str if a bytes object is given.\"\"\"\n    if six.PY3 and isinstance(s, bytes):\n        return s.decode(encoding)\n    return s","function_tokens":["def","bytes_to_str","(","s",",","encoding","=","'utf-8'",")",":","if","six",".","PY3","and","isinstance","(","s",",","bytes",")",":","return","s",".","decode","(","encoding",")","return","s"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/utils.py#L4-L8"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/dupefilter.py","language":"python","identifier":"RFPDupeFilter.__init__","parameters":"(self, server, key, debug=False)","argument_list":"","return_statement":"","docstring":"Initialize the duplicates filter.\n\n        Parameters\n        ----------\n        server : redis.StrictRedis\n            The redis server instance.\n        key : str\n            Redis key Where to store fingerprints.\n        debug : bool, optional\n            Whether to log filtered requests.","docstring_summary":"Initialize the duplicates filter.","docstring_tokens":["Initialize","the","duplicates","filter","."],"function":"def __init__(self, server, key, debug=False):\n        \"\"\"Initialize the duplicates filter.\n\n        Parameters\n        ----------\n        server : redis.StrictRedis\n            The redis server instance.\n        key : str\n            Redis key Where to store fingerprints.\n        debug : bool, optional\n            Whether to log filtered requests.\n\n        \"\"\"\n        self.server = server\n        self.key = key\n        self.debug = debug\n        self.bf = BloomFilter(server, key, blockNum=1)  # you can increase blockNum if your are filtering too many urls\n        self.logdupes = True","function_tokens":["def","__init__","(","self",",","server",",","key",",","debug","=","False",")",":","self",".","server","=","server","self",".","key","=","key","self",".","debug","=","debug","self",".","bf","=","BloomFilter","(","server",",","key",",","blockNum","=","1",")","# you can increase blockNum if your are filtering too many urls","self",".","logdupes","=","True"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/dupefilter.py#L25-L42"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/dupefilter.py","language":"python","identifier":"RFPDupeFilter.from_settings","parameters":"(cls, settings)","argument_list":"","return_statement":"return cls(server, key=key, debug=debug)","docstring":"Returns an instance from given settings.\n\n        This uses by default the key ``dupefilter:<timestamp>``. When using the\n        ``scrapy_redis.scheduler.Scheduler`` class, this method is not used as\n        it needs to pass the spider name in the key.\n\n        Parameters\n        ----------\n        settings : scrapy.settings.Settings\n\n        Returns\n        -------\n        RFPDupeFilter\n            A RFPDupeFilter instance.","docstring_summary":"Returns an instance from given settings.","docstring_tokens":["Returns","an","instance","from","given","settings","."],"function":"def from_settings(cls, settings):\n        \"\"\"Returns an instance from given settings.\n\n        This uses by default the key ``dupefilter:<timestamp>``. When using the\n        ``scrapy_redis.scheduler.Scheduler`` class, this method is not used as\n        it needs to pass the spider name in the key.\n\n        Parameters\n        ----------\n        settings : scrapy.settings.Settings\n\n        Returns\n        -------\n        RFPDupeFilter\n            A RFPDupeFilter instance.\n\n\n        \"\"\"\n        server = get_redis_from_settings(settings)\n        # XXX: This creates one-time key. needed to support to use this\n        # class as standalone dupefilter with scrapy's default scheduler\n        # if scrapy passes spider on open() method this wouldn't be needed\n        # TODO: Use SCRAPY_JOB env as default and fallback to timestamp.\n        key = defaults.DUPEFILTER_KEY % {'timestamp': int(time.time())}\n        debug = settings.getbool('DUPEFILTER_DEBUG')\n        return cls(server, key=key, debug=debug)","function_tokens":["def","from_settings","(","cls",",","settings",")",":","server","=","get_redis_from_settings","(","settings",")","# XXX: This creates one-time key. needed to support to use this","# class as standalone dupefilter with scrapy's default scheduler","# if scrapy passes spider on open() method this wouldn't be needed","# TODO: Use SCRAPY_JOB env as default and fallback to timestamp.","key","=","defaults",".","DUPEFILTER_KEY","%","{","'timestamp'",":","int","(","time",".","time","(",")",")","}","debug","=","settings",".","getbool","(","'DUPEFILTER_DEBUG'",")","return","cls","(","server",",","key","=","key",",","debug","=","debug",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/dupefilter.py#L45-L70"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/dupefilter.py","language":"python","identifier":"RFPDupeFilter.from_crawler","parameters":"(cls, crawler)","argument_list":"","return_statement":"return cls.from_settings(crawler.settings)","docstring":"Returns instance from crawler.\n\n        Parameters\n        ----------\n        crawler : scrapy.crawler.Crawler\n\n        Returns\n        -------\n        RFPDupeFilter\n            Instance of RFPDupeFilter.","docstring_summary":"Returns instance from crawler.","docstring_tokens":["Returns","instance","from","crawler","."],"function":"def from_crawler(cls, crawler):\n        \"\"\"Returns instance from crawler.\n\n        Parameters\n        ----------\n        crawler : scrapy.crawler.Crawler\n\n        Returns\n        -------\n        RFPDupeFilter\n            Instance of RFPDupeFilter.\n\n        \"\"\"\n        return cls.from_settings(crawler.settings)","function_tokens":["def","from_crawler","(","cls",",","crawler",")",":","return","cls",".","from_settings","(","crawler",".","settings",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/dupefilter.py#L73-L86"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/dupefilter.py","language":"python","identifier":"RFPDupeFilter.request_seen","parameters":"(self, request)","argument_list":"","return_statement":"","docstring":"Returns True if request was already seen.\n\n        Parameters\n        ----------\n        request : scrapy.http.Request\n\n        Returns\n        -------\n        bool","docstring_summary":"Returns True if request was already seen.","docstring_tokens":["Returns","True","if","request","was","already","seen","."],"function":"def request_seen(self, request):\n        \"\"\"Returns True if request was already seen.\n\n        Parameters\n        ----------\n        request : scrapy.http.Request\n\n        Returns\n        -------\n        bool\n\n        \"\"\"\n        fp = request_fingerprint(request)\n        if self.bf.isContains(fp):\n            return True\n        else:\n            self.bf.insert(fp)\n            return False","function_tokens":["def","request_seen","(","self",",","request",")",":","fp","=","request_fingerprint","(","request",")","if","self",".","bf",".","isContains","(","fp",")",":","return","True","else",":","self",".","bf",".","insert","(","fp",")","return","False"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/dupefilter.py#L88-L105"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/dupefilter.py","language":"python","identifier":"RFPDupeFilter.request_fingerprint","parameters":"(self, request)","argument_list":"","return_statement":"return request_fingerprint(request)","docstring":"Returns a fingerprint for a given request.\n\n        Parameters\n        ----------\n        request : scrapy.http.Request\n\n        Returns\n        -------\n        str","docstring_summary":"Returns a fingerprint for a given request.","docstring_tokens":["Returns","a","fingerprint","for","a","given","request","."],"function":"def request_fingerprint(self, request):\n        \"\"\"Returns a fingerprint for a given request.\n\n        Parameters\n        ----------\n        request : scrapy.http.Request\n\n        Returns\n        -------\n        str\n\n        \"\"\"\n        return request_fingerprint(request)","function_tokens":["def","request_fingerprint","(","self",",","request",")",":","return","request_fingerprint","(","request",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/dupefilter.py#L107-L119"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/dupefilter.py","language":"python","identifier":"RFPDupeFilter.close","parameters":"(self, reason='')","argument_list":"","return_statement":"","docstring":"Delete data on close. Called by Scrapy's scheduler.\n\n        Parameters\n        ----------\n        reason : str, optional","docstring_summary":"Delete data on close. Called by Scrapy's scheduler.","docstring_tokens":["Delete","data","on","close",".","Called","by","Scrapy","s","scheduler","."],"function":"def close(self, reason=''):\n        \"\"\"Delete data on close. Called by Scrapy's scheduler.\n\n        Parameters\n        ----------\n        reason : str, optional\n\n        \"\"\"\n        self.clear()","function_tokens":["def","close","(","self",",","reason","=","''",")",":","self",".","clear","(",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/dupefilter.py#L121-L129"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/dupefilter.py","language":"python","identifier":"RFPDupeFilter.clear","parameters":"(self)","argument_list":"","return_statement":"","docstring":"Clears fingerprints data.","docstring_summary":"Clears fingerprints data.","docstring_tokens":["Clears","fingerprints","data","."],"function":"def clear(self):\n        \"\"\"Clears fingerprints data.\"\"\"\n        self.server.delete(self.key)","function_tokens":["def","clear","(","self",")",":","self",".","server",".","delete","(","self",".","key",")"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/dupefilter.py#L131-L133"}
{"nwo":"AlexTan-b-z\/ZhihuSpider","sha":"7f35d157fa7f3a7ac8545b386e98286ee2764462","path":"zhihu\/zhihu\/scrapy_redis\/dupefilter.py","language":"python","identifier":"RFPDupeFilter.log","parameters":"(self, request, spider)","argument_list":"","return_statement":"","docstring":"Logs given request.\n\n        Parameters\n        ----------\n        request : scrapy.http.Request\n        spider : scrapy.spiders.Spider","docstring_summary":"Logs given request.","docstring_tokens":["Logs","given","request","."],"function":"def log(self, request, spider):\n        \"\"\"Logs given request.\n\n        Parameters\n        ----------\n        request : scrapy.http.Request\n        spider : scrapy.spiders.Spider\n\n        \"\"\"\n        if self.debug:\n            msg = \"Filtered duplicate request: %(request)s\"\n            self.logger.debug(msg, {'request': request}, extra={'spider': spider})\n        elif self.logdupes:\n            msg = (\"Filtered duplicate request %(request)s\"\n                   \" - no more duplicates will be shown\"\n                   \" (see DUPEFILTER_DEBUG to show all duplicates)\")\n            self.logger.debug(msg, {'request': request}, extra={'spider': spider})\n            self.logdupes = False","function_tokens":["def","log","(","self",",","request",",","spider",")",":","if","self",".","debug",":","msg","=","\"Filtered duplicate request: %(request)s\"","self",".","logger",".","debug","(","msg",",","{","'request'",":","request","}",",","extra","=","{","'spider'",":","spider","}",")","elif","self",".","logdupes",":","msg","=","(","\"Filtered duplicate request %(request)s\"","\" - no more duplicates will be shown\"","\" (see DUPEFILTER_DEBUG to show all duplicates)\"",")","self",".","logger",".","debug","(","msg",",","{","'request'",":","request","}",",","extra","=","{","'spider'",":","spider","}",")","self",".","logdupes","=","False"],"url":"https:\/\/github.com\/AlexTan-b-z\/ZhihuSpider\/blob\/7f35d157fa7f3a7ac8545b386e98286ee2764462\/zhihu\/zhihu\/scrapy_redis\/dupefilter.py#L135-L152"}