repo_name (stringlengths 5–92) | path (stringlengths 4–221) | copies (stringclasses, 19 values) | size (stringlengths 4–6) | content (stringlengths 766–896k) | license (stringclasses, 15 values) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51–99.9) | line_max (int64, 32–997) | alpha_frac (float64, 0.25–0.96) | autogenerated (bool, 1 class) | ratio (float64, 1.5–13.6) | config_test (bool, 2 classes) | has_no_keywords (bool, 2 classes) | few_assignments (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---
zdomjus60/astrometry | tools.py | 1 | 10051 |
# -*- coding: utf-8 -*-
""" helper functions for time management
"""
import math
def sin(x):
return math.sin(math.radians(x))
def cos(x):
return math.cos(math.radians(x))
def atan2(y, x):
return math.degrees(math.atan2(y, x))
def reduce360(x):
return x % 360.0
def dms2ddd(hour, minute, second):
""" from sexagesimal to decimal """
return hour+minute/60.0+second/3600.0
def ddd2dms(dec_hour):
""" from decimal to sexagesimal representation of hours and angles."""
if dec_hour < 0:
sign = -1
dec_hour *= sign
else:
sign = 1
total_seconds = int(dec_hour * 3600.0+.5)
seconds = total_seconds % 60
total_minutes = int((total_seconds - seconds)/60.0)
minutes = total_minutes % 60
hours = int((total_minutes - minutes)/60.0)
return (hours * sign, minutes * sign, seconds * sign)
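# Editor's check (added example, not part of the original module): the two
# helpers above are inverses, with the sign carried on every component:
#   dms2ddd(1, 30, 0) -> 1.5
#   ddd2dms(-1.5)     -> (-1, -30, 0)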
def cal2jul(year, month, day, hour=0, minute=0, second=0):
""" converts calendar date to julian date
this routine and the following are built following Duffet Smith /Zwart instructions
as given in Peter Duffett-Smith-Zwart Practical Astronomy with your Calculator or Spreadsheet
Fourth Edition, Cambridge University Press, Fourth Ed. 2011
For an easier use of the function, hours minutes and seconds are defaulted to 0, so it's
not necessary to give them as parameters when the hour is 00:00:00
"""
month2 = month
year2 = year
if month2 <= 2:
year2 -= 1
month2 += 12
else:
pass
if (year*10000 + month*100 + day) >= 15821015:
a = math.trunc(year2/100.0)
b = 2 - a + math.trunc(a/4.0)
else:
a = 0
b = 0
if year < 0:
c = math.trunc((365.25 * year2)-0.75)
else:
c = math.trunc(365.25 * year2)
d = math.trunc(30.6001 *(month2 + 1))
return b + c + d + day + hour / 24.0 + minute / 1440.0 + second / 86400.0 + 1720994.5
def jul2cal(jd):
""" converts julian date to calendar date """
jd += 0.5
i = math.modf(jd)[1]
f = math.modf(jd)[0]
if i > 2299160:
a = math.trunc((i-1867216.25)/36524.25)
b = i + a - math.trunc(a/4)+1
else:
b = i
c = b + 1524
d = math.trunc((c-122.1)/365.25)
e = math.trunc(365.25 * d)
g = math.trunc((c-e)/30.6001)
day = c-e+f-math.trunc(30.6001*g)
if g < 13.5:
month = g - 1
else:
month = g - 13
if month > 2.5:
year = d - 4716
else:
year = d - 4715
hours_frac = math.modf(day)[0]*24
day = int(day)
hour, minute, second = ddd2dms(hours_frac)
return (year, month, day, hour, minute, second)
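# Editor's check (added example, not part of the original module): cal2jul
# and jul2cal round-trip at the J2000.0 epoch:
#   cal2jul(2000, 1, 1, 12) -> 2451545.0
#   jul2cal(2451545.0)      -> (2000, 1, 1, 12, 0, 0)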
def day_of_the_week(year, month, day):
""" given a calendar date, the routine returns a tuple with the Day Of The Week in number and in plaintext
0 for Sunday 1 for Monday and so on up to 6 Saturday
"""
doth = {0:'Sunday', 1:'Monday', 2:'Tuesday',
3:'Wednesday', 4:'Thursday', 5:'Friday',
6:'Saturday'}
jd = cal2jul(year, month, day, 0, 0, 0)
a = (jd+1.5)/7
f = math.trunc((a % 1)*7 +.5)
return (f,doth[f])
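# Editor's check (added example, not part of the original module):
#   day_of_the_week(2000, 1, 1) -> (6, 'Saturday')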
def lt2ut(year, month, day, hour=0, minute=0, second=0, timezone=0, DS=0):
""" Given, for a location on the Earth,a date, a time, a timezone (East + West - in hours) and the Daylight
Savings (0 normal time 1 Daylight Savings), this routine gives back a calendar date in Universal Time
representation (year, month, day, hour, minute, second).
It aims to restore a common date and time for all places in the Earth. Timezone and
Daylight Savings can be automized knowing the location using the pytz module (Olson
database)
"""
ut = dms2ddd(hour,minute,second) - timezone - DS
greenwich_calendar_date = day + ut/24
jd = cal2jul(year, month, greenwich_calendar_date)
greenwich_calendar_date = jul2cal(jd)
return greenwich_calendar_date
def ut2lt(year, month, day, hour=0, minute=0, second=0, timezone=0, DS=0):
""" Given a date, a time for Greenwich in UT format this routine gives back a calendar date
in local time representation (year, month, day, hour, minute, second).
It's the inverse function of the previous formula
"""
lt = dms2ddd(hour,minute,second) + timezone +DS
local_calendar_date = day + lt/24
jd = cal2jul(year, month, local_calendar_date)
local_calendar_date = jul2cal(jd)
return local_calendar_date
def ut2gst(year, month, day, hour, minute, second):
""" Sidereal time is a time-keeping system astronomers use to keep track of the direction to point
their telescopes to view a given star in the night sky.
Briefly, sidereal time is a "time scale that is based on the Earth's rate of rotation measured
relative to the fixed stars." (source Wikipedia)
This routine converts Universal Time to Sidereal Time for Greenwich (Greenwich Sidereal Time)
"""
jd = cal2jul(year, month, day)
S = jd - 2451545.0
T = S/36525.0
T0 = (6.697374558 + (2400.051336 * T)+ 0.000025862 *T*T) % 24
UT = dms2ddd(hour, minute, second)*1.002737909
GST = ddd2dms((UT + T0) % 24)
return GST
def gst2ut( year, month, day, hour, minute, second):
""" Inverse of the previous function
"""
jd = cal2jul(year, month, day, 0,0,0)
S = jd - 2451545.0
T = S/36525.0
T0 = (6.697374558 + 2400.051336 * T + 0.000025862 *T*T) % 24
GST = (dms2ddd(hour, minute, second) - T0) % 24
while GST <0:
GST += 24
UT = GST * .9972695663
return ddd2dms(UT)
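# Editor's note (added example, not part of the original module): Duffett-Smith's
# worked exercise converts 1980 April 22, 14:36:51.67 UT to roughly GST 4:40:05,
# so ut2gst(1980, 4, 22, 14, 36, 51.67) should return approximately (4, 40, 5),
# and gst2ut applied to that result recovers the original time of day.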
def gst2lst( hour, minute, second, long_degree, long_minute, long_second=0):
""" Corrects GST for a different location on the Earth
"""
GST = dms2ddd(hour,minute,second)
lg = dms2ddd(long_degree, long_minute, long_second)/15.0
lst = ddd2dms((GST + lg) % 24)
return lst
def lst2gst( hour, minute, second, long_degree, long_minute, long_second=0):
""" Inverse of the previous method
"""
lst = dms2ddd(hour,minute,second)
lg = dms2ddd(long_degree, long_minute, long_second)/15.0
GST = ddd2dms((lst + lg) % 24)
return GST
def julian_centuries(year, month, day, hour=0, minute =0, second=0):
d1 = cal2jul(year, month, day, hour, minute, second)
d2 = cal2jul(2000,1,1,12)
return (d1-d2) / 36525.0
def julian_millennia(year, month, day, hour=0, minute =0, second=0):
return julian_centuries(year, month, day, hour, minute, second) / 10.0
def julian_decamillennia(year, month, day, hour=0, minute =0, second=0):
return julian_centuries(year, month, day, hour, minute, second) / 100.0
def obl_ecl_JPL(year, month, day, hour=0, minute=0, second=0):
""" mean obliquity of the ecliptic, from the JPL Astronomical Almanac 2010 """
t = julian_centuries(year, month, day, hour, minute, second)
return (23 * 3600 + 26*60 + 21.406
- 46.836769 * t
- 0.0001831 * t * t
+ 0.00200340 * t * t * t
- 0.576e-6 * t * t * t * t
- 4.34e-8 * t * t * t * t * t) / 3600.0
def obl_ecl_Laskar(year, month, day, hour = 0, minute = 0, second = 0):
"""
Original work from Jay Tanner
- converted to Python code by Domenico Mustara 2015
This PHP function computes the mean obliquity of the ecliptic
given a JD argument corresponding to any given date and time.
Author: Jay Tanner - 2010
The algorithm used here is based on work published by J. Laskar
Astronomy and Astrophysics, Vol 157, p68 (1986),
New Formulas for the Precession, Valid Over 10000 years,
Table 8.
Source code provided under the provisions of the
GNU Affero General Public License (AGPL), version 3.
http://www.gnu.org/licenses/agpl.html
// -----------------------------------------------------------
// Compute the (t) value in Julian decamillennia corresponding
// to the JD argument and reckoned from J2000.
$t = ($JD - 2451545.0) / 3652500.0;
// --------------------------------------
"""
t = julian_decamillennia(year, month, day, hour, minute, second)
w = 84381.448
w -= 4680.93 * t
w -= 1.55 * t * t
w += 1999.25 * t * t * t
w -= 51.38 * t * t * t * t
w -= 249.67 * t * t * t * t * t
w -= 39.05 * t * t * t * t * t * t
w += 7.12 * t * t * t * t * t * t * t
w += 27.87 * t * t * t * t * t * t * t * t
w += 5.79 * t * t * t * t * t * t * t * t * t
w += 2.45 * t * t * t * t * t * t * t * t * t * t
return w / 3600.0
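# Editor's check (added example, not part of the original module): at the
# J2000.0 epoch (t = 0) both obliquity series reduce to their constant terms
# and agree to about 1e-5 degrees:
#   obl_ecl_JPL(2000, 1, 1, 12)    -> 84381.406 / 3600 ~ 23.439279 degrees
#   obl_ecl_Laskar(2000, 1, 1, 12) -> 84381.448 / 3600 ~ 23.439291 degrees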
""" Some conversion utilities between various coordinate systems """
def sph_ecl2rect_ecl(r, longitude, latitude):
x = r * cos(latitude) * cos(longitude)
y = r * cos(latitude) * sin(longitude)
z = r * sin(latitude)
return (x,y,z)
def rect_ecl2sph_ecl(x,y,z):
r = math.sqrt(x*x + y*y + z*z)
longitude = atan2(y,x)
latitude = atan2(z, math.sqrt(x*x + y*y))
return (r, longitude, latitude)
def sph_equat2rect_equat(r, RA, Declination):
x = r * cos(RA) * cos(Declination)
y = r * sin(RA) * cos(Declination)
z = r * sin(Declination)
return (x,y,z)
def rect_equat2sph_equat(x,y,z):
r = math.sqrt(x*x + y*y +z*z)
RA = atan2(y, x)
Decl = atan2(z, math.sqrt(x*x + y*y))
return (r, RA, Decl)
def rect_ecl2rect_equat(xeclip, yeclip, zeclip, year, month, day, hour = 0, minute = 0, second = 0):
oblecl = obl_ecl_JPL(year, month, day, hour, minute, second)
xequat = xeclip
yequat = yeclip * cos(oblecl) - zeclip * sin(oblecl)
zequat = yeclip * sin(oblecl) + zeclip * cos(oblecl)
return (xequat, yequat, zequat)
def rect_equat2rect_ecl(xequat, yequat, zequat, year, month, day, hour = 0, minute = 0, second = 0):
oblecl = obl_ecl_JPL(year, month, day, hour, minute, second)
xeclip = xequat
yeclip = yequat * cos(- oblecl) - zequat * sin(- oblecl)
zeclip = yequat * sin(- oblecl) + zequat * cos(- oblecl)
return (xeclip, yeclip, zeclip)
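# Editor's sketch (added example, not part of the original module): each
# spherical/rectangular pair is a mutual inverse, e.g.
#   rect_ecl2sph_ecl(*sph_ecl2rect_ecl(1.0, 120.0, 45.0)) -> (1.0, 120.0, 45.0)
# up to floating-point rounding, and rect_equat2rect_ecl undoes
# rect_ecl2rect_equat when given the same date.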
| cc0-1.0 | -7,099,347,639,674,084,000 | 34.641844 | 111 | 0.594369 | false | 2.931175 | false | false | false |
stoeckli/iMatrixSpray | octoprint/printer.py | 1 | 20362 |
# coding=utf-8
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
import time
import datetime
import threading
import copy
import os
#import logging, logging.config
import octoprint.util.comm as comm
import octoprint.util as util
from octoprint.settings import settings
from octoprint.events import eventManager
def getConnectionOptions():
"""
Retrieves the available ports, baudrates, preferred port and baudrate for connecting to the printer.
"""
return {
"ports": comm.serialList(),
"baudrates": comm.baudrateList(),
"portPreference": settings().get(["serial", "port"]),
"baudratePreference": settings().getInt(["serial", "baudrate"]),
"autoconnect": settings().getBoolean(["serial", "autoconnect"])
}
class Printer():
def __init__(self, gcodeManager):
from collections import deque
self._gcodeManager = gcodeManager
self._gcodeManager.registerCallback(self)
# state
self._temp = None
self._bedTemp = None
self._targetTemp = None
self._targetBedTemp = None
self._temps = {
"actual": deque([], 300),
"target": deque([], 300),
"actualBed": deque([], 300),
"targetBed": deque([], 300)
}
self._tempBacklog = []
self._latestMessage = None
self._messages = deque([], 300)
self._messageBacklog = []
self._latestLog = None
self._log = deque([], 300)
self._logBacklog = []
self._state = None
self._currentZ = None
self._progress = None
self._printTime = None
self._printTimeLeft = None
self._printAfterSelect = False
# sd handling
self._sdPrinting = False
self._sdStreaming = False
self._selectedFile = None
# comm
self._comm = None
# callbacks
self._callbacks = []
self._lastProgressReport = None
self._stateMonitor = StateMonitor(
ratelimit=0.5,
updateCallback=self._sendCurrentDataCallbacks,
addTemperatureCallback=self._sendAddTemperatureCallbacks,
addLogCallback=self._sendAddLogCallbacks,
addMessageCallback=self._sendAddMessageCallbacks
)
self._stateMonitor.reset(
state={"state": None, "stateString": self.getStateString(), "flags": self._getStateFlags()},
jobData={"filename": None, "filesize": None, "estimatedSprayTime": None, "filament": None},
progress={"progress": None, "filepos": None, "sprayTime": None, "sprayTimeLeft": None},
currentZ=None
)
#~~ callback handling
def registerCallback(self, callback):
self._callbacks.append(callback)
self._sendInitialStateUpdate(callback)
def unregisterCallback(self, callback):
if callback in self._callbacks:
self._callbacks.remove(callback)
def _sendAddTemperatureCallbacks(self, data):
for callback in self._callbacks:
try: callback.addTemperature(data)
except: pass
def _sendAddLogCallbacks(self, data):
for callback in self._callbacks:
try: callback.addLog(data)
except: pass
def _sendAddMessageCallbacks(self, data):
for callback in self._callbacks:
try: callback.addMessage(data)
except: pass
def _sendCurrentDataCallbacks(self, data):
for callback in self._callbacks:
try: callback.sendCurrentData(copy.deepcopy(data))
except: pass
def _sendTriggerUpdateCallbacks(self, type):
for callback in self._callbacks:
try: callback.sendUpdateTrigger(type)
except: pass
def _sendFeedbackCommandOutput(self, name, output):
for callback in self._callbacks:
try: callback.sendFeedbackCommandOutput(name, output)
except: pass
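# Editor's note (added comment, not in the original): objects passed to
# registerCallback() are duck-typed; they are expected to implement
# addTemperature(), addLog(), addMessage(), sendCurrentData(),
# sendHistoryData(), sendUpdateTrigger() and sendFeedbackCommandOutput().
# The bare try/except blocks above deliberately swallow listener errors so
# that one misbehaving callback cannot break state broadcasting.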
#~~ callback from gcodemanager
def sendUpdateTrigger(self, type):
if type == "gcodeFiles" and self._selectedFile:
self._setJobData(self._selectedFile["filename"],
self._selectedFile["filesize"],
self._selectedFile["sd"])
#~~ printer commands
def connect(self, port=None, baudrate=None):
"""
Connects to the printer. If port and/or baudrate is provided, uses these settings, otherwise autodetection
will be attempted.
"""
if self._comm is not None:
self._comm.close()
self._comm = comm.MachineCom(port, baudrate, callbackObject=self)
def disconnect(self):
"""
Closes the connection to the printer.
"""
if self._comm is not None:
self._comm.close()
self._comm = None
eventManager().fire("Disconnected")
def command(self, command):
"""
Sends a single gcode command to the printer.
"""
self.commands([command])
def commands(self, commands):
"""
Sends multiple gcode commands (provided as a list) to the printer.
"""
for command in commands:
self._comm.sendCommand(command)
def selectFile(self, filename, sd, printAfterSelect=False):
if self._comm is None or (self._comm.isBusy() or self._comm.isStreaming()):
return
self._printAfterSelect = printAfterSelect
self._comm.selectFile(filename, sd)
self._setProgressData(0, None, None, None)
self._setCurrentZ(None)
def unselectFile(self):
if self._comm is not None and (self._comm.isBusy() or self._comm.isStreaming()):
return
self._comm.unselectFile()
self._setProgressData(0, None, None, None)
self._setCurrentZ(None)
def startPrint(self):
"""
Starts the currently loaded print job.
Only starts if the printer is connected and operational, not currently printing and a printjob is loaded
"""
if self._comm is None or not self._comm.isOperational() or self._comm.isPrinting():
return
if self._selectedFile is None:
return
self._setCurrentZ(None)
self._comm.startPrint()
def togglePausePrint(self):
"""
Pause the current printjob.
"""
if self._comm is None:
return
self._comm.setPause(not self._comm.isPaused())
def cancelPrint(self, disableMotorsAndHeater=True):
"""
Cancel the current printjob.
"""
if self._comm is None:
return
self._comm.cancelPrint()
if disableMotorsAndHeater:
self.commands(["M84", "M104 S0", "M140 S0", "M106 S0"]) # disable motors, switch off heaters and fan
# reset progress, height, print time
self._setCurrentZ(None)
self._setProgressData(None, None, None, None)
# mark print as failure
if self._selectedFile is not None:
self._gcodeManager.printFailed(self._selectedFile["filename"])
eventManager().fire("PrintFailed", self._selectedFile["filename"])
#~~ state monitoring
def _setCurrentZ(self, currentZ):
self._currentZ = currentZ
formattedCurrentZ = None
if self._currentZ:
formattedCurrentZ = "%.2f mm" % (self._currentZ)
self._stateMonitor.setCurrentZ(formattedCurrentZ)
def _setState(self, state):
self._state = state
self._stateMonitor.setState({"state": self._state, "stateString": self.getStateString(), "flags": self._getStateFlags()})
def _addLog(self, log):
self._log.append(log)
self._stateMonitor.addLog(log)
def _addMessage(self, message):
self._messages.append(message)
self._stateMonitor.addMessage(message)
def _setProgressData(self, progress, filepos, printTime, printTimeLeft):
self._progress = progress
self._printTime = printTime
self._printTimeLeft = printTimeLeft
formattedPrintTime = None
if (self._printTime):
formattedPrintTime = util.getFormattedTimeDelta(datetime.timedelta(seconds=self._printTime))
formattedPrintTimeLeft = None
if (self._printTimeLeft):
formattedPrintTimeLeft = util.getFormattedTimeDelta(datetime.timedelta(minutes=self._printTimeLeft))
formattedFilePos = None
if (filepos):
formattedFilePos = util.getFormattedSize(filepos)
self._stateMonitor.setProgress({"progress": self._progress, "filepos": formattedFilePos, "printTime": formattedPrintTime, "printTimeLeft": formattedPrintTimeLeft})
def _addTemperatureData(self, temp, bedTemp, targetTemp, bedTargetTemp):
currentTimeUtc = int(time.time() * 1000)
self._temps["actual"].append((currentTimeUtc, temp))
self._temps["target"].append((currentTimeUtc, targetTemp))
self._temps["actualBed"].append((currentTimeUtc, bedTemp))
self._temps["targetBed"].append((currentTimeUtc, bedTargetTemp))
self._temp = temp
self._bedTemp = bedTemp
self._targetTemp = targetTemp
self._targetBedTemp = bedTargetTemp
self._stateMonitor.addTemperature({"currentTime": currentTimeUtc, "temp": self._temp, "bedTemp": self._bedTemp, "targetTemp": self._targetTemp, "targetBedTemp": self._targetBedTemp})
def _setJobData(self, filename, filesize, sd):
if filename is not None:
self._selectedFile = {
"filename": filename,
"filesize": filesize,
"sd": sd
}
else:
self._selectedFile = None
formattedFilename = None
formattedFilesize = None
estimatedPrintTime = None
fileMTime = None
filament = None
if filename:
formattedFilename = os.path.basename(filename)
# Use a string for mtime because it could be a float and the
# javascript needs an exact match
if not sd:
fileMTime = str(os.stat(filename).st_mtime)
if filesize:
formattedFilesize = util.getFormattedSize(filesize)
fileData = self._gcodeManager.getFileData(filename)
if fileData is not None and "gcodeAnalysis" in fileData.keys():
if "estimatedPrintTime" in fileData["gcodeAnalysis"].keys():
estimatedPrintTime = fileData["gcodeAnalysis"]["estimatedPrintTime"]
if "filament" in fileData["gcodeAnalysis"].keys():
filament = fileData["gcodeAnalysis"]["filament"]
self._stateMonitor.setJobData({"filename": formattedFilename, "filesize": formattedFilesize, "estimatedPrintTime": estimatedPrintTime, "filament": filament, "sd": sd, "mtime": fileMTime})
def _sendInitialStateUpdate(self, callback):
try:
data = self._stateMonitor.getCurrentData()
# convert the dict of deques to a dict of lists
temps = {k: list(v) for (k,v) in self._temps.iteritems()}
data.update({
"temperatureHistory": temps,
"logHistory": list(self._log),
"messageHistory": list(self._messages)
})
callback.sendHistoryData(data)
except Exception, err:
import sys
sys.stderr.write("ERROR: %s\n" % str(err))
pass
def _getStateFlags(self):
if not settings().getBoolean(["feature", "sdSupport"]) or self._comm is None:
sdReady = False
else:
sdReady = self._comm.isSdReady()
return {
"operational": self.isOperational(),
"printing": self.isPrinting(),
"closedOrError": self.isClosedOrError(),
"error": self.isError(),
"paused": self.isPaused(),
"ready": self.isReady(),
"sdReady": sdReady
}
#~~ callbacks triggered from self._comm
def mcLog(self, message):
"""
Callback method for the comm object, called upon log output.
"""
self._addLog(message)
def mcTempUpdate(self, temp, bedTemp, targetTemp, bedTargetTemp):
self._addTemperatureData(temp, bedTemp, targetTemp, bedTargetTemp)
def mcStateChange(self, state):
"""
Callback method for the comm object, called if the connection state changes.
"""
oldState = self._state
# forward relevant state changes to gcode manager
if self._comm is not None and oldState == self._comm.STATE_PRINTING:
if self._selectedFile is not None:
if state == self._comm.STATE_OPERATIONAL:
self._gcodeManager.printSucceeded(self._selectedFile["filename"])
elif state == self._comm.STATE_CLOSED or state == self._comm.STATE_ERROR or state == self._comm.STATE_CLOSED_WITH_ERROR:
self._gcodeManager.printFailed(self._selectedFile["filename"])
self._gcodeManager.resumeAnalysis() # printing done, put those cpu cycles to good use
elif self._comm is not None and state == self._comm.STATE_PRINTING:
self._gcodeManager.pauseAnalysis() # do not analyse gcode while printing
self._setState(state)
def mcMessage(self, message):
"""
Callback method for the comm object, called upon message exchanges via serial.
Stores the message in the message buffer, truncates buffer to the last 300 lines.
"""
self._addMessage(message)
def mcProgress(self):
"""
Callback method for the comm object, called upon any change in progress of the printjob.
Triggers storage of new values for printTime, printTimeLeft and the current progress.
"""
self._setProgressData(self._comm.getPrintProgress(), self._comm.getPrintFilepos(), self._comm.getPrintTime(), self._comm.getPrintTimeRemainingEstimate())
def mcZChange(self, newZ):
"""
Callback method for the comm object, called upon change of the z-layer.
"""
oldZ = self._currentZ
if newZ != oldZ:
# we have to react to all z-changes, even those that might "go backward" due to a slicer's retraction or
# anti-backlash routines. Event subscribers should individually take care to filter out "wrong" z-changes
eventManager().fire("ZChange", newZ)
self._setCurrentZ(newZ)
def mcSdStateChange(self, sdReady):
self._stateMonitor.setState({"state": self._state, "stateString": self.getStateString(), "flags": self._getStateFlags()})
def mcSdFiles(self, files):
self._sendTriggerUpdateCallbacks("gcodeFiles")
def mcFileSelected(self, filename, filesize, sd):
self._setJobData(filename, filesize, sd)
self._stateMonitor.setState({"state": self._state, "stateString": self.getStateString(), "flags": self._getStateFlags()})
if self._printAfterSelect:
self.startPrint()
def mcPrintjobDone(self):
self._setProgressData(1.0, self._selectedFile["filesize"], self._comm.getPrintTime(), 0)
self._stateMonitor.setState({"state": self._state, "stateString": self.getStateString(), "flags": self._getStateFlags()})
def mcFileTransferStarted(self, filename, filesize):
self._sdStreaming = True
self._setJobData(filename, filesize, True)
self._setProgressData(0.0, 0, 0, None)
self._stateMonitor.setState({"state": self._state, "stateString": self.getStateString(), "flags": self._getStateFlags()})
def mcFileTransferDone(self):
self._sdStreaming = False
self._setCurrentZ(None)
self._setJobData(None, None, None)
self._setProgressData(None, None, None, None)
self._stateMonitor.setState({"state": self._state, "stateString": self.getStateString(), "flags": self._getStateFlags()})
def mcReceivedRegisteredMessage(self, command, output):
self._sendFeedbackCommandOutput(command, output)
#~~ sd file handling
def getSdFiles(self):
if self._comm is None:
return
return self._comm.getSdFiles()
def addSdFile(self, filename, path):
if not self._comm or self._comm.isBusy():
return
self._comm.startFileTransfer(path, filename[:8].lower() + ".gco")
def deleteSdFile(self, filename):
if not self._comm:
return
self._comm.deleteSdFile(filename)
def initSdCard(self):
if not self._comm:
return
self._comm.initSdCard()
def releaseSdCard(self):
if not self._comm:
return
self._comm.releaseSdCard()
def refreshSdFiles(self):
if not self._comm:
return
self._comm.refreshSdFiles()
#~~ state reports
def getStateString(self):
"""
Returns a human readable string corresponding to the current communication state.
"""
if self._comm is None:
return "Offline"
else:
return self._comm.getStateString()
def getCurrentData(self):
return self._stateMonitor.getCurrentData()
def getCurrentJob(self):
currentData = self._stateMonitor.getCurrentData()
return currentData["job"]
def getCurrentTemperatures(self):
return {
"extruder": {
"current": self._temp,
"target": self._targetTemp
},
"bed": {
"current": self._bedTemp,
"target": self._targetBedTemp
}
}
def isClosedOrError(self):
return self._comm is None or self._comm.isClosedOrError()
def isOperational(self):
return self._comm is not None and self._comm.isOperational()
def isPrinting(self):
return self._comm is not None and self._comm.isPrinting()
def isPaused(self):
return self._comm is not None and self._comm.isPaused()
def isError(self):
return self._comm is not None and self._comm.isError()
def isReady(self):
return self.isOperational() and not self._comm.isStreaming()
def isLoading(self):
return self._gcodeLoader is not None
class GcodeLoader(threading.Thread):
"""
The GcodeLoader takes care of loading a gcode-File from disk and parsing it into a gcode object in a separate
thread while constantly notifying interested listeners about the current progress.
The progress is returned as a float value between 0 and 1 which is to be interpreted as the percentage of completion.
"""
def __init__(self, filename, progressCallback, loadedCallback):
threading.Thread.__init__(self)
self._progressCallback = progressCallback
self._loadedCallback = loadedCallback
self._filename = filename
self._gcodeList = None
def run(self):
#Send an initial M110 to reset the line counter to zero.
prevLineType = lineType = "CUSTOM"
gcodeList = ["M110 N0"]
filesize = os.stat(self._filename).st_size
with open(self._filename, "r") as file:
for line in file:
if line.startswith(";TYPE:"):
lineType = line[6:].strip()
if ";" in line:
line = line[0:line.find(";")]
line = line.strip()
if len(line) > 0:
if prevLineType != lineType:
gcodeList.append((line, lineType, ))
else:
gcodeList.append(line)
prevLineType = lineType
self._onLoadingProgress(float(file.tell()) / float(filesize))
self._gcodeList = gcodeList
self._loadedCallback(self._filename, self._gcodeList)
def _onLoadingProgress(self, progress):
self._progressCallback(self._filename, progress, "loading")
def _onParsingProgress(self, progress):
self._progressCallback(self._filename, progress, "parsing")
class SdFileStreamer(threading.Thread):
def __init__(self, comm, filename, file, progressCallback, finishCallback):
threading.Thread.__init__(self)
self._comm = comm
self._filename = filename
self._file = file
self._progressCallback = progressCallback
self._finishCallback = finishCallback
def run(self):
if self._comm.isBusy():
return
name = self._filename[:self._filename.rfind(".")]
sdFilename = name[:8].lower() + ".gco"
try:
size = os.stat(self._file).st_size
with open(self._file, "r") as f:
self._comm.startSdFileTransfer(sdFilename)
for line in f:
if ";" in line:
line = line[0:line.find(";")]
line = line.strip()
if len(line) > 0:
self._comm.sendCommand(line)
time.sleep(0.001) # do not send too fast
self._progressCallback(sdFilename, float(f.tell()) / float(size))
finally:
self._comm.endSdFileTransfer(sdFilename)
self._finishCallback(sdFilename)
class StateMonitor(object):
def __init__(self, ratelimit, updateCallback, addTemperatureCallback, addLogCallback, addMessageCallback):
self._ratelimit = ratelimit
self._updateCallback = updateCallback
self._addTemperatureCallback = addTemperatureCallback
self._addLogCallback = addLogCallback
self._addMessageCallback = addMessageCallback
self._state = None
self._jobData = None
self._gcodeData = None
self._sdUploadData = None
self._currentZ = None
self._progress = None
self._changeEvent = threading.Event()
self._lastUpdate = time.time()
self._worker = threading.Thread(target=self._work)
self._worker.daemon = True
self._worker.start()
def reset(self, state=None, jobData=None, progress=None, currentZ=None):
self.setState(state)
self.setJobData(jobData)
self.setProgress(progress)
self.setCurrentZ(currentZ)
def addTemperature(self, temperature):
self._addTemperatureCallback(temperature)
self._changeEvent.set()
def addLog(self, log):
self._addLogCallback(log)
self._changeEvent.set()
def addMessage(self, message):
self._addMessageCallback(message)
self._changeEvent.set()
def setCurrentZ(self, currentZ):
self._currentZ = currentZ
self._changeEvent.set()
def setState(self, state):
self._state = state
self._changeEvent.set()
def setJobData(self, jobData):
self._jobData = jobData
self._changeEvent.set()
def setProgress(self, progress):
self._progress = progress
self._changeEvent.set()
def _work(self):
while True:
self._changeEvent.wait()
now = time.time()
delta = now - self._lastUpdate
additionalWaitTime = self._ratelimit - delta
if additionalWaitTime > 0:
time.sleep(additionalWaitTime)
data = self.getCurrentData()
self._updateCallback(data)
self._lastUpdate = time.time()
self._changeEvent.clear()
def getCurrentData(self):
return {
"state": self._state,
"job": self._jobData,
"currentZ": self._currentZ,
"progress": self._progress
}
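# ---------------------------------------------------------------------------
# Editor's sketch (not part of the original file): minimal standalone use of
# StateMonitor, showing how updates are batched through the rate limiter.
# The callback names below are placeholders chosen for this example.
if __name__ == "__main__":
    def _print_update(data):
        print("update: %r" % (data,))

    _monitor = StateMonitor(
        ratelimit=0.5,
        updateCallback=_print_update,
        addTemperatureCallback=lambda data: None,
        addLogCallback=lambda data: None,
        addMessageCallback=lambda data: None,
    )
    _monitor.setState({"state": "Operational"})
    _monitor.setProgress({"progress": 0.42})
    time.sleep(1.0)  # let the daemon worker flush at least one rate-limited update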
| agpl-3.0 | 2,885,922,597,023,972,000 | 28.379509 | 189 | 0.712525 | false | 3.325167 | true | false | false |
nimasmi/wagtail | wagtail/core/blocks/struct_block.py | 1 | 8310 |
import collections
from django import forms
from django.core.exceptions import ValidationError
from django.forms.utils import ErrorList
from django.template.loader import render_to_string
from django.utils.functional import cached_property
from django.utils.html import format_html, format_html_join
from django.utils.safestring import mark_safe
from wagtail.admin.staticfiles import versioned_static
from .base import Block, DeclarativeSubBlocksMetaclass
from .utils import js_dict
__all__ = ['BaseStructBlock', 'StructBlock', 'StructValue']
class StructValue(collections.OrderedDict):
""" A class that generates a StructBlock value from provded sub-blocks """
def __init__(self, block, *args):
super().__init__(*args)
self.block = block
def __html__(self):
return self.block.render(self)
def render_as_block(self, context=None):
return self.block.render(self, context=context)
@cached_property
def bound_blocks(self):
return collections.OrderedDict([
(name, block.bind(self.get(name)))
for name, block in self.block.child_blocks.items()
])
class BaseStructBlock(Block):
def __init__(self, local_blocks=None, **kwargs):
self._constructor_kwargs = kwargs
super().__init__(**kwargs)
# create a local (shallow) copy of base_blocks so that it can be supplemented by local_blocks
self.child_blocks = self.base_blocks.copy()
if local_blocks:
for name, block in local_blocks:
block.set_name(name)
self.child_blocks[name] = block
self.child_js_initializers = {}
for name, block in self.child_blocks.items():
js_initializer = block.js_initializer()
if js_initializer is not None:
self.child_js_initializers[name] = js_initializer
self.dependencies = self.child_blocks.values()
def get_default(self):
"""
Any default value passed in the constructor or self.meta is going to be a dict
rather than a StructValue; for consistency, we need to convert it to a StructValue
for StructBlock to work with
"""
return self._to_struct_value(self.meta.default.items())
def js_initializer(self):
# skip JS setup entirely if no children have js_initializers
if not self.child_js_initializers:
return None
return "StructBlock(%s)" % js_dict(self.child_js_initializers)
@property
def media(self):
return forms.Media(js=[versioned_static('wagtailadmin/js/blocks/struct.js')])
def get_form_context(self, value, prefix='', errors=None):
if errors:
if len(errors) > 1:
# We rely on StructBlock.clean throwing a single ValidationError with a specially crafted
# 'params' attribute that we can pull apart and distribute to the child blocks
raise TypeError('StructBlock.render_form unexpectedly received multiple errors')
error_dict = errors.as_data()[0].params
else:
error_dict = {}
bound_child_blocks = collections.OrderedDict([
(
name,
block.bind(value.get(name, block.get_default()),
prefix="%s-%s" % (prefix, name), errors=error_dict.get(name))
)
for name, block in self.child_blocks.items()
])
return {
'children': bound_child_blocks,
'help_text': getattr(self.meta, 'help_text', None),
'classname': self.meta.form_classname,
'block_definition': self,
'prefix': prefix,
}
def render_form(self, value, prefix='', errors=None):
context = self.get_form_context(value, prefix=prefix, errors=errors)
return mark_safe(render_to_string(self.meta.form_template, context))
def value_from_datadict(self, data, files, prefix):
return self._to_struct_value([
(name, block.value_from_datadict(data, files, '%s-%s' % (prefix, name)))
for name, block in self.child_blocks.items()
])
def value_omitted_from_data(self, data, files, prefix):
return all(
block.value_omitted_from_data(data, files, '%s-%s' % (prefix, name))
for name, block in self.child_blocks.items()
)
def clean(self, value):
result = [] # build up a list of (name, value) tuples to be passed to the StructValue constructor
errors = {}
for name, val in value.items():
try:
result.append((name, self.child_blocks[name].clean(val)))
except ValidationError as e:
errors[name] = ErrorList([e])
if errors:
# The message here is arbitrary - StructBlock.render_form will suppress it
# and delegate the errors contained in the 'params' dict to the child blocks instead
raise ValidationError('Validation error in StructBlock', params=errors)
return self._to_struct_value(result)
def to_python(self, value):
""" Recursively call to_python on children and return as a StructValue """
return self._to_struct_value([
(
name,
(child_block.to_python(value[name]) if name in value else child_block.get_default())
# NB the result of get_default is NOT passed through to_python, as it's expected
# to be in the block's native type already
)
for name, child_block in self.child_blocks.items()
])
def _to_struct_value(self, block_items):
""" Return a Structvalue representation of the sub-blocks in this block """
return self.meta.value_class(self, block_items)
def get_prep_value(self, value):
""" Recursively call get_prep_value on children and return as a plain dict """
return dict([
(name, self.child_blocks[name].get_prep_value(val))
for name, val in value.items()
])
def get_api_representation(self, value, context=None):
""" Recursively call get_api_representation on children and return as a plain dict """
return dict([
(name, self.child_blocks[name].get_api_representation(val, context=context))
for name, val in value.items()
])
def get_searchable_content(self, value):
content = []
for name, block in self.child_blocks.items():
content.extend(block.get_searchable_content(value.get(name, block.get_default())))
return content
def deconstruct(self):
"""
Always deconstruct StructBlock instances as if they were plain StructBlocks with all of the
field definitions passed to the constructor - even if in reality this is a subclass of StructBlock
with the fields defined declaratively, or some combination of the two.
This ensures that the field definitions get frozen into migrations, rather than leaving a reference
to a custom subclass in the user's models.py that may or may not stick around.
"""
path = 'wagtail.core.blocks.StructBlock'
args = [list(self.child_blocks.items())]
kwargs = self._constructor_kwargs
return (path, args, kwargs)
def check(self, **kwargs):
errors = super().check(**kwargs)
for name, child_block in self.child_blocks.items():
errors.extend(child_block.check(**kwargs))
errors.extend(child_block._check_name(**kwargs))
return errors
def render_basic(self, value, context=None):
return format_html('<dl>\n{}\n</dl>', format_html_join(
'\n', ' <dt>{}</dt>\n <dd>{}</dd>', value.items()))
class Meta:
default = {}
form_classname = 'struct-block'
form_template = 'wagtailadmin/block_forms/struct.html'
value_class = StructValue
# No icon specified here, because that depends on the purpose that the
# block is being used for. Feel encouraged to specify an icon in your
# descendant block type
icon = "placeholder"
class StructBlock(BaseStructBlock, metaclass=DeclarativeSubBlocksMetaclass):
pass
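# Editor's sketch (not part of the original module): typical declarative
# usage in a Django/Wagtail project; the block names here are illustrative.
#
#     from wagtail.core import blocks
#
#     class LinkBlock(blocks.StructBlock):
#         title = blocks.CharBlock(required=True)
#         url = blocks.URLBlock()
#
#         class Meta:
#             icon = "link"
#
# Rendering a LinkBlock value goes through StructValue.__html__, which
# delegates back to the block's render() method.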
| bsd-3-clause | 7,582,453,976,146,293,000 | 37.472222 | 107 | 0.622262 | false | 4.229008 | false | false | false |
ypid/series60-remote | pc/devices/status_numbers.py | 1 | 2071 |
# -*- coding: utf-8 -*-
# Copyright (c) 2008 - 2010 Lukas Hetzenecker <LuHe@gmx.at>
NUM_CONNECTED = 100
NUM_HELLO_REQUEST = 110
NUM_HELLO_REPLY = 111
NUM_QUIT = 120
NUM_PARTIAL_MESSAGE = 130
NUM_CONTACTS_REQUEST_HASH_ALL = 200
NUM_CONTACTS_REQUEST_HASH_SINGLE = 201
NUM_CONTACTS_REQUEST_CONTACT = 204
NUM_CONTACTS_REQUEST_CONTACTS_ALL = 205
NUM_CONTACTS_REPLY_HASH_ALL = 210
NUM_CONTACTS_REPLY_HASH_SINGLE_START = 211
NUM_CONTACTS_REPLY_HASH_SINGLE_LINE = 212
NUM_CONTACTS_REPLY_HASH_SINGLE_END = 213
NUM_CONTACTS_REPLY_CONTACT_START = 220
NUM_CONTACTS_REPLY_CONTACT_LINE = 221
NUM_CONTACTS_REPLY_CONTACT_END = 222
NUM_CONTACTS_REPLY_CONTACTS_ALL_END = 223
NUM_CONTACTS_ADD = 230
NUM_CONTACTS_ADD_REPLY_ID = 231
NUM_CONTACTS_DELETE = 232
NUM_CONTACTS_CHANGE_ADDFIELD = 233
NUM_CONTACTS_CHANGE_REMOVEFIELD = 234
NUM_SYSINFO_REQUEST = 250
NUM_SYSINFO_REPLY_START = 260
NUM_SYSINFO_REPLY_LINE = 261
NUM_SYSINFO_REPLY_END = 262
NUM_MESSAGE_SEND_REQUEST = 300
NUM_MESSAGE_SEND_REPLY_OK = 301
NUM_MESSAGE_SEND_REPLY_STATUS = 302
NUM_MESSAGE_SEND_REPLY_FAILURE = 303
NUM_MESSAGE_SEND_REPLY_RETRY = 304
NUM_SET_READ = 320
NUM_MESSAGE_NEW = 350
NUM_MESSAGE_REQUEST = 351
NUM_MESSAGE_REPLY_LINE = 352
NUM_MESSAGE_REPLY_END = 353
NUM_MESSAGE_REQUEST_UNREAD = 370
NUM_MESSAGE_REPLY_UNREAD = 371
NUM_CALENDAR_REQUEST_HASH_ALL = 380
#NUM_CALENDAR_REQUEST_HASH_SINGLE = 381
NUM_CALENDAR_REQUEST_ENTRY = 382
NUM_CALENDAR_REQUEST_ENTRIES_ALL = 383
NUM_CALENDAR_REPLY_HASH_ALL = 384
#NUM_CALENDAR_REPLY_HASH_SINGLE_START= 385
#NUM_CALENDAR_REPLY_HASH_SINGLE_LINE= 386
#NUM_CALENDAR_REPLY_HASH_SINGLE_END= 387
NUM_CALENDAR_REPLY_ENTRIES_START = 388
NUM_CALENDAR_REPLY_ENTRY = 389
NUM_CALENDAR_REPLY_ENTRIES_END = 390
NUM_CALENDAR_ENTRY_ADD = 395
NUM_CALENDAR_ENTRY_ADD_REPLY = 396
NUM_CALENDAR_ENTRY_DELETE = 397
NUM_CALENDAR_ENTRY_CHANGE = 398
NUM_CALENDAR_ENTRY_CHANGE_REPLY_TIME = 399
NUM_INCOMING_CALL = 400
NUM_DEBUG = 999
NUM_END_HEADER = chr(0x02) # Start of Text
NUM_SEPERATOR = chr(0x1E) # Record Separator
NUM_END_TEXT = chr(0x03) # End of Text
PROTOCOL_VERSION = 1.5
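# Editor's sketch (assumption, not from the original project): the control
# characters above suggest a simple wire framing of
#   <status number> NUM_END_HEADER <field> NUM_SEPERATOR <field> ... NUM_END_TEXT
# e.g. a hypothetical SMS-send request could be framed as
#   str(NUM_MESSAGE_SEND_REQUEST) + NUM_END_HEADER \
#       + NUM_SEPERATOR.join(["+436641234567", "hello"]) + NUM_END_TEXT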
| gpl-2.0 | 2,607,002,893,510,730,000 | 26.986486 | 59 | 0.759536 | false | 2.486194 | false | true | false |
ngmiller/mipsy | mipsy/encoder.py | 1 | 8100 |
"""
mipsy.encoder
Instruction encoder.
See README.md for usage and general information.
"""
# system imports
import bitstring
# application imports
from mipsy.arch import MIPS
from mipsy.util import LabelCache, ParseInfo
class Encoder(object):
"""
Responsible for encoding individual instructions and querying the label cache.
"""
class tokenizer(object):
"""
Defines a 'list' of tokenizing functions used for varying instructions.
Each 'tokenizer' returns a dictionary mapping the specified operands to their tokens
from the instruction data (the portion of the instruction following the operation)
instruction = (operation) (instruction_data) <-- here, we're only concerned with instruction_data
"""
def map_operands(self, to_split, operands):
"""
Helper method.
Maps operands to the preprocessed instruction data string.
"""
operand_values = to_split.split()
if len(operands) != len(operand_values):
raise RuntimeError('instruction contains wrong number of operands')
operand_map = {}
for i in range(len(operands)):
operand_map[operands[i]] = operand_values[i]
return operand_map
def RI_type(self, operands, instruction_data):
"""
The RI_type tokenizer takes instructions with the format:
(operation) [(operand1), (operand2), (operand3)]
"""
to_split = instruction_data.replace(',', ' ')
return self.map_operands(to_split, operands)
def J_type(self, operands, instruction_data):
"""
The J_type tokenizer takes jump (j, jal, jr) instructions
with the format:
(operation) [operand]
"""
return self.map_operands(instruction_data, operands)
def load_store(self, operands, instruction_data):
"""
The load_store tokenizer takes instructions with the format:
(operation) [operand1, (operand2)(operand3)]
"""
# Clear out commas and the parenthesis surrounding the base register
to_split = instruction_data.replace(',', ' ').replace('(', ' ').replace(')', ' ')
return self.map_operands(to_split, operands)
def nop(self, operands, instruction_data):
"""
The nop tokenizer simply maps all the given operands to register $zero.
"""
return {operand: '$zero' for operand in operands}
# The assembler operation table defines the parsing rules
# for a given instruction. The parsing rules are used to
# map tokens in the instruction string to register address
# and immediate value positions. (rs, rt, rd, etc)
t = tokenizer()
operations = {
'nop' : ParseInfo(['rd', 'rs', 'rt'], t.nop),
'add' : ParseInfo(['rd', 'rs', 'rt'], t.RI_type),
'addi' : ParseInfo(['rt', 'rs', 'imm'], t.RI_type),
'and' : ParseInfo(['rd', 'rs', 'rt'], t.RI_type),
'beq' : ParseInfo(['rs', 'rt', 'label'], t.RI_type),
'j' : ParseInfo(['label'], t.J_type),
'jal' : ParseInfo(['label'], t.J_type),
'jr' : ParseInfo(['rs'], t.RI_type),
'lw' : ParseInfo(['rt', 'imm', 'rs'], t.load_store),
'or' : ParseInfo(['rd', 'rs', 'rt'], t.RI_type),
'slt' : ParseInfo(['rd', 'rs', 'rt'], t.RI_type),
'sll' : ParseInfo(['rd', 'rt', 'shamt'], t.RI_type),
'sw' : ParseInfo(['rt', 'imm', 'rs'], t.load_store),
'sub' : ParseInfo(['rd', 'rs', 'rt'], t.RI_type),
# TODO ...
}
def __init__(self):
# ISA definitions
self.mips = MIPS()
# Label resolution cache
self.label_cache = LabelCache()
def encode_instruction(self, pc, instr):
"""
Given an instruction string, generate the encoded bit string.
PC (instruction index is used for branch label resolution)
"""
data = instr.split()
operation = data[0]
try:
mips_op_info = MIPS.operations[operation]
except KeyError, e:
raise RuntimeError('Unknown operation: {}'.format(operation))
# Grab the parsing info from the assembler operations table
# Generate the initial operand map using the specified tokenizer
parse_info = self.operations[operation]
encoding_map = parse_info.tokenizer(parse_info.tokens, ''.join(data[1:]))
# Get the binary equivalents of the operands and MIPS operation information
self.resolve_operands(encoding_map, operation, pc)
# Pull MIPS operation info into encoding map
self.resolve_operation_info(encoding_map, mips_op_info)
instruction = self.mips.generate_instruction(mips_op_info.format)
return instruction.encode(encoding_map)
def resolve_operation_info(self, encoding_map, mips_op_info):
"""
Adds the predefined operation info (opcode, funct) to the current encoding map.
"""
encoding_map['opcode'] = mips_op_info.opcode
encoding_map['funct'] = mips_op_info.funct
def resolve_operands(self, encoding_map, operation, pc):
"""
Converts generic register references (such as $t0, $t1, etc), immediate values, and jump addresses
to their binary equivalents.
"""
convert = Encoder.to_binary
branch_replace = False
jump_replace = False
for operand, value in encoding_map.iteritems():
if (operand == 'rs' or operand == 'rt' or operand == 'rd'):
encoding_map[operand] = MIPS.registers[value]
elif (operand == 'imm'):
encoding_map[operand] = convert(int(value), MIPS.IMMEDIATE_SIZE)
elif (operand == 'addr'):
encoding_map[operand] = convert(int(value), MIPS.ADDRESS_SIZE)
elif (operand == 'shamt'):
encoding_map[operand] = convert(int(value), MIPS.SHAMT_SIZE)
elif (operand == 'label'):
label = encoding_map[operand]
hit, index = self.label_cache.query(label)
if not hit:
raise RuntimeError('No address found for label: {}'.format(label))
if ((operation == 'beq') or (operation == 'bne')):
# Calculate the relative instruction offset. The MIPS ISA uses
# PC + 4 + (branch offset) to resolve branch targets.
if index > pc:
encoding_map[operand] = convert(index - pc - 1, MIPS.IMMEDIATE_SIZE)
elif index < pc:
encoding_map[operand] = convert((pc + 1) - index, MIPS.IMMEDIATE_SIZE)
else:
# Not sure why a branch would resolve to itself, but ok
# (PC + 4) - 4 =
encoding_map[operand] = convert(-1, MIPS.IMMEDIATE_SIZE)
branch_replace = True
elif ((operation == 'j') or (operation == 'jal')):
# Jump addresses are absolute
encoding_map[operand] = convert(index, MIPS.ADDRESS_SIZE)
jump_replace = True
# Need to convert references to 'label' back to references the instruction
# encoding string recognizes, otherwise we end up with the default value (zero)
# This doesn't feel very clean, but working on a fix.
if branch_replace:
encoding_map['imm'] = encoding_map['label']
elif jump_replace:
encoding_map['addr'] = encoding_map['label']
@staticmethod
def to_binary(decimal, length):
"""
Given a decimal, generate the binary equivalent string of
given length.
e.g. binary(2, 5) = 00010
"""
b = bitstring.Bits(int=decimal, length=length)
return b.bin
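# Editor's sketch (not part of the original module): encoding a single
# R-type instruction; assumes the register/operation tables in mipsy.arch.
#
#     encoder = Encoder()
#     print encoder.encode_instruction(0, 'add $t0, $t1, $t2')
#
# Branch labels must first be resolved into encoder.label_cache, otherwise
# encode_instruction raises a RuntimeError for label operands.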
| mit | -3,993,751,590,257,310,700 | 38.512195 | 106 | 0.564691 | false | 4.236402 | false | false | false |
Akson/RemoteConsolePlus3 | RemoteConsolePlus3/RCP3/Backends/Processors/Graphs/Plot1D.py | 1 | 2341 |
#Created by Dmytro Konobrytskyi, 2014 (github.com/Akson)
import numpy as np
import matplotlib
import matplotlib.pyplot
from RCP3.Infrastructure import TmpFilesStorage
class Backend(object):
def __init__(self, parentNode):
self._parentNode = parentNode
def Delete(self):
"""
This method is called when a parent node is deleted.
"""
pass
def GetParameters(self):
"""
Returns a dictionary with object parameters, their values,
limits and ways to change them.
"""
return {}
def SetParameters(self, parameters):
"""
Gets a dictionary with parameter values and
update object parameters accordingly
"""
pass
def ProcessMessage(self, message):
"""
This message is called when a new message comes.
If an incoming message should be processed by following nodes, the
'self._parentNode.SendMessage(message)'
should be called with an appropriate message.
"""
dataArray = np.asarray(message["Data"])
fig = matplotlib.pyplot.figure(figsize=(6, 4), dpi=float(96))
ax=fig.add_subplot(111)
#n, bins, patches = ax.hist(dataArray, bins=50)
ax.plot(range(len(dataArray)), dataArray)
processedMessage = {"Stream":message["Stream"], "Info":message["Info"]}
filePath, link = TmpFilesStorage.NewTemporaryFile("png")
fig.savefig(filePath,format='png')
matplotlib.pyplot.close(fig)
html = '<img src="http://{}" alt="Image should come here">'.format(link)
processedMessage["Data"] = html
self._parentNode.SendMessage(processedMessage)
"""
print len(message["Data"])
import numpy as np
import matplotlib.pyplot as plt
x = np.array(message["Data"])
num_bins = 50
# the histogram of the data
n, bins, patches = plt.hist(x, num_bins, normed=1, facecolor='green', alpha=0.5)
plt.subplots_adjust(left=0.15)
plt.show()
"""
def AppendContextMenuItems(self, menu):
"""
Append backend specific menu items to a context menu that user will see
when he clicks on a node.
"""
pass
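# Editor's note (derived from ProcessMessage above, added for clarity):
# incoming messages are dicts shaped like
#   {"Stream": ..., "Info": ..., "Data": [y0, y1, ...]},
# and the backend forwards {"Stream", "Info", "Data"} where "Data" holds an
# HTML <img> tag pointing at the rendered temporary PNG.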
| lgpl-3.0 | -487,449,994,099,500,860 | 29.415584 | 88 | 0.5912 | false | 4.383895 | false | false | false |
BarusXXX/K-Tree | TreeLogic.py | 1 | 3884 |
import os
from copy import deepcopy
class RecursiveTree:
def __init__(self, dir_name):
self.dir_name = dir_name
self.files = []
self.folders = [] #Tuple Absolute address, branch, level
self.branches = []
self.children_n = []
self.currentlevel = 0
self.level=[] #len(self.branches)
self.level.append(0)
self.folder_n = len(self.folders)
self.parentIndex = []
self.parentbranch = []
self.iterator = 0
self.reversead = 0
self.parentIndex.append(None)
self.branches.append([0])
self.folders.append((dir_name, "{0}", 0))
RecursiveTree.get_immediate_subdirectories(self, self.dir_name, 0)
self.level_max = max(self.level)
def Branch(self):
pass
def PrintTree(self):
print("#Folders#")
for x in self.folders:
print(x)
print("#Branches#")
for x in self.branches:
print(x)
print("#Parent Branches#")
for x in self.parentbranch:
print(x)
print("#Files#")
for x in self.files:
print(x)
def subdir(self):
return self.folders
def filedir(self):
return self.files
def sortedbranches(self):
STree = []
CountX = 0
for x in self.branches:
STree.append([])
for y in x:
STree[CountX].append(int(y))
CountX += 1
SSum = []
CountX = 0
TTree = deepcopy(STree)
for x in TTree:
CountY = 0
for y in x:
TTree[CountX][CountY] = y + 1
CountY += 1
CountX += 1
SSum.append(sum(x))
SortedTree = [x for y, x in sorted(list(zip(SSum, STree)))]
return SortedTree
def get_immediate_subdirectories(self, a_dir, curadd):
nextadd = 0
relocator = 0
cancleNo = self.reversead
for name in os.listdir(a_dir):
if os.path.isdir(os.path.join(a_dir, name)):
curaddstr = str(curadd) + ";" + str(nextadd)
relocator += 1
self.iterator += 1
self.currentlevel += 1
ContainsSub = False
ContainsNo = 0
for x in os.listdir(a_dir + "/" + name):
if os.path.isdir(a_dir + "/" + name + "/" + x):
ContainsSub = True
ContainsNo += 1
self.children_n.append(ContainsNo)
PathConstructor = "{" + str(curadd) + ";" + str(nextadd) + "}" + ":" + os.path.join(a_dir, name)
AbsAddressConstructor = (PathConstructor.split(":")[1]), (PathConstructor.split(":")[2])
self.folders.append((":".join(AbsAddressConstructor), PathConstructor.split(":")[0], self.currentlevel))
self.branches.append((((((PathConstructor.split(":")[0]).split("{")[1])).split("}")[0]).split(";")))
self.parentbranch.append(str(curadd).split(";"))
self.level.append(self.currentlevel)
self.parentIndex.append(self.iterator - relocator - self.reversead + cancleNo) #Cannot negate 1
RecursiveTree.get_immediate_subdirectories(self, (a_dir + "/" + name), curaddstr)
self.currentlevel -= 1
if ContainsSub:
self.reversead += ContainsNo
nextadd += 1
else:
self.files.append((self.iterator - relocator - self.reversead + cancleNo, os.path.join(a_dir, name))) #index of parent, direct links to file
#print("file found:", self.iterator - relocator - self.reversead + cancleNo, name)
#print("{"+str(curadd) + ";" + str(nextadd) + "}" + ":" + os.path.join(a_dir, name))
| mit | 4,737,420,698,815,880,000 | 29.582677 | 156 | 0.511843 | false | 3.903518 | false | false | false |
ndparker/wolfe | wolfe/scheduler/_job_queue.py | 1 | 4458 |
# -*- coding: ascii -*-
r"""
:Copyright:
Copyright 2014 - 2016
Andr\xe9 Malo or his licensors, as applicable
:License:
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===========
Job Queue
===========
Job Queue. The queue is implemented as priority queue using a heap.
"""
if __doc__: # pragma: no cover
# pylint: disable = redefined-builtin
__doc__ = __doc__.encode('ascii').decode('unicode_escape')
__author__ = r"Andr\xe9 Malo".encode('ascii').decode('unicode_escape')
__docformat__ = "restructuredtext en"
import heapq as _heapq
class JobQueue(object):
"""
Job queue
This container utilizes a heap structure to implement a more or less
generic priority queue (see below). The sorting order of the items is
defined by a wrapper class passed to the constructor.
The queue is made for jobs. That's why wrapper classes have to provide a
job attribute for unwrapping and items passed into the queue are expected
to provide a valid ``id`` attribute.
Additionally the queue implements boolean operations (it's false if it's
empty) and a __contains__ operation based on job IDs.
>>> class Wrapper(object):
... def __init__(self, job):
... self.job = job
... def __lt__(self, other):
... return self.job.id > other.job.id
>>> class Job(object):
... def __init__(self, job_id):
... self.id = job_id
>>> queue = JobQueue(Wrapper)
>>> queue.put(Job(2))
>>> bool(queue)
True
>>> 1 in queue
False
>>> 2 in queue
True
>>> len(queue)
1
:IVariables:
`_queue` : ``list``
actual heap containing wrapped jobs
`_wrapper` : callable
Wrapper class factory
`_ids` : ``set``
Set of job IDs currently queued
"""
def __init__(self, wrapper_class):
"""
Initialization
:Parameters:
`wrapper_class` : any
class factory expected to take a job and represent it inside the
queue. The object should be comparable with other instances
(``__lt__`` is the proper method) and should provide a ``job``
attribute pointing to the original object.
"""
self._queue = []
self._wrapper = wrapper_class
self._ids = set()
def __nonzero__(self):
"""
Return false if the queue is empty, true otherwise
:Return: Is there something in the queue?
:Rtype: ``bool``
"""
return bool(self._queue)
def __contains__(self, job_id):
"""
Check if the passed job_id is currently enqueued
:Return: Is it?
:Rtype: ``bool``
"""
return job_id in self._ids
def __len__(self):
""" Find queue length """
return len(self._queue)
def __iter__(self):
""" Iterate over the queue until it's exhausted """
try:
while True:
yield self.get()
except IndexError:
pass
def put(self, job):
"""
Put a job into the queue
:Parameters:
`job` : any
The job to put in. The object must have an ``id`` attribute,
which must be hashable.
"""
self._ids.add(job.id)
_heapq.heappush(self._queue, self._wrapper(job))
def get(self):
"""
Get the next job from the queue
:Return: A job
:Rtype: any
:Exceptions:
- `IndexError` : Queue was empty
"""
job = _heapq.heappop(self._queue).job
self._ids.remove(job.id)
return job
def peek(self):
"""
Return the next job without removing it from the queue
The job will still be wrapped in the wrapper_class container
:Return: wrapped job
:Rtype: any
:Exceptions:
- `IndexError` : Queue was empty
"""
return self._queue[0]
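# Editor's sketch (not part of the original module): with the Wrapper class
# from the docstring above, higher job IDs compare "smaller" and therefore
# come out first:
#
#     queue = JobQueue(Wrapper)
#     for job_id in (1, 3, 2):
#         queue.put(Job(job_id))
#     [job.id for job in queue]  # -> [3, 2, 1]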
| apache-2.0 | -4,326,341,695,374,241,300 | 25.855422 | 77 | 0.580978 | false | 4.270115 | false | false | false |
bvanrijn/debianpaste-clients | old-paste.py | 1 | 7602 |
#!/usr/bin/python
# Filename: paste
# Purpose: XmlRpc interface client to paste.debian.net
# Author: Copyright (C) 2007-2011 Michael Gebetsroither <michael@mgeb.org>
# License: This file is licensed under the GPL v2+. Full license text in LICENSE
# Modified original: No modifications have been made
#
# This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
################################################################################
import sys
import xmlrpclib
import optparse
import inspect
import getpass
# program defaults
DEFAULT_SERVER='http://paste.debian.net/server.pl'
class ActionFailedException(Exception):
'''Thrown if server returned an error'''
def __init__(self, errormsg, ret):
Exception.__init__(self, errormsg, ret)
def what(self):
'''Get errormessage'''
return self.args[0]
def dwhat(self):
'''Get more verbose errormessage'''
return self.args[1]
class Action(object):
def __init__(self, args, opts):
self.args_ = args
self.opts_ = opts
def _createProxy(self):
return xmlrpclib.ServerProxy(self.opts_.server, verbose=False)
def _callProxy(self, functor, server=None):
'''Wrapper for xml-rpc calls to server which throws an
ActionFailedException on error'''
if server is None:
server = self._createProxy()
ret = functor(server)
if ret['rc'] != 0:
raise ActionFailedException(ret['statusmessage'], ret)
return ret
def call(self, method_name):
'''External Interface to call the appropriate action'''
return self.__getattribute__(method_name)()
def actionAddPaste(self):
'''Add paste to the server: <1.line> <2.line> ...
default Read paste from stdin.
[text] Every argument on the commandline will be interpreted as
a separate line of paste.
'''
server = self._createProxy()
o = self.opts_
code = self.args_
if len(self.args_) == 0:
code = [ i.rstrip() for i in sys.stdin.readlines() ]
code = '\n'.join(code)
result = self._callProxy(lambda s: s.paste.addPaste(code, o.name, o.expire * 3600, o.lang, o.private),
server)
return (result['statusmessage'], result)
def actionDelPaste(self):
'''Delete paste from server: <digest>
<digest> Digest of paste you want to remove.
'''
digest = self.args_.pop(0)
result = self._callProxy(lambda s: s.paste.deletePaste(digest))
return (result['statusmessage'], result)
def actionGetPaste(self):
'''Get paste from server: <id>
<id> Id of paste you want to receive.
'''
id = self.args_.pop(0)
result = self._callProxy(lambda s: s.paste.getPaste(id))
return (result['code'], result)
def actionGetLangs(self):
'''Get supported language highlighting types from server'''
result = self._callProxy(lambda s: s.paste.getLanguages())
return ('\n'.join(result['langs']), result)
def actionAddShortUrl(self):
'''Add short-URL: <url>
<url> Short-URL to add
'''
url = self.args_.pop(0)
result = self._callProxy(lambda s: s.paste.addShortURL(url))
return (result['url'], result)
def actionGetShortUrl(self):
'''Resolve short-URL: <url>
<url> Short-URL to get clicks of
'''
url = self.args_.pop(0)
result = self._callProxy(lambda s: s.paste.resolveShortURL(url))
return (result['url'], result)
def actionGetShortUrlClicks(self):
'''Get clicks of short-URL: <url>
<url> Short-URL to get clicks of
'''
url = self.args_.pop(0)
result = self._callProxy(lambda s: s.paste.ShortURLClicks(url))
return (result['count'], result)
def actionHelp(self):
'''Print more verbose help about specific action: <action>
<action> Topic on which you need more verbose help.
'''
if len(self.args_) < 1:
alias = "help"
else:
alias = self.args_.pop(0)
if alias in actions:
fun = actions[alias]
print inspect.getdoc(self.__getattribute__(fun))
print "\naliase: " + " ".join([i for i in actions_r[fun] if i != alias])
else:
print "Error: No such command - %s" % (alias)
OPT_PARSER.print_usage()
sys.exit(0)
# actionAddPaste -> [add, a]
actions_r = {}
# add -> actionAddPaste
# a -> actionAddPaste
actions = {}
# option parser
OPT_PARSER = None
##
# MAIN
##
if __name__ == "__main__":
action_spec = ['actionAddPaste add a',
'actionDelPaste del d rm',
'actionGetPaste get g',
'actionGetLangs getlangs gl langs l',
'actionAddShortUrl addurl',
'actionGetShortUrl geturl',
'actionGetShortUrlClicks getclicks',
'actionHelp help']
for i in action_spec:
aliases = i.split()
cmd = aliases.pop(0)
actions_r[cmd] = aliases
for (k,v) in actions_r.items():
for i in v:
actions[i] = k
usage = "usage: %prog [options] ACTION <args>\n\n" +\
"actions:\n" +\
"\n".join(["%12s\t%s" % (v[0], inspect.getdoc(getattr(Action, k)).split('\n')[0]) \
for (k,v) in actions_r.items()])
running_user = getpass.getuser()
parser = optparse.OptionParser(usage=usage)
parser.add_option('-n', '--name', default=running_user, help="Name of poster")
parser.add_option('-e', '--expire', type=int, default=72, metavar='HOURS',
                    help='Time in hours after which the paste should expire')
parser.add_option('-l', '--lang', default='Plain', help='Type of language to highlight')
parser.add_option("-p", "--private", action="count", dest="private", default=0,
                    help='Create hidden paste')
parser.add_option('-s', '--server', default=DEFAULT_SERVER,
help='Paste server')
parser.add_option('-v', '--verbose', action='count', default=0, help='More output')
(opts, args) = parser.parse_args()
OPT_PARSER = parser
if len(args) == 0:
parser.error('Please provide me with an action')
elif args[0] in actions:
cmd = args.pop(0)
action = Action(args, opts)
try:
(msg, ret) = action.call(actions[cmd])
if opts.verbose == 0:
print msg
else:
print ret
except ActionFailedException, e:
sys.stderr.write('Server Error: %s\n' % e.what())
if opts.verbose >0:
print e.dwhat()
sys.exit(1)
else:
parser.error('Unknown action: %s' % args[0])
|
gpl-2.0
| 4,928,760,378,934,636,000 | 35.373206 | 241 | 0.578269 | false | 3.934783 | false | false | false |
wjakob/layerlab
|
recipes/coated-gold-with-scatmedium.py
|
1
|
2082
|
# Creates a rough gold layer with a rough dielectric coating containing an
# anisotropic scattering medium
import sys
sys.path.append('.')
from utils.materials import gold
from utils.cie import get_rgb
import layerlab as ll
eta_top = 1.5
# This step integrates the spectral IOR against the CIE XYZ curves to obtain
# equivalent sRGB values. This may seem fairly approximate but turns out to
# yield excellent agreement with spectral reference renders
print('Computing gold IOR parameters')
eta_bot = get_rgb(gold)
alpha_top = 0.1 # Beckmann roughness of top layer (coating)
alpha_bot = 0.1 # Beckmann roughness of bottom layer (gold)
# Medium parameters
g = 0.5 # Scattering anisotropy
albedo = [0.25, 0.0, 0.95] # Single scattering albedo
tau = 0.5 # Optical depth
# Construct quadrature scheme suitable for the material
n_top, m_top = ll.parameterHeuristicMicrofacet(eta=eta_top, alpha=alpha_top)
n_bot, m_bot = ll.parameterHeuristicMicrofacet(eta=eta_bot[0], alpha=alpha_bot)
n_med, m_med = ll.parameterHeuristicHG(g=g)
n = max(n_top, n_bot) # Max of zenith angle discretization
m = m_top # Number of Fourier orders determined by top layer
mu, w = ll.quad.gaussLobatto(n)
print("# of nodes = %i, fourier orders = %i" % (n, m))
# Construct coating layer
print("Creating coating layer")
coating = ll.Layer(mu, w, m)
coating.setMicrofacet(eta=eta_top, alpha=alpha_top)
output = []
for channel in range(3):
# Construct diffuse bottom layer for each channel
print("Creating metal layer")
l = ll.Layer(mu, w, m)
l.setMicrofacet(eta=eta_bot[channel], alpha=alpha_bot)
# Construct medium layer
print("Creating medium layer")
l2 = ll.Layer(mu, w, m)
l2.setHenyeyGreenstein(g=g, albedo=albedo[channel])
l2.expand(tau)
# Apply medium layer
print("Applying medium ..")
l.addToTop(l2)
# Apply coating
print("Applying coating..")
l.addToTop(coating)
output.append(l)
# .. and write to disk
print("Writing to disk..")
storage = ll.BSDFStorage.fromLayerRGB("output.bsdf", *output)
storage.close()
|
bsd-2-clause
| -3,367,170,747,667,034,600 | 29.617647 | 79 | 0.713737 | false | 3.013025 | false | false | false |
plumer/codana
|
projectdata.py
|
1
|
5358
|
class VersionDataManager:
"""Manager of all the information of files and packages in a specific version
Attributes:
packages (list of str): List of packages name
files (list of str): List of all the files in the project
packagedict (dict): Map of packages(key) and filenames(value)
filebugnum (dict): Map of filename(key) and bug numbers(value)
fileattr (dict): Map of filename(key) and the attributes of the file(value)
packageattr (dict): Map of package(key) and the attributes of the package(value)
filedepends (list of tuple): List of all the edges in the dependence graph of all files
packagedepends (list of tuple) : List of all the edges in the dependence graph of all packages
"""
def __init__(self, version='6.0.0'):
self.packagedict = {}
self.fileattr = {}
self.files = []
self.filebugnum = {}
self.packageattr = {}
        self.filedict = {}
datafile = open(r'tomcat_history/tomcat' + version + r'/tomcat_pack.txt', 'r')
for packs in datafile:
packslice = packs.strip(' \t\n').split('\t')
self.packagedict[packslice[0]] = []
self.packageattr[packslice[0]] = self.packPackageAttr(packslice[1:])
filenum = 0
if int(packslice[1]) == 0:
continue
for files in datafile:
fileattr = files.strip(' \t\n').split('\t')
if not fileattr[0] in self.packagedict[packslice[0]]:
self.files.append(fileattr[0])
                    self.packagedict[packslice[0]].append(fileattr[0])
                    # reverse map so that getPackageOfFile() can resolve a file's package
                    self.filedict[fileattr[0]] = packslice[0]
self.fileattr[fileattr[0]] = self.packFileAttr(fileattr[1:])
filenum = filenum + 1
if filenum >= int(packslice[1]):
break
datafile.close()
datafile = open(r'tomcat_history/tomcat' + version + r'/log.txt', 'r')
for record in datafile:
recordslice = record.strip(' \t\n').split('\t')
self.filebugnum[recordslice[0]] = int(recordslice[1])
datafile.close()
self.packages = self.packagedict.keys()
self.packagedepends = []
packdependfile = open(r'tomcat_history/tomcat' + version + r'/tomcat_pack_depends.txt', 'r')
for e in packdependfile:
vertices = e.strip(' \t\n').split(' ')
self.packagedepends.append( (vertices[0], vertices[-1]) )
packdependfile.close()
self.filedepends = []
filedependfile = open(r'tomcat_history/tomcat' + version + r'/tomcat_depends.txt', 'r')
for e in filedependfile:
vertices = e.strip(' \t\n').split('\t')
self.filedepends.append( (vertices[0], vertices[-1]) )
filedependfile.close()
def packPackageAttr(self, attrs):
return {'filenum' : attrs[0],
'codelines' : attrs[1],
'cyclomatic' : attrs[2]}
def packFileAttr(self, attrs):
return {'codelines' : attrs[0],
'cyclomatic' : attrs[1]}
def listFileAttr(self):
return ('codelines', 'cyclomatic')
def listPackageAttr(self):
return ('filenum', 'codelines' , 'cyclomatic')
def getPackages(self):
return self.packages
def getFilenames(self):
return self.files
def getFilesOfPackage(self, package):
return self.packagedict[package]
def getPackageOfFile(self, filename):
return self.filedict[filename]
def getFileAttr(self, filename):
return self.fileattr[filename]
def getPackageAttr(self, package):
return self.packageattr[package]
def getFileDependence(self):
return self.filedepends
def getPackageDependence(self):
return self.packagedepends
def getFileDependenceOfPackage(self, package):
deplist = []
filelist = self.getFilesOfPackage(package)
for dep in self.filedepends:
if dep[0] in filelist and dep[1] in filelist:
deplist.append(dep)
return deplist
def getBugNumberOfFile(self, filename):
if filename in self.filebugnum:
return self.filebugnum[filename]
return 0
def getBugNumberOfPackage(self, package):
bugnum = 0
for filename in self.packagedict[package]:
if filename in self.filebugnum:
bugnum = bugnum + self.filebugnum[filename]
return bugnum
class DataManager:
'''Manage all the data in all versions
Attributes:
versionArray (list): List of all the versions
dataManages (dict): Map of the version(key) and the specified data manager(value)
'''
def __init__(self):
self.versionArray = []
datafile = open(r'tomcat_history/tomcat_list.txt', 'r')
for line in datafile:
self.versionArray.append(line.strip(' \n').strip('tomcat'))
datafile.close()
self.dataManages = {}
for version in self.versionArray:
self.dataManages[version] = VersionDataManager(version)
def getManager(self, version):
return self.dataManages[version]
def getVersionArray(self):
return self.versionArray
if __name__ == '__main__':
dm = DataManager()
    dm.getManager(dm.getVersionArray()[0]).getFileDependenceOfPackage('apache.catalina')
|
mit
| -4,992,400,439,942,177,000 | 35.69863 | 102 | 0.601904 | false | 3.905248 | false | false | false |
chutsu/robotics
|
prototype/models/two_wheel.py
|
1
|
3500
|
from math import cos
from math import sin
import numpy as np
import sympy
from sympy import pprint
def two_wheel_2d_model(x, u, dt):
"""Two wheel 2D motion model
Parameters
----------
x : np.array
Two Wheel model state vector (x, y, theta)
u : np.array
Input
dt : float
Time difference
Returns
-------
np.array (x, y, theta)
"""
gdot = np.array([[u[0, 0] * cos(x[2, 0]) * dt],
[u[0, 0] * sin(x[2, 0]) * dt],
[u[1, 0] * dt]])
return x + gdot
def two_wheel_2d_linearized_model(x, u, dt):
"""Two wheel 2D linearized motion model
Parameters
----------
x : np.array
Two Wheel model state vector (x, y, theta)
u : np.array
Input
dt : float
Time difference
Returns
-------
np.array 3x3 matrix of linearized two wheel model
"""
G1 = 1.0
G2 = 0.0
G3 = -u[0, 0] * sin(x[2, 0]) * dt
G4 = 0.0
G5 = 1.0
G6 = u[0, 0] * cos(x[2, 0]) * dt
G7 = 0.0
G8 = 0.0
G9 = 1.0
return np.array([[G1, G2, G3],
[G4, G5, G6],
[G7, G8, G9]])
def two_wheel_3d_model(x, u, dt):
"""Two wheel 3D motion model
Parameters
----------
x : np.array
Two Wheel model state vector (x, y, theta)
u : np.array
Input
dt : float
Time difference
Returns
-------
np.array (x, y, z, theta)
"""
g1 = x[0] + u[0] * cos(x[3]) * dt
g2 = x[1] + u[0] * sin(x[3]) * dt
g3 = x[2] + u[1] * dt
g4 = x[3] + u[2] * dt
return np.array([g1, g2, g3, g4])
def two_wheel_2d_deriv():
""" Symbolic derivation of Jacobian of the 2D two wheel motion model """
x1, x2, x3, x4, x5 = sympy.symbols("x1,x2,x3,x4,x5")
dt = sympy.symbols("dt")
# x, y, theta, v, omega
f1 = x1 + x4 * sympy.cos(x3) * dt
f2 = x2 + x4 * sympy.sin(x3) * dt
f3 = x3 + x5 * dt
f4 = x4
f5 = x5
F = sympy.Matrix([f1, f2, f3, f4, f5])
pprint(F.jacobian([x1, x2, x3, x4, x5]))
def two_wheel_3d_deriv():
""" Symbolic derivation of Jacobian of the 3D two wheel motion model """
x1, x2, x3, x4, x5, x6, x7 = sympy.symbols("x1,x2,x3,x4,x5,x6,x7")
dt = sympy.symbols("dt")
# x1 - x
# x2 - y
# x3 - z
# x4 - theta
# x5 - v
# x6 - omega
# x7 - vz
# x, y, z, theta, v, omega, vz
f1 = x1 + x5 * sympy.cos(x4) * dt
f2 = x2 + x5 * sympy.sin(x4) * dt
f3 = x3 + x7 * dt
f4 = x4 + x6 * dt
f5 = x5
f6 = x6
f7 = x7
F = sympy.Matrix([f1, f2, f3, f4, f5, f6, f7])
pprint(F.jacobian([x1, x2, x3, x4, x5, x6, x7]))
def two_wheel_3d_deriv2():
""" Symbolic derivation of Jacobian of the 3D two wheel motion model """
functions = sympy.symbols("f1,f2,f3,f4,f5,f6,f7,f8,f9")
variables = sympy.symbols("x1,x2,x3,x4,x5,x6,x7,x8,x9")
f1, f2, f3, f4, f5, f6, f7, f8, f9 = functions
x1, x2, x3, x4, x5, x6, x7, x8, x9 = variables
dt = sympy.symbols("dt")
# x1 - x
# x2 - y
# x3 - z
# x4 - theta
# x5 - v
# x6 - vz
# x7 - omega
# x8 - a
# x9 - az
f1 = x1 + x5 * sympy.cos(x4) * dt
f2 = x2 + x5 * sympy.sin(x4) * dt
f3 = x3 + x6 * dt
f4 = x4 + x7 * dt
f5 = x5 + x8 * dt
f6 = x6 + x9 * dt
f7 = x7
f8 = x8
f9 = x9
F = sympy.Matrix([f1, f2, f3, f4, f5, f6, f7, f8, f9])
pprint(F.jacobian([x1, x2, x3, x4, x5, x6, x7, x8, x9]))
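if __name__ == "__main__":
    # Quick sanity check (illustrative, not part of the original module):
    # propagate the 2D model one 0.1 s step from the origin at v = 1 m/s,
    # omega = 0.5 rad/s, then inspect the Jacobian at the same point.
    x = np.array([[0.0], [0.0], [0.0]])  # state: x, y, theta
    u = np.array([[1.0], [0.5]])         # input: v, omega
    print(two_wheel_2d_model(x, u, 0.1).ravel())     # ~ [0.1, 0.0, 0.05]
    print(two_wheel_2d_linearized_model(x, u, 0.1))  # 3x3 state Jacobian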
|
gpl-3.0
| 2,906,790,711,327,816,000 | 19.833333 | 76 | 0.483714 | false | 2.470007 | false | false | false |
lingthio/Flask-User
|
flask_user/user_mixin.py
|
1
|
4450
|
"""This module implements the UserMixin class for Flask-User.
This Mixin adds required methods to User data-model.
"""
from flask import current_app
from flask_login import UserMixin as FlaskLoginUserMixin
class UserMixin(FlaskLoginUserMixin):
""" This class adds required methods to the User data-model.
Example:
class User(db.Model, UserMixin):
...
"""
def get_id(self):
"""Converts a User ID and parts of a User password hash to a token."""
# This function is used by Flask-Login to store a User ID securely as a browser cookie.
        # The last part of the password is included to invalidate tokens when the password changes.
# user_id and password_ends_with are encrypted, timestamped and signed.
# This function works in tandem with UserMixin.get_user_by_token()
user_manager = current_app.user_manager
user_id = self.id
password_ends_with = '' if user_manager.USER_ENABLE_AUTH0 else self.password[-8:]
user_token = user_manager.generate_token(
user_id, # User ID
password_ends_with, # Last 8 characters of user password
)
# print("UserMixin.get_id: ID:", self.id, "token:", user_token)
return user_token
@classmethod
def get_user_by_token(cls, token, expiration_in_seconds=None):
# This function works in tandem with UserMixin.get_id()
# Token signatures and timestamps are verified.
# user_id and password_ends_with are decrypted.
# Verifies a token and decrypts a User ID and parts of a User password hash
user_manager = current_app.user_manager
data_items = user_manager.verify_token(token, expiration_in_seconds)
# Verify password_ends_with
token_is_valid = False
if data_items:
# Load user by User ID
user_id = data_items[0]
password_ends_with = data_items[1]
            user = user_manager.db_manager.get_user_by_id(user_id)
            if user:
                user_password = '' if user_manager.USER_ENABLE_AUTH0 else user.password[-8:]
                # Make sure that the last 8 characters of the user password match
                token_is_valid = user_password == password_ends_with
return user if token_is_valid else None
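    # Illustrative round trip (assumes a fully configured Flask-User app;
    # `user` is a hypothetical User instance):
    #     token = user.get_id()
    #     same_user = User.get_user_by_token(token, expiration_in_seconds=3600)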
def has_roles(self, *requirements):
""" Return True if the user has all of the specified roles. Return False otherwise.
has_roles() accepts a list of requirements:
has_role(requirement1, requirement2, requirement3).
Each requirement is either a role_name, or a tuple_of_role_names.
role_name example: 'manager'
tuple_of_role_names: ('funny', 'witty', 'hilarious')
A role_name-requirement is accepted when the user has this role.
A tuple_of_role_names-requirement is accepted when the user has ONE of these roles.
has_roles() returns true if ALL of the requirements have been accepted.
For example:
has_roles('a', ('b', 'c'), d)
Translates to:
User has role 'a' AND (role 'b' OR role 'c') AND role 'd'"""
# Translates a list of role objects to a list of role_names
user_manager = current_app.user_manager
role_names = user_manager.db_manager.get_user_roles(self)
        # has_roles() accepts a list of requirements
for requirement in requirements:
if isinstance(requirement, (list, tuple)):
# this is a tuple_of_role_names requirement
tuple_of_role_names = requirement
authorized = False
for role_name in tuple_of_role_names:
if role_name in role_names:
# tuple_of_role_names requirement was met: break out of loop
authorized = True
break
if not authorized:
return False # tuple_of_role_names requirement failed: return False
else:
# this is a role_name requirement
role_name = requirement
# the user must have this role
                if role_name not in role_names:
return False # role_name requirement failed: return False
# All requirements have been met: return True
return True
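# Example requirement check (illustrative; the role names are hypothetical):
#     user.has_roles('admin', ('editor', 'moderator'))
# evaluates to True only when the user has 'admin' AND ('editor' OR 'moderator').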
|
mit
| 2,653,800,167,023,835,600 | 42.203883 | 106 | 0.602921 | false | 4.405941 | false | false | false |
abrt/faf
|
src/pyfaf/storage/migrations/versions/168c63b81f85_report_history_default_value.py
|
1
|
1945
|
# Copyright (C) 2014 ABRT Team
# Copyright (C) 2014 Red Hat, Inc.
#
# This file is part of faf.
#
# faf is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# faf is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with faf. If not, see <http://www.gnu.org/licenses/>.
"""
Report history default value
Revision ID: 168c63b81f85
Revises: 183a15e52a4f
Create Date: 2016-12-13 15:49:32.883743
"""
from alembic.op import alter_column, execute
# revision identifiers, used by Alembic.
revision = '168c63b81f85'
down_revision = '1c4d6317721a'
def upgrade() -> None:
alter_column('reporthistorydaily', 'unique', server_default="0")
alter_column('reporthistoryweekly', 'unique', server_default="0")
alter_column('reporthistorymonthly', 'unique', server_default="0")
execute('UPDATE reporthistorydaily SET "unique" = 0 WHERE "unique" IS NULL')
execute('UPDATE reporthistoryweekly SET "unique" = 0 WHERE "unique" IS NULL')
execute('UPDATE reporthistorymonthly SET "unique" = 0 WHERE "unique" IS NULL')
def downgrade() -> None:
alter_column('reporthistorydaily', 'unique', server_default=None)
alter_column('reporthistoryweekly', 'unique', server_default=None)
alter_column('reporthistorymonthly', 'unique', server_default=None)
execute('UPDATE reporthistorydaily SET "unique" = NULL WHERE "unique" = 0')
execute('UPDATE reporthistoryweekly SET "unique" = NULL WHERE "unique" = 0')
execute('UPDATE reporthistorymonthly SET "unique" = NULL WHERE "unique" = 0')
|
gpl-3.0
| 7,853,489,964,225,810,000 | 37.137255 | 82 | 0.731105 | false | 3.504505 | false | false | false |
sradevski/homeAutomate
|
scripts/laptop_on_network.py
|
1
|
1994
|
#!/usr/bin/python
import remote_core as core
import os
import sys
import nmap
import datetime
import time
import re
import go_to_sleep
try:
nm = nmap.PortScanner() # instance of nmap.PortScanner
except nmap.PortScannerError:
print('Nmap not found', sys.exc_info()[0])
sys.exit(0)
except:
print("Unexpected error:", sys.exc_info()[0])
sys.exit(0)
macAddressToSearch = '64:76:BA:A3:43:B0'
laptopHasBeenTurnedOn = False
disconnectedCounter = 0
def checkIfLaptopOn():
global macAddressToSearch, laptopHasBeenTurnedOn, disconnectedCounter
curHosts = []
# nm.scan(hosts = '192.168.11.1-8', arguments = '-n -sP -PS 7,22,88,443,80,660,2195 -PA 80,22,443 -PU -T3')
nm.scan(hosts = '192.168.11.1-8', arguments = '-n -sn -PR')
for host in nm.all_hosts():
try:
mac = nm[host]['addresses']['mac']
vendor = nm[host]['vendor'][mac]
except:
vendor = mac = 'unknown'
curHosts.append(mac)
localtime = time.asctime(time.localtime(time.time()))
print('============ {0} ============'.format(localtime))
for host in curHosts:
print(host)
config = core.load_config();
if config['location']['am_home']:
if macAddressToSearch not in curHosts:
if laptopHasBeenTurnedOn:
if disconnectedCounter > 3:
wentToSleepScript()
laptopHasBeenTurnedOn = False
disconnectedCounter += 1
        else:
            laptopHasBeenTurnedOn = True
            disconnectedCounter = 0
def wentToSleepScript():
time.sleep(10)
go_to_sleep.go_to_sleep()
# print("SLEEPING")
if __name__ == '__main__':
start_at_hour = 22
stop_at_hour = 2
sleep_seconds = 60 * 60 * (start_at_hour - stop_at_hour) - 20
while True:
localtime = time.localtime(time.time())
if localtime.tm_hour > stop_at_hour and localtime.tm_hour < start_at_hour:
time.sleep(sleep_seconds - (60 * 60 * (start_at_hour - localtime.tm_hour)))
time.sleep(10)
checkIfLaptopOn()
|
mit
| 6,664,738,618,122,529,000 | 25.586667 | 110 | 0.61986 | false | 3.091473 | false | false | false |
JordanReiter/django-notification
|
notification/views.py
|
1
|
6596
|
from django.core.urlresolvers import reverse
from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponseRedirect, Http404
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
try:
from django.contrib.syndication.views import Feed
except ImportError:
from django.contrib.syndication.views import feed as Feed
from notification.models import *
from notification.decorators import basic_auth_required, simple_basic_auth_callback
from notification.feeds import NoticeUserFeed
@basic_auth_required(realm="Notices Feed", callback_func=simple_basic_auth_callback)
def feed_for_user(request):
"""
An atom feed for all unarchived :model:`notification.Notice`s for a user.
"""
url = "feed/%s" % request.user.username
return Feed(request, url, {
"feed": NoticeUserFeed,
})
@login_required
def notices(request):
"""
The main notices index view.
Template: :template:`notification/notices.html`
Context:
notices
A list of :model:`notification.Notice` objects that are not archived
and to be displayed on the site.
"""
notices = Notice.objects.notices_for(request.user, on_site=True)
return render_to_response("notification/notices.html", {
"notices": notices,
}, context_instance=RequestContext(request))
@login_required
def notice_settings(request):
"""
The notice settings view.
Template: :template:`notification/notice_settings.html`
Context:
notice_types
A list of all :model:`notification.NoticeType` objects.
notice_settings
A dictionary containing ``column_headers`` for each ``NOTICE_MEDIA``
and ``rows`` containing a list of dictionaries: ``notice_type``, a
:model:`notification.NoticeType` object and ``cells``, a list of
tuples whose first value is suitable for use in forms and the second
value is ``True`` or ``False`` depending on a ``request.POST``
variable called ``form_label``, whose valid value is ``on``.
"""
notice_types = NoticeType.objects.all()
settings_table = []
for notice_type in notice_types:
settings_row = []
for medium_id, medium_display in NOTICE_MEDIA:
form_label = "%s_%s" % (notice_type.label, medium_id)
setting = get_notification_setting(request.user, notice_type, medium_id)
if request.method == "POST":
if request.POST.get(form_label) == "on":
if not setting.send:
setting.send = True
setting.save()
else:
if setting.send:
setting.send = False
setting.save()
settings_row.append((form_label, setting.send))
settings_table.append({"notice_type": notice_type, "cells": settings_row})
if request.method == "POST":
next_page = request.POST.get("next_page", ".")
return HttpResponseRedirect(next_page)
notice_settings = {
"column_headers": [medium_display for medium_id, medium_display in NOTICE_MEDIA],
"rows": settings_table,
}
return render_to_response("notification/notice_settings.html", {
"notice_types": notice_types,
"notice_settings": notice_settings,
}, context_instance=RequestContext(request))
@login_required
def single(request, id, mark_seen=True):
"""
Detail view for a single :model:`notification.Notice`.
Template: :template:`notification/single.html`
Context:
notice
The :model:`notification.Notice` being viewed
Optional arguments:
mark_seen
If ``True``, mark the notice as seen if it isn't
already. Do nothing if ``False``. Default: ``True``.
"""
notice = get_object_or_404(Notice, id=id)
if request.user == notice.recipient:
if mark_seen and notice.unseen:
notice.unseen = False
notice.save()
return render_to_response("notification/single.html", {
"notice": notice,
}, context_instance=RequestContext(request))
raise Http404
@login_required
def archive(request, noticeid=None, next_page=None):
"""
Archive a :model:`notices.Notice` if the requesting user is the
recipient or if the user is a superuser. Returns a
``HttpResponseRedirect`` when complete.
Optional arguments:
noticeid
The ID of the :model:`notices.Notice` to be archived.
next_page
The page to redirect to when done.
"""
if noticeid:
try:
notice = Notice.objects.get(id=noticeid)
if request.user == notice.recipient or request.user.is_superuser:
notice.archive()
else: # you can archive other users' notices
# only if you are superuser.
return HttpResponseRedirect(next_page)
except Notice.DoesNotExist:
return HttpResponseRedirect(next_page)
return HttpResponseRedirect(next_page)
@login_required
def delete(request, noticeid=None, next_page=None):
"""
Delete a :model:`notices.Notice` if the requesting user is the recipient
or if the user is a superuser. Returns a ``HttpResponseRedirect`` when
complete.
Optional arguments:
noticeid
The ID of the :model:`notices.Notice` to be archived.
next_page
The page to redirect to when done.
"""
if noticeid:
try:
notice = Notice.objects.get(id=noticeid)
if request.user == notice.recipient or request.user.is_superuser:
notice.delete()
else: # you can delete other users' notices
# only if you are superuser.
return HttpResponseRedirect(next_page)
except Notice.DoesNotExist:
return HttpResponseRedirect(next_page)
return HttpResponseRedirect(next_page)
@login_required
def mark_all_seen(request):
"""
Mark all unseen notices for the requesting user as seen. Returns a
``HttpResponseRedirect`` when complete.
"""
for notice in Notice.objects.notices_for(request.user, unseen=True):
notice.unseen = False
notice.save()
return HttpResponseRedirect(reverse("notification_notices"))
|
mit
| 8,042,785,939,941,627,000 | 32.482234 | 89 | 0.622347 | false | 4.336621 | false | false | false |
alexwaters/python-readability-api
|
readability/models.py
|
1
|
5472
|
# -*- coding: utf-8 -*-
"""
readability.models
~~~~~~~~~~~~~~~~~~
This module provides the core Readability API models.
"""
from .helpers import to_python, to_api
class BaseResource(object):
"""A Base BaseResource object."""
def __init__(self):
super(BaseResource, self).__init__()
self._rdd = None
def __dir__(self):
d = self.__dict__.copy()
try:
del d['_rdd']
except KeyError:
pass
return d.keys()
class Bookmark(BaseResource):
"""Bookmark API Model."""
def __init__(self):
self.id = None
self.user_id = None
self.read_percent = None
self.date_updated = None
self.favorite = None
self.archive = None
self.date_archived = None
self.date_opened = None
self.date_added = None
self.article = None
def __repr__(self):
return '<bookmark id="%s" favorite="%s" archive="%s" read_percent="%s">' % (self.id, self.favorite, self.archive, self.read_percent)
@staticmethod
def new_from_dict(d, rdd=None):
b = to_python(
obj=Bookmark(), in_dict=d,
string_keys = (
'id', 'user_id', 'read_percent', 'favorite', 'archive',
'author',
),
date_keys = ('date_updated', 'date_archived', 'date_opened', 'date_added'),
object_map = {'article': Article},
_rdd = rdd
)
return b
def delete(self):
"""Deletes Bookmark."""
return self._rdd._delete_resource(('bookmarks', self.id))
def update(self):
"""Updates Bookmark."""
args = to_api(
dict(
favorite=self.favorite,
archive=self.archive,
read_percent=self.read_percent,
),
int_keys=('favorite', 'archive')
)
r = self._rdd._post_resource(('bookmarks', self.id), **args)
return r
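    # Illustrative usage (assumes `rdd` is an authenticated readability client
    # and `bm` a Bookmark it returned; both names are hypothetical):
    #     bm.favorite = True
    #     bm.update()   # POSTs favorite/archive/read_percent back to the API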
class Article(BaseResource):
def __init__(self):
self.id = None
self.domain = None
self.title = None
self.url = None
self.short_url = None
self.author = None
self.word_count = None
self.content = None
self.excerpt = None
self.date_published = None
self.next_page_href = None
self.processed = None
self.content_size = None
def __repr__(self):
return '<article id="%s">' % (self.id,)
@staticmethod
def new_from_dict(d, rdd=None):
return to_python(
obj=Article(), in_dict=d,
string_keys = (
'id', 'domain', 'title', 'url', 'short_url', 'author',
'word_count', 'content', 'excerpt', 'next_page_href',
'processed', 'content_size',
),
date_keys = ('date_published',),
_rdd = rdd
)
class Domain(BaseResource):
def __init__(self):
super(Domain, self).__init__()
self.fqdn = None
self.articles_ref = None
def __repr__(self):
return '<domain fqdn="%s">' % (self.fqdn,)
@staticmethod
def new_from_dict(d, rdd=None):
return to_python(
obj=Domain(), in_dict=d,
string_keys = ('fqdn', 'articles_ref'),
_rdd = rdd
)
def articles(self, **filters):
"""Returns Article list, filtered by Domain."""
return self._rdd.get_articles(domain=self.fqdn, **filters)
def contributions(self, **filters):
"""Returns Article list, filtered by Domain."""
return self._rdd.get_contributions(domain=self.fqdn, **filters)
class Contribution(BaseResource):
def __init__(self):
super(Contribution, self).__init__()
self.date = None
self.contribution = None
self.user = None
self.domain = None
self.num_bookmarks = None
def __repr__(self):
return '<contribution domain="%s">' % (self.domain,)
@staticmethod
def new_from_dict(d, rdd=None):
return to_python(
obj=Contribution(), in_dict=d,
string_keys = ('contribution', 'user', 'domain', 'num_bookmarks'),
            date_keys = ('date',),
_rdd = rdd
)
class User(BaseResource):
"""User API Model."""
def __init__(self):
self.username = None
self.first_name = None
self.last_name = None
self.date_joined = None
def __repr__(self):
return '<user name="%s">' % (self.username,)
@staticmethod
def new_from_dict(d, rdd=None):
return to_python(
obj=User(), in_dict=d,
            string_keys = ('username', 'first_name', 'last_name'),
date_keys = ('date_joined',),
_rdd=rdd
)
def bookmarks(self, **filters):
"""Returns Bookmark list, filtered by User."""
if self.username == self._rdd.username:
return self._rdd.get_bookmarks(user=self.username, **filters)
else:
return self._rdd.get_bookmarks_by_user(self.username, **filters)
def contributions(self, **filters):
"""Returns Contributions list, filtered by User."""
if self.username == self._rdd.username:
return self._rdd.get_contributions(user=self.username, **filters)
else:
return self._rdd.get_contributions_by_user(self.username, **filters)
|
mit
| -2,055,132,855,764,576,500 | 22.088608 | 140 | 0.524671 | false | 4.01173 | false | false | false |
kaphka/catconv
|
convert.py
|
1
|
1091
|
import argparse
import signal
from tqdm import tqdm
import catconv.operations as co
import catconv.stabi as sb
exit = False
def signal_handler(signum, frame):
    global exit
    print('You pressed Ctrl+C!')
    exit = True
signal.signal(signal.SIGINT, signal_handler)
parser = argparse.ArgumentParser()
parser.add_argument("source")
parser.add_argument("target")
parser.add_argument("-u", "--update", help="overwrite previous results",
action="store_true")
args = parser.parse_args()
source = sb.op.normpath(args.source)
target = sb.op.normpath(args.target)
data_dir, target_cat_name = sb.op.split(target)
pages = list(map(sb.page_from_path, sb.catalog_pages(source, ext=".tif")))
print("Source catalog:")
print("path:", source)
print("pages:", len(pages))
conversion = {"ext": ".jpg", "remove_type": True, "to_cat": data_dir,"cat": target_cat_name}
from_to = [(page, sb.convert_page_path(page, conversion)) for page in pages]
for ft in tqdm(from_to):
if exit:
break
from_page, to_page = ft
if sb.op.isfile(to_page['path']) and not args.update:
continue
else:
co.convert_to_png(*ft)
|
apache-2.0
| 5,971,109,955,525,650,000 | 24.372093 | 92 | 0.669111 | false | 3.190058 | false | false | false |
alirizakeles/zato
|
code/zato-zmq/src/zato/zmq_/mdp/worker.py
|
1
|
9531
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2016 Dariusz Suchojad <dsuch at zato.io>
Licensed under LGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import logging
import time
from datetime import datetime, timedelta
# ZeroMQ
import zmq.green as zmq
# Zato
from zato.zmq_.mdp import BaseZMQConnection, const, EventWorkerDisconnect, EventWorkerHeartbeat, EventReady, EventWorkerReply
# ################################################################################################################################
logger = logging.getLogger(__name__)
# ################################################################################################################################
class Worker(BaseZMQConnection):
""" Standalone implementation of a worker for ZeroMQ Majordomo Protocol 0.1 http://rfc.zeromq.org/spec:7
"""
def __init__(self, service_name, broker_address='tcp://localhost:47047', linger=0, poll_interval=100, log_details=False,
heartbeat=3, heartbeat_mult=2, reconnect_sleep=2):
self.service_name = service_name
super(Worker, self).__init__(broker_address, linger, poll_interval, log_details)
# How often, in seconds, to send a heartbeat to the broker or expect one from the broker
self.heartbeat = heartbeat
# If self.heartbeat * self.heartbeat_mult is exceeded, we assume the broker is down
self.heartbeat_mult = heartbeat_mult
# How long, in seconds, to wait before attempting to reconnect to the broker
self.reconnect_sleep = reconnect_sleep
# When did we last hear from the broker
self.broker_last_heartbeat = None
# When did we last send our own heartbeat to the broker
self.worker_last_heartbeat = None
# Timestamp of when we started to run
self.last_connected = datetime.utcnow()
self.has_debug = logger.isEnabledFor(logging.DEBUG)
# Maps event IDs to methods that handle a given one
self.handle_event_map = {
const.v01.request_to_worker: self.on_event_request_to_worker,
const.v01.heartbeat: self.on_event_heartbeat,
const.v01.disconnect: self.on_event_disconnect,
}
# ################################################################################################################################
def connect(self):
logger.info('Connecting to broker %s', self.broker_address)
# Open ZeroMQ sockets first
# From worker to broker
self.client_socket.connect(self.broker_address)
# From broker to worker
self.worker_socket = self.ctx.socket(zmq.DEALER)
self.worker_socket.linger = self.linger
self.worker_poller = zmq.Poller()
self.worker_poller.register(self.worker_socket, zmq.POLLIN)
self.worker_socket.connect(self.broker_address)
# Ok, we are ready
self.notify_ready()
# We can assume that the broker received our message
self.last_connected = datetime.utcnow()
# ################################################################################################################################
def stop(self):
self.worker_poller.unregister(self.worker_socket)
self.worker_socket.close()
self.stop_client_socket()
self.connect_client_socket()
logger.info('Stopped worker for %s', self.broker_address)
# ################################################################################################################################
def needs_reconnect(self):
base_timestamp = self.broker_last_heartbeat if self.broker_last_heartbeat else self.last_connected
return datetime.utcnow() >= base_timestamp + timedelta(seconds=self.heartbeat * self.heartbeat_mult)
# ################################################################################################################################
def reconnect(self):
last_hb = '{} (UTC)'.format(self.broker_last_heartbeat.isoformat()) if self.broker_last_heartbeat else 'never'
logger.info('Sleeping for %ss before reconnecting to broker %s, last HB from broker: %s',
self.reconnect_sleep, self.broker_address, last_hb)
time.sleep(self.reconnect_sleep)
logger.info('Reconnecting to broker %s', self.broker_address)
self.stop()
self.connect()
# Let's give the other side a moment to reply to our ready event
time.sleep(self.reconnect_sleep)
# ################################################################################################################################
def needs_hb_to_broker(self):
return datetime.utcnow() >= self.worker_last_heartbeat + timedelta(seconds=self.heartbeat)
# ################################################################################################################################
def serve_forever(self):
# To speed up look-ups
log_details = self.log_details
# Main loop
while self.keep_running:
try:
items = self.worker_poller.poll(self.poll_interval)
except KeyboardInterrupt:
self.notify_disconnect()
break
if items:
msg = self.worker_socket.recv_multipart()
if log_details:
logger.info('Received msg at %s %s', self.broker_address, msg)
self.handle(msg)
else:
if log_details:
logger.info('No items for worker at %s', self.broker_address)
if self.needs_hb_to_broker():
self.notify_heartbeat()
if self.needs_reconnect():
self.reconnect()
# ################################################################################################################################
def on_event_request_to_worker(self, msg):
logger.info('In _handle %s', msg)
return datetime.utcnow().isoformat()
# ################################################################################################################################
def on_event_heartbeat(self, *ignored):
""" A no-op since self.handle already handles heartbeats from the broker.
"""
# ################################################################################################################################
def on_event_disconnect(self, *ignored):
""" Our broker tells us to disconnect - according to the spec we now must re-open the connection.
"""
self.reconnect()
# ################################################################################################################################
def handle(self, msg):
logger.info('Handling %s', msg)
# Since we received this message, it means the broker is up so the message,
# no matter what event it is, allows us to update the timestamp of the last HB from broker
self.broker_last_heartbeat = datetime.utcnow()
sender_id = None
body = None
command = msg[2]
if command == const.v01.request_to_worker:
sender_id = msg[3]
body = msg[4]
# Hand over the message to an actual implementation and reply if told to
response = self.handle_event_map[command](body)
if response:
self.send(EventWorkerReply(response, sender_id).serialize())
# Message handled, we are ready to handle a new one, assuming this one was a request
if command == const.v01.request_to_worker:
self.notify_ready()
# ################################################################################################################################
def send(self, data, needs_hb=True):
""" Sends data to the broker and updates an internal timer of when the last time we send a heartbeat to the broker
since sending anything in that direction should be construed by the broker as a heartbeat itself.
"""
# Send data first
self.worker_socket.send_multipart(data)
# Update the timer
if needs_hb:
self.worker_last_heartbeat = datetime.utcnow()
# ################################################################################################################################
def notify_ready(self):
""" Notify the broker that we are ready to handle a new message.
"""
self.send(EventReady(self.service_name).serialize())
# ################################################################################################################################
def notify_heartbeat(self):
""" Notify the broker that we are still around.
"""
self.send(EventWorkerHeartbeat().serialize())
# ################################################################################################################################
def notify_disconnect(self):
""" Notify the broker that we are to disconnect from it.
"""
self.send(EventWorkerDisconnect().serialize(), needs_hb=False)
# ################################################################################################################################
if __name__ == '__main__':
w = Worker(b'My service', 'tcp://localhost:47047')
w.connect()
w.serve_forever()
|
gpl-3.0
| -5,260,113,745,436,168,000 | 37.587045 | 130 | 0.484 | false | 5.113197 | false | false | false |
antonygc/liblightbase
|
liblightbase/lbdoc/metaclass.py
|
1
|
6065
|
from liblightbase import lbutils
from liblightbase.lbdoc.metadata import DocumentMetadata
def generate_metaclass(struct, base=None):
"""
    Generate the document metaclass. The document metaclass
    is an abstraction of the document model defined by the
    base structures.
@param struct: Field or Group object.
@param base: Base object or None.
"""
build_metadata = False
if base is None:
base = struct
build_metadata = True
snames = struct.content.__snames__
rnames = struct.content.__rnames__
class MetaClass(object):
"""
        Document metaclass. Describes the structures defined by
        the document structure model.
"""
# @property __valreq__: Flag used to validate required
# fields or not.
__valreq__ = True
# @property __slots__: reserves space for the declared
# variables and prevents the automatic creation of
# __dict__ and __weakref__ for each instance.
__slots__ = ['_' + sname for sname in snames]
if build_metadata:
__slots__.append('__metadata__')
def __init__(self, **kwargs):
""" Document MetaClass constructor
"""
if self.__valreq__:
lbutils.validate_required(rnames, kwargs)
for arg in kwargs:
setattr(self, arg, kwargs[arg])
for childstruct in struct.content:
structname, prop = generate_property(base, childstruct)
setattr(MetaClass, structname, prop)
if build_metadata:
MetaClass._metadata = build_metadata_prop()
MetaClass.__name__ = struct.metadata.name
return MetaClass
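# Illustrative use (assumes `base` is a liblightbase Base object whose
# structure defines a required field named 'title'; both are hypothetical):
#     DocClass = generate_metaclass(base)
#     doc = DocClass(title='example')  # raises if a required field is missing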
def generate_property(base, struct):
"""
Make python's property based on structure attributes.
@param base: Base object.
@param struct: Field or Group object.
"""
if struct.is_field:
structname = struct.name
elif struct.is_group:
structname = struct.metadata.name
attr_name = '_' + structname
def getter(self):
value = getattr(self, attr_name)
if struct.is_field:
return getattr(value, '__value__')
return value
def setter(self, value):
struct_metaclass = base.metaclass(structname)
if struct.is_field:
value = struct_metaclass(value)
elif struct.is_group:
if struct.metadata.multivalued:
msg = 'object {} should be instance of {}'.format(
struct.metadata.name, list)
assert isinstance(value, list), msg
msg = '{} list elements should be instances of {}'.format(
struct.metadata.name, struct_metaclass)
assertion = all(isinstance(element, struct_metaclass) \
for element in value)
assert assertion, msg
value = generate_multimetaclass(struct,
struct_metaclass)(value)
else:
msg = '{} object should be an instance of {}'.format(
struct.metadata.name, struct_metaclass)
assert isinstance(value, struct_metaclass), msg
setattr(self, attr_name, value)
def deleter(self):
delattr(self, attr_name)
return structname, property(getter,
setter, deleter, structname)
def build_metadata_prop():
def fget(self):
return self.__metadata__
def fset(self, value):
msg = '_metadata attribute should be a DocumentMetadata object.'
        assert isinstance(value, DocumentMetadata), msg
self.__metadata__ = value
def fdel(self):
del self.__metadata__
return property(fget, fset, fdel, '_metadata')
def generate_multimetaclass(struct, struct_metaclass):
"""
Generate metaclass to use with multivalued groups.
@param struct: Field or Group object
@param struct_metaclass: The struct Metaclass
"""
class MultiGroupMetaClass(list):
"""
Multivalued Group Metaclass. Metaclass used to ensure list
        elements are instances of the right metaclass.
"""
def __setitem__(self, index, element):
""" x.__setitem__(y, z) <==> x[y] = z
"""
msg = '{} list elements should be instances of {}'.format(
struct.metadata.name, struct_metaclass)
assert isinstance(element, struct_metaclass), msg
return super(MultiGroupMetaClass, self).__setitem__(index,
element)
def append(self, element):
""" L.append(object) -- append object to end
"""
msg = '{} list elements should be instances of {}'.format(
struct.metadata.name, struct_metaclass)
assert isinstance(element, struct_metaclass), msg
return super(MultiGroupMetaClass, self).append(element)
return MultiGroupMetaClass
def generate_field_metaclass(field, base):
"""
Generate field metaclass. The field metaclass
validates incoming value against fields' datatype.
@param field: Field object.
@param base: Base object.
"""
class FieldMetaClass(object):
"""
Field MetaClass. validates incoming
value against fields' datatype.
"""
def __init__(self, value):
self.__value__ = value
def __setattr__(self, obj, value):
validator = field._datatype.__schema__(base, field, 0)
if field.multivalued is True:
msg = 'Expected type list for {}, but found {}'
assert isinstance(value, list), msg.format(
field.name, type(value))
value = [validator(element) for element in value]
else:
value = validator(value)
super(FieldMetaClass, self).__setattr__('__value__', value)
def __getattr__(self, obj):
return super(FieldMetaClass, self).__getattribute__('__value__')
FieldMetaClass.__name__ = field.name
return FieldMetaClass
|
gpl-2.0
| 6,355,603,194,399,791,000 | 33.460227 | 76 | 0.588458 | false | 4.658218 | false | false | false |
aio-libs/aiozmq
|
examples/core_dealer_router.py
|
1
|
1579
|
import asyncio
import aiozmq
import zmq
class ZmqDealerProtocol(aiozmq.ZmqProtocol):
transport = None
def __init__(self, queue, on_close):
self.queue = queue
self.on_close = on_close
def connection_made(self, transport):
self.transport = transport
def msg_received(self, msg):
self.queue.put_nowait(msg)
def connection_lost(self, exc):
self.on_close.set_result(exc)
class ZmqRouterProtocol(aiozmq.ZmqProtocol):
transport = None
def __init__(self, on_close):
self.on_close = on_close
def connection_made(self, transport):
self.transport = transport
def msg_received(self, msg):
self.transport.write(msg)
def connection_lost(self, exc):
self.on_close.set_result(exc)
async def go():
router_closed = asyncio.Future()
dealer_closed = asyncio.Future()
router, _ = await aiozmq.create_zmq_connection(
lambda: ZmqRouterProtocol(router_closed), zmq.ROUTER, bind="tcp://127.0.0.1:*"
)
addr = list(router.bindings())[0]
queue = asyncio.Queue()
dealer, _ = await aiozmq.create_zmq_connection(
lambda: ZmqDealerProtocol(queue, dealer_closed), zmq.DEALER, connect=addr
)
for i in range(10):
msg = (b"data", b"ask", str(i).encode("utf-8"))
dealer.write(msg)
answer = await queue.get()
print(answer)
dealer.close()
await dealer_closed
router.close()
await router_closed
def main():
asyncio.run(go())
print("DONE")
if __name__ == "__main__":
main()
|
bsd-2-clause
| -5,655,556,457,899,408,000 | 21.239437 | 86 | 0.621279 | false | 3.455142 | false | false | false |
pyfa-org/eos
|
eos/item/mixin/effect_stats/remote_repair.py
|
1
|
1829
|
# ==============================================================================
# Copyright (C) 2011 Diego Duclos
# Copyright (C) 2011-2018 Anton Vorobyov
#
# This file is part of Eos.
#
# Eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Eos. If not, see <http://www.gnu.org/licenses/>.
# ==============================================================================
from eos.eve_obj.effect.repairs.base import RemoteArmorRepairEffect
from eos.eve_obj.effect.repairs.base import RemoteShieldRepairEffect
from eos.item.mixin.base import BaseItemMixin
class RemoteRepairMixin(BaseItemMixin):
def __repair_effect_iter(self, effect_class):
for effect in self._type_effects.values():
if not isinstance(effect, effect_class):
continue
if effect.id not in self._running_effect_ids:
continue
yield effect
def get_armor_rps(self, reload=False):
rps = 0
for effect in self.__repair_effect_iter(RemoteArmorRepairEffect):
rps += effect.get_rps(self, reload=reload)
return rps
def get_shield_rps(self, reload=False):
rps = 0
for effect in self.__repair_effect_iter(RemoteShieldRepairEffect):
rps += effect.get_rps(self, reload=reload)
return rps
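    # Illustrative queries (assumes `item` is a fitted Eos module with a
    # running remote repair effect; the variable name is hypothetical):
    #     item.get_armor_rps()              # sustained rps, ignoring reload
    #     item.get_shield_rps(reload=True)  # rps including reload downtime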
|
lgpl-3.0
| -2,539,301,026,785,657,300 | 37.914894 | 80 | 0.636413 | false | 4.04646 | false | false | false |
bollu/polymage
|
sandbox/apps/python/img_proc/harris/init.py
|
1
|
1485
|
import sys
import os.path
from PIL import Image
import numpy as np
from arg_parser import parse_args
from printer import print_header, print_usage, print_line
def init_images(app_data):
print("[init.py] : initializing images...")
app_args = app_data['app_args']
# input image:
img_path = app_args.img_file
img = np.array(Image.open(img_path).convert('1'))
rows, cols = img.shape
# convert to float image
IN = np.array(img)
IN = IN.astype(np.float32).ravel()
# final output image
OUT = np.zeros((rows, cols), np.float32).ravel()
img_data = {}
img_data['IN'] = IN
img_data['OUT'] = OUT
app_data['img_data'] = img_data
app_data['rows'] = rows
app_data['cols'] = cols
return
def get_input(app_data):
# parse the command-line arguments
app_args = parse_args()
app_data['app_args'] = app_args
app_data['mode'] = app_args.mode
app_data['runs'] = int(app_args.runs)
app_data['graph_gen'] = bool(app_args.graph_gen)
app_data['timer'] = app_args.timer
# storage optimization
app_data['optimize_storage'] = bool(app_args.optimize_storage)
# early freeing of allocated arrays
app_data['early_free'] = bool(app_args.early_free)
# pool allocate option
app_data['pool_alloc'] = bool(app_args.pool_alloc)
return
def init_all(app_data):
pipe_data = {}
app_data['pipe_data'] = pipe_data
get_input(app_data)
init_images(app_data)
return
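# Illustrative driver (assumes this module is imported by the app's main
# script; the surrounding pipeline code is not shown here):
#     app_data = {}
#     init_all(app_data)
#     IN = app_data['img_data']['IN']    # flattened float32 input image
#     OUT = app_data['img_data']['OUT']  # flattened float32 output buffer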
|
apache-2.0
| -1,343,414,416,860,723,500 | 22.203125 | 66 | 0.630976 | false | 3.207343 | false | false | false |
Endika/mitmproxy
|
libmproxy/contentviews.py
|
1
|
16688
|
"""
Mitmproxy Content Views
=======================
mitmproxy includes a set of content views which can be used to format/decode/highlight data.
While they are currently used for HTTP message bodies only, they may be used in other contexts
in the future, e.g. to decode protobuf messages sent as WebSocket frames.
Thus, the View API is very minimalistic. The only arguments are `data` and `**metadata`,
where `data` is the actual content (as bytes). The contents of metadata depend on the protocol in
use. For HTTP, the message headers are passed as the ``headers`` keyword argument.
"""
from __future__ import (absolute_import, print_function, division)
import cStringIO
import json
import logging
import subprocess
import sys
import lxml.html
import lxml.etree
import datetime
from PIL import Image
from PIL.ExifTags import TAGS
import html2text
import six
from netlib.odict import ODict
from netlib import encoding
from netlib.utils import clean_bin, hexdump, urldecode, multipartdecode, parse_content_type
from . import utils
from .exceptions import ContentViewException
from .contrib import jsbeautifier
from .contrib.wbxml.ASCommandResponse import ASCommandResponse
try:
import pyamf
from pyamf import remoting, flex
except ImportError: # pragma nocover
pyamf = None
try:
import cssutils
except ImportError: # pragma nocover
cssutils = None
else:
cssutils.log.setLevel(logging.CRITICAL)
cssutils.ser.prefs.keepComments = True
cssutils.ser.prefs.omitLastSemicolon = False
cssutils.ser.prefs.indentClosingBrace = False
cssutils.ser.prefs.validOnly = False
# Default view cutoff *in lines*
VIEW_CUTOFF = 512
KEY_MAX = 30
def format_dict(d):
"""
Helper function that transforms the given dictionary into a list of
("key", key )
("value", value)
tuples, where key is padded to a uniform width.
"""
max_key_len = max(len(k) for k in d.keys())
max_key_len = min(max_key_len, KEY_MAX)
for key, value in d.items():
key += ":"
key = key.ljust(max_key_len + 2)
yield [
("header", key),
("text", value)
]
def format_text(text):
"""
Helper function that transforms bytes into the view output format.
"""
for line in text.splitlines():
yield [("text", line)]
class View(object):
name = None
prompt = ()
content_types = []
def __call__(self, data, **metadata):
"""
Transform raw data into human-readable output.
Args:
data: the data to decode/format as bytes.
metadata: optional keyword-only arguments for metadata. Implementations must not
rely on a given argument being present.
Returns:
A (description, content generator) tuple.
The content generator yields lists of (style, text) tuples, where each list represents
            a single line. ``text`` is an unfiltered byte string which may need to be escaped,
            depending on the output in use.
Caveats:
The content generator must not yield tuples of tuples,
because urwid cannot process that. You have to yield a *list* of tuples per line.
"""
raise NotImplementedError()
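# Minimal custom view sketch (illustrative, not shipped with mitmproxy):
#     class ViewUpper(View):
#         name = "Upper"
#         prompt = ("upper", "U")
#         content_types = []
#         def __call__(self, data, **metadata):
#             return "Uppercased", format_text(data.upper())
#     add(ViewUpper())   # register it via add(), defined at the bottom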
class ViewAuto(View):
name = "Auto"
prompt = ("auto", "a")
content_types = []
def __call__(self, data, **metadata):
headers = metadata.get("headers", {})
ctype = headers.get("content-type")
if ctype:
            ct = parse_content_type(ctype)
            ct = "%s/%s" % (ct[0], ct[1]) if ct else ""
if ct in content_types_map:
return content_types_map[ct][0](data, **metadata)
elif utils.isXML(data):
return get("XML")(data, **metadata)
if utils.isMostlyBin(data):
return get("Hex")(data)
return get("Raw")(data)
class ViewRaw(View):
name = "Raw"
prompt = ("raw", "r")
content_types = []
def __call__(self, data, **metadata):
return "Raw", format_text(data)
class ViewHex(View):
name = "Hex"
prompt = ("hex", "e")
content_types = []
@staticmethod
def _format(data):
for offset, hexa, s in hexdump(data):
yield [
("offset", offset + " "),
("text", hexa + " "),
("text", s)
]
def __call__(self, data, **metadata):
return "Hex", self._format(data)
class ViewXML(View):
name = "XML"
prompt = ("xml", "x")
content_types = ["text/xml"]
def __call__(self, data, **metadata):
parser = lxml.etree.XMLParser(
remove_blank_text=True,
resolve_entities=False,
strip_cdata=False,
recover=False
)
try:
document = lxml.etree.fromstring(data, parser)
except lxml.etree.XMLSyntaxError:
return None
docinfo = document.getroottree().docinfo
prev = []
p = document.getroottree().getroot().getprevious()
while p is not None:
prev.insert(
0,
lxml.etree.tostring(p)
)
p = p.getprevious()
doctype = docinfo.doctype
if prev:
doctype += "\n".join(prev).strip()
doctype = doctype.strip()
s = lxml.etree.tostring(
document,
pretty_print=True,
xml_declaration=True,
doctype=doctype or None,
encoding=docinfo.encoding
)
return "XML-like data", format_text(s)
class ViewJSON(View):
name = "JSON"
prompt = ("json", "s")
content_types = ["application/json"]
def __call__(self, data, **metadata):
pretty_json = utils.pretty_json(data)
if pretty_json:
return "JSON", format_text(pretty_json)
class ViewHTML(View):
name = "HTML"
prompt = ("html", "h")
content_types = ["text/html"]
def __call__(self, data, **metadata):
if utils.isXML(data):
parser = lxml.etree.HTMLParser(
strip_cdata=True,
remove_blank_text=True
)
d = lxml.html.fromstring(data, parser=parser)
docinfo = d.getroottree().docinfo
s = lxml.etree.tostring(
d,
pretty_print=True,
doctype=docinfo.doctype,
encoding='utf8'
)
return "HTML", format_text(s)
class ViewHTMLOutline(View):
name = "HTML Outline"
prompt = ("html outline", "o")
content_types = ["text/html"]
def __call__(self, data, **metadata):
data = data.decode("utf-8")
h = html2text.HTML2Text(baseurl="")
h.ignore_images = True
h.body_width = 0
outline = h.handle(data)
return "HTML Outline", format_text(outline)
class ViewURLEncoded(View):
name = "URL-encoded"
prompt = ("urlencoded", "u")
content_types = ["application/x-www-form-urlencoded"]
def __call__(self, data, **metadata):
d = urldecode(data)
return "URLEncoded form", format_dict(ODict(d))
class ViewMultipart(View):
name = "Multipart Form"
prompt = ("multipart", "m")
content_types = ["multipart/form-data"]
@staticmethod
def _format(v):
yield [("highlight", "Form data:\n")]
for message in format_dict(ODict(v)):
yield message
def __call__(self, data, **metadata):
headers = metadata.get("headers", {})
v = multipartdecode(headers, data)
if v:
return "Multipart form", self._format(v)
if pyamf:
class DummyObject(dict):
def __init__(self, alias):
dict.__init__(self)
def __readamf__(self, input):
data = input.readObject()
self["data"] = data
def pyamf_class_loader(s):
for i in pyamf.CLASS_LOADERS:
if i != pyamf_class_loader:
v = i(s)
if v:
return v
return DummyObject
pyamf.register_class_loader(pyamf_class_loader)
class ViewAMF(View):
name = "AMF"
prompt = ("amf", "f")
content_types = ["application/x-amf"]
def unpack(self, b, seen=set([])):
if hasattr(b, "body"):
return self.unpack(b.body, seen)
if isinstance(b, DummyObject):
if id(b) in seen:
return "<recursion>"
else:
seen.add(id(b))
for k, v in b.items():
b[k] = self.unpack(v, seen)
return b
elif isinstance(b, dict):
for k, v in b.items():
b[k] = self.unpack(v, seen)
return b
elif isinstance(b, list):
return [self.unpack(i) for i in b]
elif isinstance(b, datetime.datetime):
return str(b)
elif isinstance(b, flex.ArrayCollection):
return [self.unpack(i, seen) for i in b]
else:
return b
def _format(self, envelope):
for target, message in iter(envelope):
if isinstance(message, pyamf.remoting.Request):
yield [
("header", "Request: "),
("text", str(target)),
]
else:
yield [
("header", "Response: "),
("text", "%s, code %s" % (target, message.status)),
]
s = json.dumps(self.unpack(message), indent=4)
for msg in format_text(s):
yield msg
def __call__(self, data, **metadata):
envelope = remoting.decode(data, strict=False)
if envelope:
return "AMF v%s" % envelope.amfVersion, self._format(envelope)
class ViewJavaScript(View):
name = "JavaScript"
prompt = ("javascript", "j")
content_types = [
"application/x-javascript",
"application/javascript",
"text/javascript"
]
def __call__(self, data, **metadata):
opts = jsbeautifier.default_options()
opts.indent_size = 2
res = jsbeautifier.beautify(data, opts)
return "JavaScript", format_text(res)
class ViewCSS(View):
name = "CSS"
prompt = ("css", "c")
content_types = [
"text/css"
]
def __call__(self, data, **metadata):
if cssutils:
sheet = cssutils.parseString(data)
beautified = sheet.cssText
else:
beautified = data
return "CSS", format_text(beautified)
class ViewImage(View):
name = "Image"
prompt = ("image", "i")
content_types = [
"image/png",
"image/jpeg",
"image/gif",
"image/vnd.microsoft.icon",
"image/x-icon",
]
def __call__(self, data, **metadata):
try:
img = Image.open(cStringIO.StringIO(data))
except IOError:
return None
parts = [
("Format", str(img.format_description)),
("Size", "%s x %s px" % img.size),
("Mode", str(img.mode)),
]
for i in sorted(img.info.keys()):
if i != "exif":
parts.append(
(str(i), str(img.info[i]))
)
if hasattr(img, "_getexif"):
ex = img._getexif()
if ex:
for i in sorted(ex.keys()):
tag = TAGS.get(i, i)
parts.append(
(str(tag), str(ex[i]))
)
fmt = format_dict(ODict(parts))
return "%s image" % img.format, fmt
class ViewProtobuf(View):
"""Human friendly view of protocol buffers
The view uses the protoc compiler to decode the binary
"""
name = "Protocol Buffer"
prompt = ("protobuf", "p")
content_types = [
"application/x-protobuf",
"application/x-protobuffer",
]
@staticmethod
def is_available():
try:
p = subprocess.Popen(
["protoc", "--version"],
stdout=subprocess.PIPE
)
out, _ = p.communicate()
return out.startswith("libprotoc")
        except OSError:
return False
def decode_protobuf(self, content):
# if Popen raises OSError, it will be caught in
# get_content_view and fall back to Raw
p = subprocess.Popen(['protoc', '--decode_raw'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = p.communicate(input=content)
if out:
return out
else:
return err
def __call__(self, data, **metadata):
decoded = self.decode_protobuf(data)
return "Protobuf", format_text(decoded)
class ViewWBXML(View):
name = "WBXML"
prompt = ("wbxml", "w")
content_types = [
"application/vnd.wap.wbxml",
"application/vnd.ms-sync.wbxml"
]
def __call__(self, data, **metadata):
try:
parser = ASCommandResponse(data)
parsedContent = parser.xmlString
if parsedContent:
return "WBXML", format_text(parsedContent)
        except Exception:
return None
views = []
content_types_map = {}
view_prompts = []
def get(name):
for i in views:
if i.name == name:
return i
def get_by_shortcut(c):
for i in views:
if i.prompt[1] == c:
return i
def add(view):
# TODO: auto-select a different name (append an integer?)
for i in views:
if i.name == view.name:
raise ContentViewException("Duplicate view: " + view.name)
# TODO: the UI should auto-prompt for a replacement shortcut
for prompt in view_prompts:
if prompt[1] == view.prompt[1]:
raise ContentViewException("Duplicate view shortcut: " + view.prompt[1])
views.append(view)
for ct in view.content_types:
l = content_types_map.setdefault(ct, [])
l.append(view)
view_prompts.append(view.prompt)
def remove(view):
for ct in view.content_types:
l = content_types_map.setdefault(ct, [])
l.remove(view)
if not len(l):
del content_types_map[ct]
view_prompts.remove(view.prompt)
views.remove(view)
add(ViewAuto())
add(ViewRaw())
add(ViewHex())
add(ViewJSON())
add(ViewXML())
add(ViewWBXML())
add(ViewHTML())
add(ViewHTMLOutline())
add(ViewJavaScript())
add(ViewCSS())
add(ViewURLEncoded())
add(ViewMultipart())
add(ViewImage())
if pyamf:
add(ViewAMF())
if ViewProtobuf.is_available():
add(ViewProtobuf())
def safe_to_print(lines, encoding="utf8"):
"""
Wraps a content generator so that each text portion is a *safe to print* unicode string.
"""
for line in lines:
clean_line = []
for (style, text) in line:
try:
text = clean_bin(text.decode(encoding, "strict"))
except UnicodeDecodeError:
text = clean_bin(text).decode(encoding, "strict")
clean_line.append((style, text))
yield clean_line
def get_content_view(viewmode, data, **metadata):
"""
Args:
viewmode: the view to use.
data, **metadata: arguments passed to View instance.
Returns:
A (description, content generator) tuple.
In contrast to calling the views directly, text is always safe-to-print unicode.
Raises:
ContentViewException, if the content view threw an error.
"""
if not data:
return "No content", []
msg = []
headers = metadata.get("headers", {})
enc = headers.get("content-encoding")
if enc and enc != "identity":
decoded = encoding.decode(enc, data)
if decoded:
data = decoded
msg.append("[decoded %s]" % enc)
try:
ret = viewmode(data, **metadata)
# Third-party viewers can fail in unexpected ways...
except Exception as e:
six.reraise(
ContentViewException,
ContentViewException(str(e)),
sys.exc_info()[2]
)
if not ret:
ret = get("Raw")(data, **metadata)
msg.append("Couldn't parse: falling back to Raw")
else:
msg.append(ret[0])
return " ".join(msg), safe_to_print(ret[1])
|
mit
| 1,470,349,869,913,732,900 | 26.583471 | 98 | 0.54704 | false | 3.998083 | false | false | false |
dropbox/changes
|
changes/listeners/mail.py
|
1
|
8772
|
from __future__ import absolute_import, print_function
from itertools import imap
import logging
import toronado
from email.utils import parseaddr
from flask import current_app, render_template
from flask_mail import Message, sanitize_address
from jinja2 import Markup
from typing import List # NOQA
from changes.config import db, mail
from changes.constants import Result, Status
from changes.db.utils import try_create
from changes.lib import build_context_lib, build_type
from changes.lib.build_context_lib import CollectionContext # NOQA
from changes.models.event import Event, EventType
from changes.models.build import Build
from changes.models.job import Job
from changes.models.jobplan import JobPlan
from changes.models.project import ProjectOption
def filter_recipients(email_list, domain_whitelist=None):
"""
Returns emails from email_list that have been white-listed by
domain_whitelist.
"""
if domain_whitelist is None:
domain_whitelist = current_app.config['MAIL_DOMAIN_WHITELIST']
if not domain_whitelist:
return email_list
return [
e for e in email_list
if parseaddr(e)[1].split('@', 1)[-1] in domain_whitelist
]
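# A small illustrative sketch (hypothetical addresses): with a whitelist of
# ['example.com'], only the first address below survives filtering.
#
#   filter_recipients(
#       ['Jane <jane@example.com>', 'eve@elsewhere.org'],
#       domain_whitelist=['example.com'],
#   )  # -> ['Jane <jane@example.com>']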
class MailNotificationHandler(object):
logger = logging.getLogger('mail')
def send(self, msg, build):
msg.recipients = filter_recipients(msg.recipients)
if not msg.recipients:
self.logger.info(
'Exiting for collection_id={} because its message has no '
'recipients.'.format(build.collection_id))
return
event = try_create(Event, where={
'type': EventType.email,
'item_id': build.collection_id,
'data': {
'triggering_build_id': build.id.hex,
'recipients': msg.recipients,
}
})
# If we were unable to create the Event, we must've done so (and thus sent the mail) already.
if not event:
self.logger.warning('An email has already been sent for collection_id=%s, (build_id=%s).',
build.collection_id, build.id.hex)
return
mail.send(msg)
def get_msg(self, builds):
# type: (List[Build]) -> Message
context = build_context_lib.get_collection_context(builds) # type: CollectionContext
if context.result == Result.passed:
return None
max_shown = current_app.config.get('MAX_SHOWN_ITEMS_PER_BUILD_MAIL', 3)
context_dict = context._asdict()
context_dict.update({
'MAX_SHOWN_ITEMS_PER_BUILD': max_shown,
'showing_failing_tests_count':
sum([min(b['failing_tests_count'], max_shown) for b in context.builds])
})
recipients = self.get_collection_recipients(context)
msg = Message(context.title, recipients=recipients, extra_headers={
'Reply-To': ', '.join(sanitize_address(r) for r in recipients),
})
msg.body = render_template('listeners/mail/notification.txt', **context_dict)
msg.html = Markup(toronado.from_string(
render_template('listeners/mail/notification.html', **context_dict)
))
return msg
def get_collection_recipients(self, collection_context):
# type: (CollectionContext) -> List[unicode]
"""
Returns a list of recipients for a collection context created by
get_collection_context. Only recipients for failing builds will be
returned.
"""
recipient_lists = map(
lambda build_context: self.get_build_recipients(build_context['build']),
collection_context.builds)
return list(set([r for rs in recipient_lists for r in rs]))
def get_build_recipients(self, build):
# type: (Build) -> List[unicode]
"""
Returns a list of recipients for a build.
The build author is included unless the build and all failing jobs
have turned off the mail.notify-author option.
Successful builds will return the empty list.
Recipients are also collected from each failing job's
mail.notify-addresses and mail.notify-addresses-revisions options.
Should there be no failing jobs (is that possible?), recipients are
collected from the build's own mail.notify-addresses and
mail.notify-addresses-revisions options.
"""
if build.result == Result.passed:
return []
recipients = []
options = self.get_build_options(build)
if options['mail.notify-author']:
author = build.author
if author:
recipients.append(u'%s <%s>' % (author.name, author.email))
recipients.extend(options['mail.notify-addresses'])
if build_type.is_initial_commit_build(build):
recipients.extend(options['mail.notify-addresses-revisions'])
return recipients
def get_build_options(self, build):
"""
Returns a build's mail options as a
{
'mail.notify-author': bool,
'mail.notify-addresses': set,
'mail.notify-addresses-revisions': set,
} dict.
The 'mail.notify-author' option is True unless the build and all
failing jobs have turned off the mail.notify-author option.
The mail.notify-addresses and mail.notify-addresses-revisions options
respectively are sets of email addresses constructed by merging the
corresponding options of all failing jobs. Note that the build's
options are used as defaults when constructing the options for
each job, so that the job options override the build options.
Finally, the build's own options are used if there are no failing jobs.
"""
default_options = {
'mail.notify-author': '1',
'mail.notify-addresses': '',
'mail.notify-addresses-revisions': '',
}
build_options = dict(
default_options,
**dict(db.session.query(
ProjectOption.name, ProjectOption.value
).filter(
ProjectOption.project_id == build.project_id,
ProjectOption.name.in_(default_options.keys()),
))
)
# Get options for all failing jobs.
jobs_options = []
for job in list(Job.query.filter(Job.build_id == build.id)):
if job.result != Result.passed:
jobs_options.append(dict(
build_options, **self.get_job_options(job)))
# Merge all options.
# Fallback to build options in case there are no failing jobs.
all_options = jobs_options or [build_options]
merged_options = {
# Notify the author unless all jobs and the build have turned the
# notify-author option off.
'mail.notify-author': any(
imap(
lambda options: options.get('mail.notify-author') == '1',
all_options,
),
),
'mail.notify-addresses': set(),
'mail.notify-addresses-revisions': set(),
}
recipient_keys = ['mail.notify-addresses', 'mail.notify-addresses-revisions']
for options in all_options:
for key in recipient_keys:
# XXX(dcramer): we dont have option validators so lets assume
# people enter slightly incorrect values
merged_options[key] |= set(
[x.strip() for x in options[key].split(',') if x.strip()]
)
return merged_options
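    # A worked sketch of the merge rules above (option values hypothetical):
    # with two failing jobs whose effective options are
    #   {'mail.notify-author': '0', 'mail.notify-addresses': 'a@x.com, b@x.com'}
    #   {'mail.notify-author': '1', 'mail.notify-addresses': 'b@x.com'}
    # the merged result is
    #   {'mail.notify-author': True,
    #    'mail.notify-addresses': {'a@x.com', 'b@x.com'},
    #    'mail.notify-addresses-revisions': set()}
    # because any job leaving notify-author on keeps the author notified, and
    # the address options are split on commas and unioned.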
def get_job_options(self, job):
jobplan = JobPlan.query.filter(
JobPlan.job_id == job.id,
).first()
options = {}
if jobplan and 'snapshot' in jobplan.data:
options = jobplan.data['snapshot']['options']
return options
def build_finished_handler(build_id, *args, **kwargs):
build = Build.query.get(build_id)
if not build:
return
if not build.collection_id:
# If there isn't a collection_id, assume the build stands alone.
# All builds should probably have collection_id set.
builds = [build]
else:
builds = list(
Build.query.filter(Build.collection_id == build.collection_id))
# Exit if there are no builds for the given build_id, or any build hasn't
# finished.
if not builds or any(map(lambda build: build.status != Status.finished, builds)):
return
notification_handler = MailNotificationHandler()
msg = notification_handler.get_msg(builds)
if msg is not None:
notification_handler.send(msg, build)
|
apache-2.0
| 7,238,504,638,627,023,000 | 35.39834 | 102 | 0.614683 | false | 4.35119 | false | false | false |
pidydx/grr
|
grr/lib/flows/general/audit.py
|
1
|
2003
|
#!/usr/bin/env python
"""This implements the auditing system.
How does it work?
Noteworthy events within the GRR system (such as approval granting, flow
execution etc) generate events to notify listeners about the event.
The audit system consists of a group of event listeners which receive these
events and act upon them.
"""
from grr.lib import aff4
from grr.lib import events
from grr.lib import flow
from grr.lib import queues
from grr.lib import rdfvalue
from grr.lib import sequential_collection
AUDIT_EVENT = "Audit"
class AuditEventCollection(sequential_collection.IndexedSequentialCollection):
RDF_TYPE = events.AuditEvent
def AllAuditLogs(token=None):
# TODO(user): This is not great, we should store this differently.
for log in aff4.FACTORY.Open("aff4:/audit/logs", token=token).ListChildren():
yield AuditEventCollection(log, token=token)
def AuditLogsForTimespan(start_time, end_time, token=None):
# TODO(user): This is not great, we should store this differently.
for log in aff4.FACTORY.Open(
"aff4:/audit/logs", token=token).ListChildren(age=(start_time, end_time)):
yield AuditEventCollection(log, token=token)
class AuditEventListener(flow.EventListener):
"""Receive the audit events."""
well_known_session_id = rdfvalue.SessionID(
base="aff4:/audit", queue=queues.FLOWS, flow_name="listener")
EVENTS = [AUDIT_EVENT]
created_logs = set()
def EnsureLogIsIndexed(self, log_urn):
if log_urn not in self.created_logs:
# Just write any type to the aff4 space so we can determine
# which audit logs exist easily.
aff4.FACTORY.Create(
log_urn, aff4.AFF4Volume, mode="w", token=self.token).Close()
self.created_logs.add(log_urn)
return log_urn
@flow.EventHandler(auth_required=False)
def ProcessMessage(self, message=None, event=None):
_ = message
log_urn = aff4.CurrentAuditLog()
self.EnsureLogIsIndexed(log_urn)
AuditEventCollection.StaticAdd(log_urn, self.token, event)
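# An illustrative sketch of the producing side (API assumed from GRR of this
# era, not defined in this module): other components publish audit events,
# which this listener then indexes and stores, e.g.
#
#   events.Events.PublishEvent(
#       AUDIT_EVENT,
#       events.AuditEvent(user="admin", action="RUN_FLOW"),
#       token=token)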
|
apache-2.0
| 4,533,730,079,903,174,000 | 30.793651 | 80 | 0.736895 | false | 3.489547 | false | false | false |
MicBrain/Tic_Tac_Toe
|
Tic_Tac_Toe.py
|
1
|
8653
|
###################
### DESCRIPTION ###
###################
"""
Tic-tac-toe (or Noughts and crosses, Xs and Os) is a game for two players, X and O, who take
turns marking the spaces in a 3×3 grid. The player who succeeds in placing three respective marks
in a horizontal, vertical, or diagonal row wins the game.
The simplicity of Tic-tac-toe makes it ideal as a pedagogical tool for teaching the concepts
of good sportsmanship and the branch of artificial intelligence that deals with the searching of
game trees. It is straightforward to write a computer program to play Tic-tac-toe perfectly.
The game can be generalized to an m,n,k-game in which two players alternate placing stones of
their own color on an m×n board, with the goal of getting k of their own color in a row. Tic-tac-toe
is the (3,3,3)-game.
Despite its apparent simplicity, Tic-tac-toe requires detailed analysis to determine even some
elementary combinatorial facts, the most interesting of which are the number of possible games and the
number of possible positions. A position is merely a state of the board, while a game usually refers
to the way a terminal position is obtained.
"""
from string import *
from random import *
import itertools
import math
####################
## MAIN VARIABLES ##
####################
Player_1 = 'x' # player 1's mark
Player_2 = 'o' # player 2's mark
A = 'A' # these just make it easier to keep referring to 'A', 'B' and 'C'
B = 'B'
C = 'C'
#####################
## State variables ##
#####################
EMPTY = ' '
Table = [[EMPTY, EMPTY, EMPTY],
[EMPTY, EMPTY, EMPTY],
[EMPTY, EMPTY, EMPTY]]
current = randint(1, 2)
#########################
### Coordinate system ###
#########################
def square(row, col): # squares are represented as tuples of (row, col).
return (row, col) # rows are numbered 1 thru 3, cols 'A' thru 'C'.
def square_row(square): # these two functions save us the hassle of using
return square[0] # index values in our code, e.g. square[0]...
def square_col(square): # from this point on, i should never directly use
return square[1] # tuples when working with squares.
def get_square(square):
row_i = square_row(square) - 1
col_i = ord(square_col(square)) - ord(A)
return Table[row_i][col_i] # note how this and set_square are the ONLY
# functions which directly use board!
def set_square(square, mark):
row_i = square_row(square) - 1
col_i = ord(square_col(square)) - ord(A)
    Table[row_i][col_i] = mark # note how this and get_square are the ONLY functions which directly use board!
def get_row(row):
return [get_square((row, A)), get_square((row, B)), get_square((row, C))]
def get_column(col):
return [get_square((1, col)), get_square((2, col)), get_square((3, col))]
def get_diagonal(corner_square):
if corner_square == (1, A) or corner_square == (3, C):
return [get_square((1, A)), get_square((2, B)), get_square((3, C))]
else:
return [get_square((1, C)), get_square((2, B)), get_square((3, A))]
def get_mark(player):
if player == 1:
return Player_1
else:
return Player_2
def all_squares_filled():
for row in range(1, 4): # range(1, 4) returns the list [1, 2, 3]
if EMPTY in get_row(row):
return False # this row contains an empty square, we know enough
return True # no empty squares found, all squares are filled
def player_has_won(player):
MARK = get_mark(player)
win = [MARK, MARK, MARK]
if get_row(1) == win or get_row(2) == win or get_row(3) == win:
return True
if get_column(A) == win or get_column(B) == win or get_column(C) == win:
return True
if get_diagonal((1, A)) == win or get_diagonal((1, C)) == win:
return True
return False
def draw_board_straight():
A1, A2, A3 = get_square((1, A)), get_square((2, A)), get_square((3, A))
B1, B2, B3 = get_square((1, B)), get_square((2, B)), get_square((3, B))
C1, C2, C3 = get_square((1, C)), get_square((2, C)), get_square((3, C))
lines = []
lines.append("")
lines.append(" " + A + " " + B + " " + C + " ")
lines.append(" ")
lines.append("1 " + A1 + " | " + B1 + " | " + C1 + " ")
lines.append(" ---+---+---")
lines.append("2 " + A2 + " | " + B2 + " | " + C2 + " ")
lines.append(" ---+---+---")
lines.append("3 " + A3 + " | " + B3 + " | " + C3 + " ")
lines.append("")
    return '\n'.join(lines) # the '\n' represents a newline
def draw_board_slanted():
A1, A2, A3 = get_square((1, A)), get_square((2, A)), get_square((3, A))
B1, B2, B3 = get_square((1, B)), get_square((2, B)), get_square((3, B))
C1, C2, C3 = get_square((1, C)), get_square((2, C)), get_square((3, C))
lines = []
lines.append("")
lines.append(" " + A + " " + B + " " + C + " ")
lines.append(" ")
lines.append(" 1 " + A1 + " / " + B1 + " / " + C1 + " ")
lines.append(" ---/---/--- ")
lines.append(" 2 " + A2 + " / " + B2 + " / " + C2 + " ")
lines.append(" ---/---/--- ")
lines.append("3 " + A3 + " / " + B3 + " / " + C3 + " ")
lines.append("")
    return '\n'.join(lines)
def draw_board():
return draw_board_slanted()
def reset_main_board():
for row in (1, 2, 3):
for col in (A, B, C):
set_square(square(row, col), EMPTY)
def play():
global current
reset_main_board()
current = randint(1, 2)
print ("Tic-Tac-Toe!")
    print()
player1_name = input("Player 1, what is your name? ")
player2_name = input("Player 2, what is your name? ")
def get_name(player):
if player == 1:
return player1_name
else:
return player2_name
    print()
print ("Welcome,", player1_name, "and", player2_name + "!")
print (player1_name, "will be", Player_1 + ", and", player2_name, "will be", Player_2 + ".")
print ("By random decision,", get_name(current), "will go first.")
    print()
input("[Press enter when ready to play.] ") # just waiting for them to press enter
print (draw_board())
while not all_squares_filled():
choice = input(get_name(current) + ", which square? (e.g. 2B, 2b, B2 or b2) ")
if len(choice) != 2:
print ("That's not a square. You must enter a square like b2, or 3C.")
            print()
continue
if choice[0] not in ["1", "2", "3"] and str.upper(choice[0]) not in [A, B, C]:
print ("The first character must be a row (1, 2 or 3) or column (A, B or C).")
            print()
continue
if choice[1] not in ["1", "2", "3"] and str.upper(choice[1]) not in [A, B, C]:
print ("The second character must be a row (1, 2 or 3) or column (A, B or C).")
            print()
continue
if choice[0] in ["1", "2", "3"] and choice[1] in ["1", "2", "3"]:
print ("You entered two rows! You must enter one row and one column (A, B or C).")
            print()
continue
if str.upper(choice[0]) in [A, B, C] and str.upper(choice[1]) in [A, B, C]:
print ("You entered two columns! You must enter one row (1, 2 or 3) and one column.")
            print()
continue
if choice[0] in ["1", "2", "3"]:
row = int(choice[0])
col = str.upper(choice[1])
else:
row = int(choice[1])
col = str.upper(choice[0])
choice = square(row, col) # make this into a (row, col) tuple
if get_square(choice) != EMPTY:
print ("Sorry, that square is already marked.")
            print()
continue
set_square(choice, get_mark(current))
print (draw_board())
if player_has_won(current):
print ("Congratulations", get_name(current), "-- you win!")
            print()
break
if all_squares_filled():
print ("Cats game!", player1_name, "and", player2_name, "draw.")
            print()
break
current = 3 - current # sets 1 to 2 and 2 to 1
print ("GAME IS OVER")
    print()
if __name__ == "__main__":
continue_playing = True
while continue_playing:
play()
again = str.lower(input("Play again? (y/n) "))
        print()
        print()
        print()
if again != "y":
continue_playing = False
print ("Thanks for playing!")
    print()
|
gpl-3.0
| -7,830,777,343,375,921,000 | 37.620536 | 101 | 0.539475 | false | 3.309487 | false | false | false |
jpetto/bedrock
|
bedrock/firefox/helpers.py
|
1
|
8778
|
from collections import OrderedDict
from django.core.cache import cache
from django.conf import settings
import jingo
import jinja2
from bedrock.firefox.models import FirefoxOSFeedLink
from bedrock.firefox.firefox_details import firefox_desktop, firefox_android, firefox_ios
from bedrock.base.urlresolvers import reverse
from lib.l10n_utils import get_locale
def android_builds(channel, builds=None):
builds = builds or []
variations = OrderedDict([
('api-9', 'Gingerbread'),
('api-15', 'Ice Cream Sandwich+'),
('x86', 'x86'),
])
if channel == 'alpha':
for type, arch_pretty in variations.iteritems():
link = firefox_android.get_download_url('alpha', type)
builds.append({'os': 'android',
'os_pretty': 'Android',
'os_arch_pretty': 'Android %s' % arch_pretty,
'arch': 'x86' if type == 'x86' else 'armv7up %s' % type,
'arch_pretty': arch_pretty,
'download_link': link})
else:
link = firefox_android.get_download_url(channel)
builds.append({'os': 'android',
'os_pretty': 'Android',
'download_link': link})
return builds
def ios_builds(channel, builds=None):
builds = builds or []
link = firefox_ios.get_download_url(channel)
builds.append({'os': 'ios',
'os_pretty': 'iOS',
'download_link': link})
return builds
@jingo.register.function
@jinja2.contextfunction
def download_firefox(ctx, channel='release', small=False, icon=True,
platform='all', dom_id=None, locale=None, simple=False,
force_direct=False, force_full_installer=False,
force_funnelcake=False, check_old_fx=False):
""" Output a "download firefox" button.
:param ctx: context from calling template.
:param channel: name of channel: 'release', 'beta' or 'alpha'.
:param small: Display the small button if True.
:param icon: Display the Fx icon on the button if True.
:param platform: Target platform: 'desktop', 'android', 'ios', or 'all'.
:param dom_id: Use this string as the id attr on the element.
:param locale: The locale of the download. Default to locale of request.
:param simple: Display button with text only if True. Will not display
icon or privacy/what's new/systems & languages links. Can be used
in conjunction with 'small'.
:param force_direct: Force the download URL to be direct.
:param force_full_installer: Force the installer download to not be
the stub installer (for aurora).
:param force_funnelcake: Force the download version for en-US Windows to be
'latest', which bouncer will translate to the funnelcake build.
:param check_old_fx: Checks to see if the user is on an old version of
Firefox and, if true, changes the button text from 'Free Download'
to 'Update your Firefox'. Must be used in conjunction with
'simple' param being true.
:return: The button html.
"""
show_desktop = platform in ['all', 'desktop']
show_android = platform in ['all', 'android']
show_ios = platform in ['all', 'ios']
alt_channel = '' if channel == 'release' else channel
locale = locale or get_locale(ctx['request'])
funnelcake_id = ctx.get('funnelcake_id', False)
dom_id = dom_id or 'download-button-%s-%s' % (
'desktop' if platform == 'all' else platform, channel)
l_version = firefox_desktop.latest_builds(locale, channel)
if l_version:
version, platforms = l_version
else:
locale = 'en-US'
version, platforms = firefox_desktop.latest_builds('en-US', channel)
# Gather data about the build for each platform
builds = []
if show_desktop:
for plat_os, plat_os_pretty in firefox_desktop.platform_labels.iteritems():
# Windows 64-bit builds are not available on the ESR channel yet
if plat_os == 'win64' and channel in ['esr', 'esr_next']:
continue
# Fallback to en-US if this plat_os/version isn't available
# for the current locale
_locale = locale if plat_os_pretty in platforms else 'en-US'
# And generate all the info
download_link = firefox_desktop.get_download_url(
channel, version, plat_os, _locale,
force_direct=force_direct,
force_full_installer=force_full_installer,
force_funnelcake=force_funnelcake,
funnelcake_id=funnelcake_id,
)
# If download_link_direct is False the data-direct-link attr
# will not be output, and the JS won't attempt the IE popup.
if force_direct:
# no need to run get_download_url again with the same args
download_link_direct = False
else:
download_link_direct = firefox_desktop.get_download_url(
channel, version, plat_os, _locale,
force_direct=True,
force_full_installer=force_full_installer,
force_funnelcake=force_funnelcake,
funnelcake_id=funnelcake_id,
)
if download_link_direct == download_link:
download_link_direct = False
builds.append({'os': plat_os,
'os_pretty': plat_os_pretty,
'download_link': download_link,
'download_link_direct': download_link_direct})
if show_android:
builds = android_builds(channel, builds)
if show_ios:
builds.append({'os': 'ios',
'os_pretty': 'iOS',
'download_link': firefox_ios.get_download_url()})
# Get the native name for current locale
langs = firefox_desktop.languages
locale_name = langs[locale]['native'] if locale in langs else locale
data = {
'locale_name': locale_name,
'version': version,
'product': 'firefox-%s' % platform,
'builds': builds,
'id': dom_id,
'small': small,
'simple': simple,
'channel': alt_channel,
'show_desktop': show_desktop,
'show_android': show_android,
'show_ios': show_ios,
'icon': icon,
'check_old_fx': check_old_fx and simple,
}
html = jingo.render_to_string(ctx['request'],
'firefox/includes/download-button.html',
data)
return jinja2.Markup(html)
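# Illustrative template usage (argument values hypothetical; the helper is
# registered for Jinja templates via jingo above):
#
#   {{ download_firefox() }}
#   {{ download_firefox('beta', small=True, icon=False, platform='android') }}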
@jingo.register.function
def firefox_url(platform, page, channel=None):
"""
Return a product-related URL like /firefox/all/ or /mobile/beta/notes/.
Examples
========
In Template
-----------
{{ firefox_url('desktop', 'all', 'organizations') }}
{{ firefox_url('desktop', 'sysreq', channel) }}
{{ firefox_url('android', 'notes') }}
"""
kwargs = {}
# Tweak the channel name for the naming URL pattern in urls.py
if channel == 'release':
channel = None
if channel == 'alpha':
if platform == 'desktop':
channel = 'developer'
if platform == 'android':
channel = 'aurora'
if channel == 'esr':
channel = 'organizations'
if channel:
kwargs['channel'] = channel
if platform != 'desktop':
kwargs['platform'] = platform
# Firefox for Android and iOS have the system requirements page on SUMO
if platform in ['android', 'ios'] and page == 'sysreq':
return settings.FIREFOX_MOBILE_SYSREQ_URL
return reverse('firefox.%s' % page, kwargs=kwargs)
@jingo.register.function
def firefox_os_feed_links(locale, force_cache_refresh=False):
if locale in settings.FIREFOX_OS_FEED_LOCALES:
cache_key = 'firefox-os-feed-links-' + locale
if not force_cache_refresh:
links = cache.get(cache_key)
if links:
return links
links = list(
FirefoxOSFeedLink.objects.filter(locale=locale).order_by(
'-id').values_list('link', 'title')[:10])
cache.set(cache_key, links)
return links
elif '-' in locale:
return firefox_os_feed_links(locale.split('-')[0])
@jingo.register.function
def firefox_os_blog_link(locale):
try:
return settings.FXOS_PRESS_BLOG_LINKS[locale]
except KeyError:
if '-' in locale:
return firefox_os_blog_link(locale.split('-')[0])
else:
return None
|
mpl-2.0
| 5,004,060,017,684,913,000 | 35.728033 | 89 | 0.583276 | false | 4.00639 | false | false | false |
satish-avninetworks/murano
|
murano/dsl/murano_package.py
|
1
|
7758
|
# Copyright (c) 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import inspect
import weakref
import semantic_version
import six
from yaql.language import specs
from yaql.language import utils
from murano.dsl import constants
from murano.dsl import dsl_types
from murano.dsl import exceptions
from murano.dsl import helpers
from murano.dsl import meta as dslmeta
from murano.dsl import murano_object
from murano.dsl import murano_type
from murano.dsl import namespace_resolver
from murano.dsl import principal_objects
from murano.dsl import yaql_integration
class MuranoPackage(dsl_types.MuranoPackage, dslmeta.MetaProvider):
def __init__(self, package_loader, name, version=None,
runtime_version=None, requirements=None, meta=None):
super(MuranoPackage, self).__init__()
self._package_loader = weakref.proxy(package_loader)
self._name = name
self._meta = None
self._version = helpers.parse_version(version)
self._runtime_version = helpers.parse_version(runtime_version)
self._requirements = {
name: semantic_version.Spec('==' + str(self._version.major))
}
if name != constants.CORE_LIBRARY:
self._requirements[constants.CORE_LIBRARY] = \
semantic_version.Spec('==0')
self._classes = {}
self._imported_types = {object, murano_object.MuranoObject}
for key, value in six.iteritems(requirements or {}):
self._requirements[key] = helpers.parse_version_spec(value)
self._load_queue = {}
self._native_load_queue = {}
if self.name == constants.CORE_LIBRARY:
principal_objects.register(self)
self._package_class = self._create_package_class()
self._meta = dslmeta.MetaData(
meta, dsl_types.MetaTargets.Package, self._package_class)
@property
def package_loader(self):
return self._package_loader
@property
def name(self):
return self._name
@property
def version(self):
return self._version
@property
def runtime_version(self):
return self._runtime_version
@property
def requirements(self):
return self._requirements
@property
def classes(self):
return set(self._classes.keys()).union(
self._load_queue.keys()).union(self._native_load_queue.keys())
def get_resource(self, name):
raise NotImplementedError('resource API is not implemented')
# noinspection PyMethodMayBeStatic
def get_class_config(self, name):
return {}
def _register_mpl_classes(self, data, name=None):
type_obj = self._classes.get(name)
if type_obj is not None:
return type_obj
if callable(data):
data = data()
data = helpers.list_value(data)
unnamed_class = None
last_ns = {}
for cls_data in data:
last_ns = cls_data.setdefault('Namespaces', last_ns.copy())
if len(cls_data) == 1:
continue
cls_name = cls_data.get('Name')
if not cls_name:
if unnamed_class:
raise exceptions.AmbiguousClassName(name)
unnamed_class = cls_data
else:
ns_resolver = namespace_resolver.NamespaceResolver(last_ns)
cls_name = ns_resolver.resolve_name(cls_name)
if cls_name == name:
type_obj = murano_type.create(
cls_data, self, cls_name, ns_resolver)
self._classes[name] = type_obj
else:
self._load_queue.setdefault(cls_name, cls_data)
if type_obj is None and unnamed_class:
unnamed_class['Name'] = name
return self._register_mpl_classes(unnamed_class, name)
return type_obj
def _register_native_class(self, cls, name):
if cls in self._imported_types:
return self._classes[name]
try:
m_class = self.find_class(name, False)
except exceptions.NoClassFound:
m_class = self._register_mpl_classes({'Name': name}, name)
m_class.extension_class = cls
for method_name in dir(cls):
if method_name.startswith('_'):
continue
method = getattr(cls, method_name)
if not any((
helpers.inspect_is_method(cls, method_name),
helpers.inspect_is_static(cls, method_name),
helpers.inspect_is_classmethod(cls, method_name))):
continue
method_name_alias = (getattr(
method, '__murano_name', None) or
specs.convert_function_name(
method_name, yaql_integration.CONVENTION))
m_class.add_method(method_name_alias, method, method_name)
self._imported_types.add(cls)
return m_class
def register_class(self, cls, name=None):
if inspect.isclass(cls):
name = name or getattr(cls, '__murano_name', None) or cls.__name__
if name in self._classes:
self._register_native_class(cls, name)
else:
self._native_load_queue.setdefault(name, cls)
elif isinstance(cls, dsl_types.MuranoType):
self._classes[cls.name] = cls
elif name not in self._classes:
self._load_queue[name] = cls
def find_class(self, name, search_requirements=True):
payload = self._native_load_queue.pop(name, None)
if payload is not None:
return self._register_native_class(payload, name)
payload = self._load_queue.pop(name, None)
if payload is not None:
result = self._register_mpl_classes(payload, name)
if result:
return result
result = self._classes.get(name)
if result:
return result
if search_requirements:
pkgs_for_search = []
for package_name, version_spec in six.iteritems(
self._requirements):
if package_name == self.name:
continue
referenced_package = self._package_loader.load_package(
package_name, version_spec)
try:
return referenced_package.find_class(name, False)
except exceptions.NoClassFound:
pkgs_for_search.append(referenced_package)
continue
raise exceptions.NoClassFound(
name, packages=pkgs_for_search + [self])
raise exceptions.NoClassFound(name, packages=[self])
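    # An illustrative sketch (class and name hypothetical): native classes are
    # queued by register_class() and materialized lazily on first lookup:
    #
    #   pkg.register_class(MyExtension, name='org.example.MyExtension')
    #   cls = pkg.find_class('org.example.MyExtension')
    #   assert cls.extension_class is MyExtension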
@property
def context(self):
return None
def _create_package_class(self):
ns_resolver = namespace_resolver.NamespaceResolver(None)
return murano_type.MuranoClass(
ns_resolver, self.name, self, utils.NO_VALUE)
def get_meta(self, context):
if not self._meta:
return []
return self._meta.get_meta(context)
def __repr__(self):
return 'MuranoPackage({name})'.format(name=self.name)
|
apache-2.0
| 3,568,733,459,473,349,000 | 35.252336 | 78 | 0.59603 | false | 4.10911 | false | false | false |
DevHugo/zds-site
|
zds/utils/tutorials.py
|
1
|
2669
|
# coding: utf-8
import os
# Used for indexing tutorials: we need to parse each manifest to know which content has been published
class GetPublished:
published_part = []
published_chapter = []
published_extract = []
def __init__(self):
pass
@classmethod
def get_published_content(cls):
# If all array are empty load_it
if not len(GetPublished.published_part) and \
not len(GetPublished.published_chapter) and \
not len(GetPublished.published_extract):
# Get all published tutorials
from zds.tutorial.models import Tutorial
tutorials_database = Tutorial.objects.filter(sha_public__isnull=False).all()
for tutorial in tutorials_database:
# Load Manifest
json = tutorial.load_json_for_public()
# Parse it
GetPublished.load_tutorial(json)
return {"parts": GetPublished.published_part,
"chapters": GetPublished.published_chapter,
"extracts": GetPublished.published_extract}
@classmethod
def load_tutorial(cls, json):
# Load parts, chapter and extract
if 'parts' in json:
for part_json in json['parts']:
# If inside of parts we have chapters, load it
GetPublished.load_chapters(part_json)
GetPublished.load_extracts(part_json)
GetPublished.published_part.append(part_json['pk'])
GetPublished.load_chapters(json)
GetPublished.load_extracts(json)
@classmethod
def load_chapters(cls, json):
if 'chapters' in json:
for chapters_json in json['chapters']:
GetPublished.published_chapter.append(chapters_json['pk'])
GetPublished.load_extracts(chapters_json)
return GetPublished.published_chapter
@classmethod
def load_extracts(cls, json):
if 'extracts' in json:
for extract_json in json['extracts']:
GetPublished.published_extract.append(extract_json['pk'])
return GetPublished.published_extract
def get_blob(tree, chemin):
for blob in tree.blobs:
try:
if os.path.abspath(blob.path) == os.path.abspath(chemin):
data = blob.data_stream.read()
return data.decode('utf-8')
except (OSError, IOError):
return ""
if len(tree.trees) > 0:
for atree in tree.trees:
result = get_blob(atree, chemin)
if result is not None:
return result
return None
else:
return None
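# An illustrative sketch (assumes a GitPython repository; the objects are
# hypothetical): fetch a file from the tree of the published commit, e.g.
#
#   repo = git.Repo(tutorial.get_path())
#   tree = repo.commit(tutorial.sha_public).tree
#   manifest = get_blob(tree, 'manifest.json')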
|
gpl-3.0
| -8,591,455,257,756,504,000 | 30.034884 | 103 | 0.59423 | false | 4.284109 | false | false | false |
Hubert51/AutoGrading
|
learning/number_recognization/test.py
|
1
|
1250
|
from pytesseract import image_to_string
from PIL import Image
import cv2
import numpy
import sys
if __name__ == '__main__':
f = open("test1.txt")
f = f.read()
for element in f:
str1 = element
position = ((712, 571), (725, 587))
dh = position[1][1] - position[0][1]
upper = position[0][1] - 2 * dh
lower = position[1][1] + int(3.5 * dh)
left = position[1][0]
print(upper,lower, left)
img = cv2.imread('answerSheet_with_name.png')
#image = Image.open('answerSheet_with_name.png')
    img = img[upper:lower, left:img.shape[1]]  # crop rows and columns; shape[1] is the image width
gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
blur = cv2.GaussianBlur(gray,(5,5),0)
thresh = cv2.adaptiveThreshold(blur,255,cv2.ADAPTIVE_THRESH_GAUSSIAN_C,cv2.THRESH_BINARY,11,6)
cv2.imshow("hello", img)
################# Now finding Contours ###################
img,contours,hierarchy = cv2.findContours(thresh,cv2.RETR_LIST,cv2.CHAIN_APPROX_SIMPLE)
cv2.drawContours(img, contours, -1, (0, 0, 255),1)
    im = Image.fromarray(img)  # single-channel image; let PIL infer the mode
file = open("image_to_string.txt", "w")
# box = image_to_string(image).split('\n')
file.write(image_to_string(im))
#file.write(image_to_string(image))
file.close()
|
mit
| -715,447,482,893,040,000 | 26.777778 | 98 | 0.6064 | false | 2.82167 | false | false | false |
bblais/Tech-SIE
|
Estimating_Proportion/Estimating_Proportion.py
|
1
|
4755
|
# coding: utf-8
# # Statistical Inference for Everyone: Technical Supplement
#
#
#
# This document is the technical supplement, for instructors, for [Statistical Inference for Everyone], the introductory statistical inference textbook from the perspective of "probability theory as logic".
#
# <img src="http://web.bryant.edu/~bblais/images/Saturn_with_Dice.png" align=center width = 250px />
#
# [Statistical Inference for Everyone]: http://web.bryant.edu/~bblais/statistical-inference-for-everyone-sie.html
#
# ## Estimating a Proportion
#
# $$\newcommand{\twocvec}[2]{\left(\begin{array}{c}
# #1 \\\\ #2
# \end{array}\right)}
# \newcommand{\nchoosek}[2]{\twocvec{#1}{#2}}
# $$
#
# If $\theta$ is the model representing the probability, $\theta$, of the coin
# landing on heads (and $1-\theta$ is the probability of landing on tails), we
# need to make an estimate of probability of model $\theta$ being true given the
# data, which will consist of $N$ flips of which $h$ are heads.
#
# Bayes rule is:
# \begin{eqnarray}
# p(\theta|D,I) &=& \frac{p(D|\theta,I)p(\theta|I)}{p(D|I)} =
# \frac{p(D|\theta,I)p(\theta,I)}{\sum_\theta p(D|\theta,I)p(\theta|I)}
# \end{eqnarray}
#
# Thus, the probability of a particular model $\theta$ being true is the product
# of the probability of the observed data ($h$ heads in $N$ flips) given the
# model $\theta$ and the prior probability of the model $\theta$ being true
# before we even look at the data, divided by the probability of the data itself
# over all models.
#
# The prior probability of model $\theta$ will be assumed to be uniform (from
# maximum entropy considerations). The probability, $\theta$, ranges from 0 to
# 1, to the prior is
# \begin{eqnarray}
# p(\theta|I) = 1
# \end{eqnarray}
#
# The probability of the data given the random model, is just the binomial
# distribution:
#
# \begin{eqnarray}
# p(D|\theta)=\nchoosek{N}{h} \theta^h (1-\theta)^{N-h}
# \end{eqnarray}
#
# The probability of the data, $p(D|I)$, is found by summing (or in this case
# integrating) $p(D|\theta,I)p(\theta|I)$ for all $\theta$:
#
# \begin{eqnarray}
# p(D|I) &=& \int_0^1 \nchoosek{N}{h} \theta^h (1-\theta)^{N-h} \cdot 1 d\theta
# \\\\
# &=&\frac{N!}{h!(N-h)!} \frac{h!(N-h)!}{(N+1)!} = \frac{1}{N+1}
# \end{eqnarray}
#
# Now the probability of model $\theta$ being true, given the data, is just
#
# \begin{eqnarray}
# p(\theta|D,I)&=& (N+1) \cdot \nchoosek{N}{h} \theta^h (1-\theta)^{N-h} \\
# &=& \frac{(N+1)!}{h!(N-h)!} \theta^h (1-\theta)^{N-h}
# \end{eqnarray}
#
#
# ### Max, Mean, Variance
#
# The model with the maximum probability is found by maximizing $p(\theta|D,I)$
# w.r.t. $\theta$:
#
# \begin{eqnarray}
# \frac{dP(\theta|D,I)}{d\theta} &=& 0 = \frac{(N+1)!}{h!(N-h)!} \left(
# -(N-h) \theta^h (1-\theta)^{N-h-1} + h \theta^{h-1} (1-\theta)^{N-h} \right) \\\\
# (N-h) \theta^h (1-\theta)^{N-h-1} &=& h \theta^{h-1} (1-\theta)^{N-h} \\\\
# \theta(N-h) &=& (1-\theta) h = h-\theta h = N\theta-\theta h \\\\
# \theta&=&\frac{h}{N} \;\;\;\;\;\surd
# \end{eqnarray}
#
# The average and the standard deviation is also straightforward.
#
#
# \begin{eqnarray}
# \bar{\theta} &=& \int_0^1 \theta \cdot \frac{(N+1)!}{h!(N-h)!} \theta^h (1-\theta)^{N-h} \\\\
# &=& \frac{(N+1)!}{h!(N-h)!} \int_0^1 \theta^{h+1} (1-\theta)^{N-h} \\\\
# &=&\frac{(N+1)!}{h!(N-h)!} \frac{(h+1)!(N-h)!}{(N+2)!} \\\\
# &=&\frac{h+1}{N+2} \\\\
# \bar{\theta^2} &=& \int_0^1 \theta^2 \cdot \frac{(N+1)!}{h!(N-h)!} \theta^h (1-\theta)^{N-h} \\\\
# &=&\frac{(N+1)!}{h!(N-h)!} \frac{(h+2)!(N-h)!}{(N+3)!} \\\\
# &=&\frac{(h+1)(h+2)}{(N+2)(N+3)} \\\\
# \sigma^2 &=& \bar{\theta^2} - \bar{\theta}^2 = \frac{(h+1)(h+2)}{(N+2)(N+3)} -
# \frac{(h+1)(h+1)}{(N+2)(N+2)} \\\\
# &=&\frac{(h+1)(N-h+1)}{(N+2)^2(N+3)} \\\\
# &=& \frac{(h+1)}{(N+2)}\left( \frac{N+2}{N+2} - \frac{h+1}{N+2}\right)
# \frac{1}{N+3} \\\\
# &=& \bar{\theta}(1-\bar{\theta})\frac{1}{N+3}
# \end{eqnarray}
#
# ### An Approximation for the Variance
#
# If $f=h/N$ is the actual fraction of heads observed, then the variance above
# can be written as
# \begin{eqnarray}
# \sigma^2 &=&\frac{(fN+1)(N-fN+1)}{(N+2)^2(N+3)} \\\\
# \mbox{(for large $N$)}&\approx& \frac{(fN+1)(N-fN)}{N^3}
# =\frac{(fN+1)(1-f)}{N^2} \\\\
# \mbox{(for large $fN$)}&\approx& \frac{(fN)(N-fN)}{N^2} = \frac{f(1-f)}{N} \\\\
# \sigma^2&\approx& \frac{f(1-f)}{N}
# \end{eqnarray}
#
# In this limit, the distribution (beta distribution) can be approximated with a
# Gaussian.
#
# In[11]:
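# A small numerical sketch added for illustration (the counts `h = 7`, `N = 10`
# are hypothetical data, not from the text): evaluate the posterior
# $p(\theta|D,I)$ derived above and compare the exact posterior standard
# deviation with the large-$N$ Gaussian approximation.

from math import lgamma, exp, log, sqrt

def posterior(theta, h, N):
    """p(theta | h heads in N flips) under a uniform prior."""
    if theta <= 0.0 or theta >= 1.0:
        return 0.0
    # (N+1)! / (h! (N-h)!) computed via log-gamma for numerical stability
    log_coef = lgamma(N + 2) - lgamma(h + 1) - lgamma(N - h + 1)
    return exp(log_coef + h * log(theta) + (N - h) * log(1.0 - theta))

h, N = 7, 10
mean = (h + 1.0) / (N + 2.0)            # posterior mean, (h+1)/(N+2)
var = mean * (1.0 - mean) / (N + 3.0)   # exact variance from above
f = h / float(N)
approx_var = f * (1.0 - f) / N          # large-N Gaussian approximation
print(posterior(f, h, N))               # posterior height at theta = h/N
print(sqrt(var), sqrt(approx_var))      # exact vs approximate std. deviation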
# ---------------------
# In[8]:
from IPython.core.display import HTML
def css_styling():
styles = open("../styles/custom.css", "r").read()
return HTML(styles)
css_styling()
|
mit
| 8,721,158,606,299,497,000 | 33.708029 | 206 | 0.578549 | false | 2.258907 | false | false | false |
wylee/django-local-settings
|
src/local_settings/util.py
|
1
|
5070
|
import importlib
import io
import os
import dotenv
NO_DEFAULT = type(
"NO_DEFAULT",
(),
{
"__nonzero__": (lambda self: False), # Python 2
"__bool__": (lambda self: False), # Python 3
"__str__": (lambda self: self.__class__.__name__),
"__repr__": (lambda self: str(self)),
"__copy__": (lambda self: self),
},
)()
def get_file_name():
"""Get local settings file from environ or discover it.
If the ``LOCAL_SETTINGS_FILE`` environment variable is set, its
value is returned directly.
Otherwise, the current working directory is searched for
`local.{ext}` for each file extension handled by each loading
:mod:`strategy`. Note that the search is done in alphabetical order
so that if ``local.cfg`` and ``local.yaml`` both exist, the former
will be returned.
Returns:
str: File name if set via environ or discovered
None: File name isn't set and wasn't discovered
"""
file_name = os.environ.get("LOCAL_SETTINGS_FILE")
if file_name:
return file_name
cwd = os.getcwd()
default_file_names = get_default_file_names()
for file_name in default_file_names:
file_name = os.path.join(cwd, file_name)
if os.path.exists(file_name):
return file_name
def get_default_file_names():
"""Get default file names for all loading strategies, sorted."""
from .strategy import get_file_type_map # noqa: Avoid circular import
return sorted(f"local.{ext}" for ext in get_file_type_map())
def parse_file_name_and_section(
file_name, section=None, extender=None, extender_section=None
):
"""Parse file name and (maybe) section.
File names can be absolute paths, relative paths, or asset
specs::
/home/user/project/local.cfg
local.cfg
some.package:local.cfg
File names can also include a section::
some.package:local.cfg#dev
If a ``section`` is passed, it will take precedence over a
section parsed out of the file name.
"""
if "#" in file_name:
file_name, parsed_section = file_name.rsplit("#", 1)
else:
parsed_section = None
if ":" in file_name:
file_name = asset_path(file_name)
if extender:
if not file_name:
# Extended another section in the same file
file_name = extender
elif not os.path.isabs(file_name):
# Extended by another file in the same directory
file_name = abs_path(file_name, relative_to=os.path.dirname(extender))
if section:
pass
elif parsed_section:
section = parsed_section
elif extender_section:
section = extender_section
else:
section = None
return file_name, section
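# A brief sketch of the parsing rules above (inputs hypothetical):
#
#   parse_file_name_and_section("some.package:local.cfg#dev")
#   # -> ("/abs/path/to/some/package/local.cfg", "dev")
#
#   parse_file_name_and_section("local.cfg#dev", section="prod")
#   # -> ("local.cfg", "prod")   # an explicit section wins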
# Path utilities
def abs_path(path, relative_to=None):
"""Make path absolute and normalize it."""
if os.path.isabs(path):
path = os.path.normpath(path)
elif ":" in path:
path = asset_path(path)
else:
path = os.path.expanduser(path)
if relative_to:
path = os.path.join(relative_to, path)
path = os.path.abspath(path)
path = os.path.normpath(path)
return path
def asset_path(path):
"""Get absolute path from asset spec and normalize it."""
if ":" in path:
package_name, rel_path = path.split(":", 1)
else:
package_name, rel_path = path, ""
try:
package = importlib.import_module(package_name)
except ImportError:
raise ValueError(
f"Could not get asset path for {path}; could not import "
f"package: {package_name}"
)
if not hasattr(package, "__file__"):
raise ValueError("Can't compute path relative to namespace package")
package_path = os.path.dirname(package.__file__)
if rel_path:
path = os.path.join(package_path, rel_path)
path = os.path.normpath(path)
return path
def dotenv_path(path=None, relative_to=None, file_name=".env"):
"""Get .env path.
If a path is specified, convert it to an absolute path. Otherwise,
use the default, "./.env".
.. note:: By default, the dotenv package discovers the default .env
file relative to the call site, so we have to tell it use CWD.
"""
if path:
path = abs_path(path, relative_to)
else:
path = dotenv.find_dotenv(filename=file_name, usecwd=True)
return path
def load_dotenv(path=None, relative_to=None, file_name=".env"):
"""Load vars from dotenv file into environ."""
path = dotenv_path(path, relative_to, file_name)
dotenv.load_dotenv(path)
# These TTY functions were copied from Invoke
def is_a_tty(stream):
if hasattr(stream, "isatty") and callable(stream.isatty):
return stream.isatty()
elif has_fileno(stream):
return os.isatty(stream.fileno())
return False
def has_fileno(stream):
try:
return isinstance(stream.fileno(), int)
except (AttributeError, io.UnsupportedOperation):
return False
|
mit
| -6,834,773,356,538,003,000 | 26.258065 | 82 | 0.622091 | false | 3.843821 | false | false | false |
ngageoint/scale
|
scale/data/models.py
|
1
|
24039
|
"""Defines the database models for datasets"""
from __future__ import absolute_import, unicode_literals
import copy
import logging
from collections import namedtuple
import django.contrib.postgres.fields
from django.db import models, transaction
from django.db.models import Q, Count
from data.data import data_util
from data.data.json.data_v6 import convert_data_to_v6_json, DataV6
from data.data.exceptions import InvalidData
from data.data.value import FileValue
from data.dataset.dataset import DataSetDefinition
from data.dataset.json.dataset_v6 import convert_definition_to_v6_json, DataSetDefinitionV6
from data.exceptions import InvalidDataSetDefinition, InvalidDataSetMember
from data.serializers import DataSetFileSerializerV6, DataSetMemberSerializerV6
from storage.models import ScaleFile
from util import rest as rest_utils
from util.database import alphabetize
logger = logging.getLogger(__name__)
DataSetValidation = namedtuple('DataSetValidation', ['is_valid', 'errors', 'warnings'])
# DataSetKey = namedtuple('DataSetKey', ['name', 'version'])
class DataSetManager(models.Manager):
"""Provides additional methods for handling datasets"""
def create_dataset_v6(self, definition, title=None, description=None):
"""Creates and returns a new dataset for the given name/title/description/definition/version??
:param definition: Parameter definition of the dataset
:type definition: :class:`data.dataset.dataset.DataSetDefinition`
:param title: Optional title of the dataset
:type title: string
:param description: Optional description of the dataset
:type description: string
:returns: The new dataset
:rtype: :class:`data.models.DataSet`
        :raises :class:`data.exceptions.InvalidDataSet`: If the given dataset has an invalid value
"""
if not definition:
definition = DataSetDefinition(definition={})
dataset = DataSet()
dataset.title = title
dataset.description = description
dataset.definition = definition.get_dict()
dataset.save()
return dataset
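    # Illustrative usage (a sketch; the empty definition is hypothetical):
    #
    #   definition = DataSetDefinitionV6(definition={}).get_definition()
    #   ds = DataSet.objects.create_dataset_v6(definition, title='Example')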
def get_details_v6(self, dataset_id):
"""Gets additional details for the given dataset id
:returns: The full dataset for the given id
:rtype: :class:`data.models.DataSet`
"""
ds = DataSet.objects.get(pk=dataset_id)
ds.files = DataSetFile.objects.get_dataset_files(ds.id)
return ds
def get_datasets_v6(self, started=None, ended=None, dataset_ids=None, keywords=None, order=None):
"""Handles retrieving datasets - possibly filtered and ordered
:returns: The list of datasets that match the given filters
:rtype: [:class:`data.models.DataSet`]
"""
return self.filter_datasets(started=started, ended=ended, dataset_ids=dataset_ids, keywords=keywords, order=order)
def filter_datasets(self, started=None, ended=None, dataset_ids=None, keywords=None, order=None):
"""Returns a query for dataset models that filters on the given fields
:param started: Query datasets created after this amount of time.
:type started: :class:`datetime.datetime`
:param ended: Query datasets created before this amount of time.
:type ended: :class:`datetime.datetime`
:param dataset_ids: Query datasets assciated with the given id(s)
:type dataset_ids: :func:`list`
:param keywords: Query datasets with title or description matching one of the specified keywords
:type keywords: :func:`list`
:param order: A list of fields to control the sort order.
:type order: :func:`list`
:returns: The dataset query
:rtype: :class:`django.db.models.QuerySet`
"""
# Fetch a list of the datasets
datasets = self.all()
# Apply time range filtering
if started:
datasets = datasets.filter(created__gte=started)
if ended:
datasets = datasets.filter(created__lte=ended)
# Apply additional filters
if dataset_ids:
datasets = datasets.filter(id__in=dataset_ids)
# Execute a sub-query that returns distinct job type names that match the provided filter arguments
if keywords:
key_query = Q()
for keyword in keywords:
key_query |= Q(title__icontains=keyword)
key_query |= Q(description__icontains=keyword)
datasets = datasets.filter(key_query)
# Apply sorting
if order:
ordering = alphabetize(order, DataSet.ALPHABETIZE_FIELDS)
datasets = datasets.order_by(*ordering)
else:
datasets = datasets.order_by('id')
for ds in datasets:
files = DataSetFile.objects.get_file_ids(dataset_ids=[ds.id])
ds.files = len(files)
return datasets
def validate_dataset_v6(self, definition, title=None, description=None):
"""Validates the given dataset definiton
:param definition: The dataset definition
:type definition: dict
:returns: The dataset validation
        :rtype: :class:`data.models.DataSetValidation`
"""
is_valid = True
errors = []
warnings = []
dataset_definition = None
try:
dataset_definition = DataSetDefinitionV6(definition=definition, do_validate=True)
except InvalidDataSetDefinition as ex:
is_valid = False
errors.append(ex.error)
message = 'Dataset definition is invalid: %s' % ex
logger.info(message)
pass
# validate other fields
return DataSetValidation(is_valid, errors, warnings)
def get_dataset_files(self, dataset_id):
"""Returns the files associated with the given dataset
:returns: The list of DataSetFiles matching the file_id
:rtype: [:class:`data.models.DataSetFile`]
"""
files = DataSetFile.objects.get_dataset_files(dataset_id=dataset_id)
return files
def get_dataset_members(self, dataset_id):
"""Returns the members associated with the given dataset_id
:returns: The list of DataSetMembers
:rtype: [:class:`data.models.DataSetMember`]
"""
dataset = self.get(pk=dataset_id)
members = DataSetMember.objects.all().filter(dataset=dataset)
return members
class DataSet(models.Model):
"""
Represents a DataSet object
:keyword title: The human-readable title of this dataset (optional)
:type title: :class:`django.db.models.CharField`
:keyword description: The description of the dataset (optional)
:type description: :class:`django.db.models.CharField`
:keyword created: Defines the created time of the dataset
:type created: :class:`django.db.models.DateTimeField`
:keyword definition: Defines the dataset
:type definition: class:`django.contrib.postgres.fields.JSONField`
"""
ALPHABETIZE_FIELDS = ['title', 'description']
title = models.CharField(blank=True, max_length=50, null=True)
description = models.TextField(blank=True, null=True)
created = models.DateTimeField(auto_now_add=True)
definition = django.contrib.postgres.fields.JSONField(default=dict)
objects = DataSetManager()
def get_definition(self):
"""Returns the dataset definition
:returns: The DataSet definition
:rtype: :class:`data.dataset.dataset.DataSetDefinition`
"""
if isinstance(self.definition, basestring):
self.definition = {}
return DataSetDefinitionV6(definition=self.definition).get_definition()
def get_v6_definition_json(self):
"""Returns the dataset definition in v6 of the JSON schema
:returns: The dataset definition in v6 of the JSON schema
:rtype: dict
"""
return rest_utils.strip_schema_version(convert_definition_to_v6_json(self.get_definition()).get_dict())
def get_dataset_definition(self):
"""Returns the dataset definition
:returns: The dataset definition json
:rtype: dict
"""
return self.definition
def get_dataset_members_json(self):
"""Returns the JSON for the associated dataset members
:returns: Returns the outgoing primitive representation.
:rtype: dict?
"""
members = DataSet.objects.get_dataset_members(dataset_id=self.id)
serializer = DataSetMemberSerializerV6(members, many=True)
return serializer.data
def get_dataset_files_json(self):
"""Returns the JSON for the associated dataset files
:returns: Returns the outgoing primitive representation.
:rtype: dict?
"""
files = DataSet.objects.get_dataset_files(self.id)
serializer = DataSetFileSerializerV6(files, many=True)
return serializer.data
class Meta(object):
"""meta information for the db"""
db_table = 'data_set'
class DataSetMemberManager(models.Manager):
"""Provides additional methods for handling dataset members"""
def build_data_list(self, template, data_started=None, data_ended=None, created_started=None, created_ended=None,
source_started=None, source_ended=None, source_sensor_classes=None, source_sensors=None,
source_collections=None,source_tasks=None, mod_started=None, mod_ended=None, job_type_ids=None,
job_type_names=None, job_ids=None, is_published=None, is_superseded=None, file_names=None,
job_outputs=None, recipe_ids=None, recipe_type_ids=None, recipe_nodes=None, batch_ids=None, order=None):
"""Builds a list of data dictionaries from a template and file filters
:param template: The template to fill with files found through filters
:type template: dict
:param data_started: Query files where data started after this time.
:type data_started: :class:`datetime.datetime`
:param data_ended: Query files where data ended before this time.
:type data_ended: :class:`datetime.datetime`
:param created_started: Query files created after this time.
:type created_started: :class:`datetime.datetime`
:param created_ended: Query files created before this time.
:type created_ended: :class:`datetime.datetime`
:param source_started: Query files where source collection started after this time.
:type source_started: :class:`datetime.datetime`
:param source_ended: Query files where source collection ended before this time.
:type source_ended: :class:`datetime.datetime`
:param source_sensor_classes: Query files with the given source sensor class.
:type source_sensor_classes: :func:`list`
        :param source_sensors: Query files with the given source sensor.
        :type source_sensors: :func:`list`
        :param source_collections: Query files with the given source collection.
        :type source_collections: :func:`list`
:param source_tasks: Query files with the given source tasks.
:type source_tasks: :func:`list`
:param mod_started: Query files where the last modified date is after this time.
:type mod_started: :class:`datetime.datetime`
:param mod_ended: Query files where the last modified date is before this time.
:type mod_ended: :class:`datetime.datetime`
:param job_type_ids: Query files with jobs with the given type identifier.
:type job_type_ids: :func:`list`
:param job_type_names: Query files with jobs with the given type name.
:type job_type_names: :func:`list`
:keyword job_ids: Query files with a given job id
:type job_ids: :func:`list`
:param is_published: Query files flagged as currently exposed for publication.
:type is_published: bool
:param is_superseded: Query files that have/have not been superseded.
:type is_superseded: bool
:param file_names: Query files with the given file names.
:type file_names: :func:`list`
:keyword job_outputs: Query files with the given job outputs
:type job_outputs: :func:`list`
:keyword recipe_ids: Query files with a given recipe id
:type recipe_ids: :func:`list`
        :keyword recipe_nodes: Query files with the given recipe nodes
:type recipe_nodes: :func:`list`
:keyword recipe_type_ids: Query files with the given recipe types
:type recipe_type_ids: :func:`list`
:keyword batch_ids: Query files with batches with the given identifiers.
:type batch_ids: :func:`list`
:param order: A list of fields to control the sort order.
:type order: :func:`list`
"""
files = ScaleFile.objects.filter_files(
data_started=data_started, data_ended=data_ended,
source_started=source_started, source_ended=source_ended,
source_sensor_classes=source_sensor_classes, source_sensors=source_sensors,
source_collections=source_collections, source_tasks=source_tasks,
mod_started=mod_started, mod_ended=mod_ended, job_type_ids=job_type_ids,
job_type_names=job_type_names, job_ids=job_ids,
file_names=file_names, job_outputs=job_outputs, recipe_ids=recipe_ids,
recipe_type_ids=recipe_type_ids, recipe_nodes=recipe_nodes, batch_ids=batch_ids,
order=order)
data_list = []
try:
for f in files:
entry = copy.deepcopy(template)
file_params = entry['files']
for p in file_params:
if file_params[p] == 'FILE_VALUE':
file_params[p] = [f.id]
data_list.append(DataV6(data=entry, do_validate=True).get_data())
except (KeyError, TypeError) as ex:
raise InvalidData('INVALID_TEMPLATE', "Specified template is invalid: %s" % ex)
return data_list
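    # Illustrative template shape for build_data_list (an assumption based on
    # the 'FILE_VALUE' handling above):
    #
    #     template = {'files': {'INPUT_IMAGE': 'FILE_VALUE'}}
    #
    # Each ScaleFile f found by the filters yields a deep copy of the template
    # with 'FILE_VALUE' replaced by [f.id], validated as a DataV6 object.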
def validate_data_list(self, dataset_def, data_list):
"""Validates a list of data objects against a dataset
:param dataset_def: The dataset definition the member is a part of
:type dataset_def:
:param data_list: Data definitions of the dataset members
:type data_list: [:class:`data.data.data.Data`]
"""
is_valid = True
errors = []
warnings = []
for data in data_list:
try:
dataset_def.validate(data)
except (InvalidData, InvalidDataSetMember) as ex:
is_valid = False
errors.append(ex.error)
message = 'Dataset definition is invalid: %s' % ex
logger.info(message)
# validate other fields
return DataSetValidation(is_valid, errors, warnings)
def create_dataset_members(self, dataset, data_list):
"""Creates a dataset member
:param dataset: The dataset the member is a part of
:type dataset: :class:`data.models.DataSet`
:param data_list: Data definitions of the dataset members
:type data_list: [:class:`data.data.data.Data`]
"""
with transaction.atomic():
dataset_members = []
datasetfiles = []
existing_scale_ids = DataSetFile.objects.get_file_ids(dataset_ids=[dataset.id])
for d in data_list:
dataset_member = DataSetMember()
dataset_member.dataset = dataset
dataset_member.data = convert_data_to_v6_json(d).get_dict()
dataset_member.file_ids = list(data_util.get_file_ids(d))
dataset_members.append(dataset_member)
datasetfiles.extend(DataSetFile.objects.create_dataset_files(dataset, d, existing_scale_ids))
                existing_scale_ids.extend(dataset_member.file_ids)
DataSetFile.objects.bulk_create(datasetfiles)
return DataSetMember.objects.bulk_create(dataset_members)
def get_dataset_members(self, dataset):
"""Returns dataset members for the given dataset
:returns: members for a given dataset
:rtype: QuerySet<DataSetMember>
"""
return self.all().filter(dataset=dataset).order_by('id')
def get_details_v6(self, dsm_id):
"""Gets additional details for the given dataset member id
:returns: The full dataset member for the given id
:rtype: :class:`data.models.DataSetMember`
"""
dsm = DataSetMember.objects.get(pk=dsm_id)
dsm.files = DataSetFile.objects.filter(dataset=dsm.dataset, scale_file_id__in=list(dsm.file_ids))
return dsm
class DataSetMember(models.Model):
"""
Defines the data of a dataset? contains list/descriptors of DataFiles
:keyword dataset: Refers to dataset member belongs to
:type dataset: :class:`django.db.models.ForeignKey`
:keyword data: JSON description of the data in this DataSetMember.
:type data: :class: `django.contrib.postgres.fields.JSONField(default=dict)`
:keyword created: Created Time
:type created: datetime
"""
dataset = models.ForeignKey('data.DataSet', on_delete=models.PROTECT)
data = django.contrib.postgres.fields.JSONField(default=dict)
file_ids = django.contrib.postgres.fields.ArrayField(models.IntegerField(null=True))
created = models.DateTimeField(auto_now_add=True)
objects = DataSetMemberManager()
def get_dataset_definition(self):
"""Returns the dataset definition
:returns: The dataset definition
:rtype: :class:`data.dataset.dataset.DataSetDefinition`
"""
return self.dataset.get_definition()
def get_data(self):
"""Returns the data for this datasetmember
:returns: The data for this datasetmember
:rtype: :class:`data.data.data.Data`
"""
return DataV6(data=self.data, do_validate=False).get_data()
def get_v6_data_json(self):
"""Returns the data for this datasetmember as v6 json with the version stripped
:returns: The v6 JSON output data dict for this datasetmember
:rtype: dict
"""
return rest_utils.strip_schema_version(convert_data_to_v6_json(self.get_data()).get_dict())
class Meta(object):
"""meta information for the db"""
db_table = 'data_set_member'
class DataSetFileManager(models.Manager):
"""Manages the datasetfile model"""
def create_dataset_files(self, dataset, data, existing_scale_ids):
"""Creates dataset files for the given dataset and data"""
datasetfiles = []
for i in data.values.keys():
v = data.values[i]
if type(v) is FileValue:
for id in v.file_ids:
if id in existing_scale_ids:
continue
file = DataSetFile()
file.dataset = dataset
file.scale_file = ScaleFile.objects.get(pk=id)
file.parameter_name = i
datasetfiles.append(file)
return datasetfiles
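    # Illustrative input for create_dataset_files (an assumption based on the
    # FileValue handling above): data.values maps parameter names to value
    # objects, e.g. a FileValue with file_ids=[1, 2] under parameter
    # 'INPUT_IMAGE' yields one DataSetFile per scale file id not already in
    # existing_scale_ids.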
def get_file_ids(self, dataset_ids, parameter_names=None):
"""Returns a list of the file IDs for the given datasets, optionally filtered by parameter_name.
        :param dataset_ids: The ids of the associated datasets
        :type dataset_ids: :func:`list`
        :param parameter_names: The parameter names to search for in the given datasets
        :type parameter_names: :func:`list`
:returns: The list of scale file IDs
:rtype: :func:`list`
"""
query = self.all().filter(dataset_id__in=list(dataset_ids))
if parameter_names:
query = query.filter(parameter_name__in=list(parameter_names))
return [result.scale_file_id for result in query.only('scale_file_id').distinct()]
def get_dataset_ids(self, file_ids, all_files=False):
"""Returns a list of the dataset IDs that contain the given files
        :param file_ids: The ids of the files to look for
        :type file_ids: :func:`list`
        :param all_files: Whether a dataset must contain all of the given files or just some of them
        :type all_files: bool
:returns: The list of dataset IDs
:rtype: :func:`list`
"""
results = []
if not all_files:
query = self.all().filter(scale_file_id__in=list(file_ids)).only('dataset_id').distinct()
results = [result.dataset_id for result in query]
else:
query = self.all().filter(scale_file_id__in=list(file_ids)).values('dataset_id').annotate(total=Count('dataset_id')).order_by('total')
for result in query:
if result['total'] == len(file_ids):
results.append(result['dataset_id'])
return results
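    # Example of the all_files semantics (hypothetical ids): with
    # file_ids=[1, 2], all_files=False returns datasets containing file 1 or
    # file 2, while all_files=True keeps only datasets whose annotated count
    # equals len(file_ids), i.e. datasets containing both files.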
def get_files(self, dataset_ids, parameter_names=None):
"""Returns the dataset files associated with the given dataset_ids
        :param dataset_ids: The ids of the associated datasets
        :type dataset_ids: :func:`list`
        :param parameter_names: The parameter names to search for in the given datasets
        :type parameter_names: :func:`list`
:returns: The DataSetFiles associated with that dataset_id
:rtype: [:class:`data.models.DataSetFile`]
"""
files = self.all().filter(dataset_id__in=list(dataset_ids))
if parameter_names:
files = files.filter(parameter_name__in=list(parameter_names))
return files
def get_datasets(self, file_ids, all_files=False):
"""Returns the datasets associated with the given file_id
:param file_id: The id of the associated file
:type file_id: integer
:param all_files: Whether or not a dataset must contain all files or just some of the files in the list
:type all_files: bool
:returns: The DataSets associated with that dataset_id
:rtype: [:class:`data.models.DataSet`]
"""
dataset_ids = self.get_dataset_ids(file_ids=file_ids, all_files=all_files)
datasets = DataSet.objects.filter(id__in=dataset_ids)
return datasets
def get_dataset_files(self, dataset_id):
"""Returns the dataset files associated with the given dataset_id
:param dataset_id: The id of the associated dataset
:type dataset_id: integer
:returns: The DataSetFiles associated with that dataset_id
:rtype: [:class:`data.models.DataSetFile`]
"""
files = DataSetFile.objects.filter(dataset_id=dataset_id)
return files
class DataSetFile(models.Model):
"""
The actual file in a dataset member
:keyword dataset: Refers to the dataset the file is a member of
:type dataset: :class:`django.db.models.ForeignKey`
:keyword scale_file: Refers to the ScaleFile
:type scale_file: :class:`django.db.models.ForeignKey`
:keyword parameter_name: Refers to the File parameter name
:type parameter_name: :class:`django.db.models.CharField`
"""
dataset = models.ForeignKey('data.DataSet', on_delete=models.PROTECT)
scale_file = models.ForeignKey('storage.ScaleFile', on_delete=models.PROTECT)
parameter_name = models.CharField(db_index=True, max_length=50)
objects = DataSetFileManager()
class Meta(object):
"""meta information for the db"""
db_table = 'data_set_file'
unique_together = ("dataset", "scale_file")
|
apache-2.0
| 391,126,229,335,592,260 | 39.745763 | 146 | 0.650193 | false | 4.198952 | false | false | false |
alphatwirl/alphatwirl
|
alphatwirl/summary/Scan.py
|
1
|
1209
|
# Tai Sakuma <tai.sakuma@gmail.com>
##__________________________________________________________________||
import numpy as np
import copy
##__________________________________________________________________||
class Scan:
def __init__(self, val=None, weight=1, contents=None):
if contents is not None:
self.contents = contents
return
if val is None:
self.contents = [ ]
return
self.contents = [val]
def __add__(self, other):
contents = self.contents + other.contents
return self.__class__(contents=contents)
def __radd__(self, other):
# is called with other = 0 when e.g. sum([obj1, obj2])
if other == 0:
return self.__class__() + self
raise TypeError('unsupported: {!r} + {!r}'.format(other, self))
def __repr__(self):
return '{}(contents={})'.format(self.__class__.__name__, self.contents)
def __eq__(self, other):
return self.contents == other.contents
def __copy__(self):
contents = list(self.contents)
return self.__class__(contents=contents)
##__________________________________________________________________||
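# A minimal usage sketch (assumed, based on the class above): Scan objects
# concatenate their contents when added, and __radd__ lets the built-in
# sum(), which starts from 0, combine a list of them.
def _scan_usage_example():
    merged = sum([Scan(val=10), Scan(val=20)])
    assert merged.contents == [10, 20]
    assert copy.copy(merged) == merged

##__________________________________________________________________||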
|
bsd-3-clause
| -5,268,649,975,022,638,000 | 28.487805 | 79 | 0.456576 | false | 4.428571 | false | false | false |
absperf/wagtailapproval
|
wagtailapproval/menu.py
|
1
|
3637
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import itertools
from django.contrib.auth import get_user
from django.core.urlresolvers import reverse, reverse_lazy
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy as _n
from wagtail.wagtailadmin import messages
from wagtail.wagtailadmin.menu import MenuItem
from .models import ApprovalStep
def get_user_approval_items(user):
'''Get an iterable of all items pending for a user's approval.
:param User user: A user object whose groups are to be checked for
appropriate steps
:rtype: Iterable[ApprovalItem]
:returns: All the items that this user can approve or reject.
'''
if user.is_superuser:
steps = ApprovalStep.objects.all()
else:
groups = user.groups.all()
steps = ApprovalStep.objects.filter(group__in=groups)
return itertools.chain.from_iterable(
step.get_items(user) for step in steps)
class ApprovalMenuItem(MenuItem):
'''The menu item that shows in the wagtail sidebar'''
def __init__(
self, label=_('Approval'), url=reverse_lazy('wagtailapproval:index'),
classnames='icon icon-tick-inverse', order=201, **kwargs):
super(ApprovalMenuItem, self).__init__(
label,
url,
classnames=classnames,
order=order,
**kwargs)
def is_shown(self, request):
'''Only show the menu if the user is in an owned approval group'''
user = get_user(request)
# If the user is superuser, show the menu if any steps exist at all
if user.is_superuser:
return ApprovalStep.objects.exists()
groups = user.groups.all()
if ApprovalStep.objects.filter(group__in=groups).exists():
# Display the approval notification only outside of the approval
# paths
if not request.path.startswith(reverse('wagtailapproval:index')):
# Get the count of waiting approvals
waiting_approvals = sum(
1 for _ in get_user_approval_items(user))
if waiting_approvals > 0:
messages.info(
request,
_n(
'{num:d} item waiting for approval',
'{num:d} items waiting for approval',
waiting_approvals).format(num=waiting_approvals),
buttons=[
messages.button(
reverse('wagtailapproval:index'),
_('Examine Now'))
]
)
return True
return False
class ApprovalAdminMenuItem(MenuItem):
'''The admin menu item that shows in the wagtail sidebar, for
administrating entire pipelines and manually dropping items into steps.'''
def __init__(
self, label=_('Approval Admin'),
url=reverse_lazy('wagtailapproval:admin_index'),
classnames='icon icon-cog', order=200, **kwargs):
super(ApprovalAdminMenuItem, self).__init__(
label,
url,
classnames=classnames,
order=order,
**kwargs)
def is_shown(self, request):
'''Only show the menu if the user is a superuser and any ApprovalStep
objects exist.'''
user = get_user(request)
if user.is_superuser:
return ApprovalStep.objects.exists()
return False
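# A hypothetical registration sketch (not part of this module): projects
# would typically expose these menu items from a wagtail_hooks.py module
# using Wagtail's 'register_admin_menu_item' hook.
#
#     from wagtail.wagtailcore import hooks
#
#     @hooks.register('register_admin_menu_item')
#     def register_approval_menu_item():
#         return ApprovalMenuItem()
#
#     @hooks.register('register_admin_menu_item')
#     def register_approval_admin_menu_item():
#         return ApprovalAdminMenuItem()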
|
bsd-2-clause
| -1,745,352,902,436,354,300 | 35.009901 | 78 | 0.587022 | false | 4.65685 | false | false | false |
lulivi/debate_bot
|
bot.py
|
1
|
5398
|
#!/usr/bin/python3 -u
# -*- coding: utf-8 -*-
import sys
import time
import telebot # Bot API library.
from telebot import types # Types for the bot API.
from priv.__init__ import token as tk
bot = telebot.TeleBot(tk()) # Create our bot object.
###############################################################################
# commands
###############################################################################
# start: welcome message
@bot.message_handler(commands=['start'])
def command_start(m):
cid = m.chat.id
comando = m.text[7:]
if comando == 'reglas':
command_reglas(m)
else:
bot.send_message(cid,"¡Hola! Soy Debatebot.\nUsa el comando /ayuda para que te muestre mis demás comandos.\n\nEspero ser de utilidad.")
########################################
# show the visible commands
@bot.message_handler(commands=['ayuda'])
def command_ayuda(m):
bot.reply_to(m,"Guardo y doy información acerca de debates.\n/nuevo establezco el nuevo tema de debate.\n/actual muestro el tema actual de debate.\n/fin termino el debate actual.\n/reglas muestro las reglas actuales del grupo.")
########################################
# new debate
@bot.message_handler(commands=['nuevo'])
def command_nuevo(m):
pos = m.text.find(" ")
cid = m.chat.id
if pos == -1:
bot.send_message(cid,m.from_user.first_name+", escribe:\n/nuevo nuevo_tema_de_debate")
else:
if get_matter(cid) == "":
set_matter(cid, m.text[pos:])
fuid = m.from_user.id
set_matter_id(cid, fuid)
bot.send_message(cid,"El tema actual se ha guardado con éxito, "+m.from_user.first_name+".")
else:
bot.send_message(cid,"Ya se está debatifino un tema, "+m.from_user.first_name+".\n/fin para terminarlo.\n/actual para obtenerlo.")
########################################
# current debate
@bot.message_handler(commands=['actual'])
def command_actual(m):
cid = m.chat.id
actual = get_matter(cid)
if actual != "":
bot.send_message(cid,"\"* "+actual+" *\" es el tema actual.\n\n/fin para terminarlo.",parse_mode="Markdown")
else:
bot.send_message(cid,"No hay debate actualmente.\n/nuevo para comenzar uno.")
########################################
# end the debate
@bot.message_handler(commands=['fin'])
def command_fin(m):
cid = m.chat.id
if get_matter(cid) != "":
uid = get_matter_id(cid)
fuid = m.from_user.id
if uid == fuid:
set_matter(cid)
set_matter_id(cid,uid)
bot.send_message(cid,"Tema cerrado, "+m.from_user.first_name+".\n/nuevo para comenzar uno.")
else:
bot.send_message(cid,"No tiene permiso para terminar el debate, "+m.from_user.first_name+".")
else:
bot.send_message(cid, "No hay debate actualmente, "+m.from_user.first_name+".\n/nuevo para comenzar uno.")
########################################
REGLASID = ""
# rules
@bot.message_handler(commands=['reglas'])
def command_to_reglas(m):
cid = m.chat.id
if cid < 0:
REGLASID = str(cid)
bot.send_message(cid,"Pulse [aquí](https://telegram.me/debate_bot?start=reglas)",parse_mode="Markdown")
else:
command_reglas(m)
def command_reglas(m):
if REGLASID != "":
reglas = get_reglas(REGLASID)
else:
cid = m.chat.id
reglas = get_reglas(cid)
if reglas != "":
bot.reply_to(m,"Reglas de participación en este grupo:\n\n"+reglas)
else:
bot.reply_to(m,"No hay relgas definidas para este grupo.")
########################################
# define the rules
@bot.message_handler(commands=['definereglas'])
def command_definereglas(m):
cid = m.chat.id
text = m.text
pos = text.find(" ")
if pos != -1:
txt = m.text[pos+1:]
set_reglas(cid, txt)
else:
txt = ""
set_reglas(cid, txt)
###############################################################################
# functions
###############################################################################
##### matter #####
def set_matter(chatid,txt=""):
cid = str(chatid)
with open("./matter/"+cid+".mat",'w') as f:
f.write(txt)
def get_matter(chatid):
cid = str(chatid)
with open("./matter/"+cid+".mat",'a') as f:
pass
with open("./matter/"+cid+".mat",'r') as f:
matter = f.read()
return matter
##### reglas #####
def set_reglas(chatid, txt):
cid = str(chatid)
with open("./reglas/"+cid+".rul",'w') as f:
f.write(txt)
def get_reglas(chatid):
cid = str(chatid)
with open("./reglas/"+cid+".rul",'a') as f:
pass
with open("./reglas/"+cid+".rul",'r') as f:
reglas = f.read()
return reglas
##### matter id #####
def set_matter_id(chatid,userid):
cid = str(chatid)
uid = str(userid)
with open("./matter/"+cid+".matid",'w') as f:
f.write(uid)
def get_matter_id(chatid):
cid = str(chatid)
with open("./matter/"+cid+".matid",'a') as f:
pass
with open("./matter/"+cid+".matid",'r') as f:
uid = f.read()
if uid == "":
return -1
else:
return int(uid)
###############################################################################
bot.polling()
|
gpl-2.0
| 6,137,335,804,472,736,000 | 31.083333 | 232 | 0.520779 | false | 3.24113 | false | false | false |
chugunovyar/factoryForBuild
|
neuron/SaveClosedPossition.py
|
1
|
31069
|
# -*- coding: utf-8 -*-
import logging
from neuron.models import DataSet
import dateutil.parser as DP
loggermsg = logging.getLogger('django')
def saveClosedPossition(jsondata):
#loggermsg.info(len(jsondata))
    # Check whether such an order already exists in the DB
ifExistOrdernum = DataSet.objects.filter(open_magicnum=jsondata['magicnum'])
    # If there is no such order yet, write it to the DB.
if len(ifExistOrdernum) == 0:
if float(jsondata['result']) > 0:
effectivnes = 1
else:
effectivnes = 0
dataToSave = DataSet(
open_magicnum = jsondata['magicnum'],\
open_neuron_name = jsondata['neuron_name'],\
open_period = jsondata['period'],\
orderOpenPrice = jsondata['openprice'],\
open_type = jsondata['open_type'],\
open_time = DP.parse(jsondata['orderopentime']),\
open_close_1 = jsondata['open_close_1'],\
open_open_1 = jsondata['open_open_1'],\
open_high_1 = jsondata['open_high_1'],\
open_low_1 = jsondata['open_low_1'],
open_upband_1 = jsondata['open_upband_1'],
open_lowband_1 = jsondata['open_lowband_1'],
open_midleband_1 = jsondata['open_midleband_1'],
open_jaw_1 = jsondata['open_jaw_1'],
open_lips_1 = jsondata['open_lips_1'],
open_teeth_1 = jsondata['open_teeth_1'],
open_volume_1 = jsondata['open_volume_1'],
open_close_2 = jsondata['open_close_2'],
open_open_2 = jsondata['open_open_2'],
open_high_2 = jsondata['open_high_2'],
open_low_2 = jsondata['open_low_2'],
open_upband_2 = jsondata['open_upband_2'],
open_lowband_2 = jsondata['open_lowband_2'],
open_midleband_2 = jsondata['open_midleband_2'],
open_jaw_2 = jsondata['open_jaw_2'],
open_lips_2 = jsondata['open_lips_2'],
open_teeth_2 = jsondata['open_teeth_2'],
open_volume_2 = jsondata['open_volume_2'],
open_close_3 = jsondata['open_close_3'],
open_open_3 = jsondata['open_open_3'],
open_high_3 = jsondata['open_high_3'],
open_low_3 = jsondata['open_low_3'],
open_upband_3 = jsondata['open_upband_3'],
open_lowband_3 = jsondata['open_lowband_3'],
open_midleband_3 = jsondata['open_midleband_3'],
open_jaw_3 = jsondata['open_jaw_3'],
open_lips_3 = jsondata['open_lips_3'],
open_teeth_3 = jsondata['open_teeth_3'],
open_volume_3 = jsondata['open_volume_3'],
open_close_4 = jsondata['open_close_4'],
open_open_4 = jsondata['open_open_4'],
open_high_4 = jsondata['open_high_4'],
open_low_4 = jsondata['open_low_4'],
open_upband_4 = jsondata['open_upband_4'],
open_lowband_4 = jsondata['open_lowband_4'],
open_midleband_4 = jsondata['open_midleband_4'],
open_jaw_4 = jsondata['open_jaw_4'],
open_lips_4 = jsondata['open_lips_4'],
open_teeth_4 = jsondata['open_teeth_4'],
open_volume_4 = jsondata['open_volume_4'],
open_close_5 = jsondata['open_close_5'],
open_open_5 = jsondata['open_open_5'],
open_high_5 = jsondata['open_high_5'],
open_low_5 = jsondata['open_low_5'],
open_upband_5 = jsondata['open_upband_5'],
open_lowband_5 = jsondata['open_lowband_5'],
open_midleband_5 = jsondata['open_midleband_5'],
open_jaw_5 = jsondata['open_jaw_5'],
open_lips_5 = jsondata['open_lips_5'],
open_teeth_5 = jsondata['open_teeth_5'],
open_volume_5 = jsondata['open_volume_5'],
open_close_6 = jsondata['open_close_6'],
open_open_6 = jsondata['open_open_6'],
open_high_6 = jsondata['open_high_6'],
open_low_6 = jsondata['open_low_6'],
open_upband_6 = jsondata['open_upband_6'],
open_lowband_6 = jsondata['open_lowband_6'],
open_midleband_6 = jsondata['open_midleband_6'],
open_jaw_6 = jsondata['open_jaw_6'],
open_lips_6 = jsondata['open_lips_6'],
open_teeth_6 = jsondata['open_teeth_6'],
open_volume_6 = jsondata['open_volume_6'],
open_close_7 = jsondata['open_close_7'],
open_open_7 = jsondata['open_open_7'],
open_high_7 = jsondata['open_high_7'],
open_low_7 = jsondata['open_low_7'],
open_upband_7 = jsondata['open_upband_7'],
open_lowband_7 = jsondata['open_lowband_7'],
open_midleband_7 = jsondata['open_midleband_7'],
open_jaw_7 = jsondata['open_jaw_7'],
open_lips_7 = jsondata['open_lips_7'],
open_teeth_7 = jsondata['open_teeth_7'],
open_volume_7 = jsondata['open_volume_7'],
open_close_8 = jsondata['open_close_8'],
open_open_8 = jsondata['open_open_8'],
open_high_8 = jsondata['open_high_8'],
open_low_8 = jsondata['open_low_8'],
open_upband_8 = jsondata['open_upband_8'],
open_lowband_8 = jsondata['open_lowband_8'],
open_midleband_8 = jsondata['open_midleband_8'],
open_jaw_8 = jsondata['open_jaw_8'],
open_lips_8 = jsondata['open_lips_8'],
open_teeth_8 = jsondata['open_teeth_8'],
open_volume_8 = jsondata['open_volume_8'],
open_close_9 = jsondata['open_close_9'],
open_open_9 = jsondata['open_open_9'],
open_high_9 = jsondata['open_high_9'],
open_low_9 = jsondata['open_low_9'],
open_upband_9 = jsondata['open_upband_9'],
open_lowband_9 = jsondata['open_lowband_9'],
open_midleband_9 = jsondata['open_midleband_9'],
open_jaw_9 = jsondata['open_jaw_9'],
open_lips_9 = jsondata['open_lips_9'],
open_teeth_9 = jsondata['open_teeth_9'],
open_volume_9 = jsondata['open_volume_9'],
open_close_10 = jsondata['open_close_10'],
open_open_10 = jsondata['open_open_10'],
open_high_10 = jsondata['open_high_10'],
open_low_10 = jsondata['open_low_10'],
open_upband_10 = jsondata['open_upband_10'],
open_lowband_10 = jsondata['open_lowband_10'],
open_midleband_10 = jsondata['open_midleband_10'],
open_jaw_10 = jsondata['open_jaw_10'],
open_lips_10 = jsondata['open_lips_10'],
open_teeth_10 = jsondata['open_teeth_10'],
open_volume_10 = jsondata['open_volume_10'],
)
dataToSave.save()
DataSet.objects.filter(open_magicnum=jsondata['magicnum']).update(
open_close_11 = jsondata['open_close_11'],
open_open_11 = jsondata['open_open_11'],
open_high_11 = jsondata['open_high_11'],
open_low_11 = jsondata['open_low_11'],
open_upband_11 = jsondata['open_upband_11'],
open_lowband_11 = jsondata['open_lowband_11'],
open_midleband_11 = jsondata['open_midleband_11'],
open_jaw_11 = jsondata['open_jaw_11'],
open_lips_11 = jsondata['open_lips_11'],
open_teeth_11 = jsondata['open_teeth_11'],
open_volume_11 = jsondata['open_volume_11'],
open_close_12 = jsondata['open_close_12'],
open_open_12 = jsondata['open_open_12'],
open_high_12 = jsondata['open_high_12'],
open_low_12 = jsondata['open_low_12'],
open_upband_12 = jsondata['open_upband_12'],
open_lowband_12 = jsondata['open_lowband_12'],
open_midleband_12 = jsondata['open_midleband_12'],
open_jaw_12 = jsondata['open_jaw_12'],
open_lips_12 = jsondata['open_lips_12'],
open_teeth_12 = jsondata['open_teeth_12'],
open_volume_12 = jsondata['open_volume_12'],
open_close_13 = jsondata['open_close_13'],
open_open_13 = jsondata['open_open_13'],
open_high_13 = jsondata['open_high_13'],
open_low_13 = jsondata['open_low_13'],
open_upband_13 = jsondata['open_upband_13'],
open_lowband_13 = jsondata['open_lowband_13'],
open_midleband_13 = jsondata['open_midleband_13'],
open_jaw_13 = jsondata['open_jaw_13'],
open_lips_13 = jsondata['open_lips_13'],
open_teeth_13 = jsondata['open_teeth_13'],
open_volume_13 = jsondata['open_volume_13'],
open_close_14 = jsondata['open_close_14'],
open_open_14 = jsondata['open_open_14'],
open_high_14 = jsondata['open_high_14'],
open_low_14 = jsondata['open_low_14'],
open_upband_14 = jsondata['open_upband_14'],
open_lowband_14 = jsondata['open_lowband_14'],
open_midleband_14 = jsondata['open_midleband_14'],
open_jaw_14 = jsondata['open_jaw_14'],
open_lips_14 = jsondata['open_lips_14'],
open_teeth_14 = jsondata['open_teeth_14'],
open_volume_14 = jsondata['open_volume_14'],
open_close_15 = jsondata['open_close_15'],
open_open_15 = jsondata['open_open_15'],
open_high_15 = jsondata['open_high_15'],
open_low_15 = jsondata['open_low_15'],
open_upband_15 = jsondata['open_upband_15'],
open_lowband_15 = jsondata['open_lowband_15'],
open_midleband_15 = jsondata['open_midleband_15'],
open_jaw_15 = jsondata['open_jaw_15'],
open_lips_15 = jsondata['open_lips_15'],
open_teeth_15 = jsondata['open_teeth_15'],
open_volume_15 = jsondata['open_volume_15'],
open_close_16 = jsondata['open_close_16'],
open_open_16 = jsondata['open_open_16'],
open_high_16 = jsondata['open_high_16'],
open_low_16 = jsondata['open_low_16'],
open_upband_16 = jsondata['open_upband_16'],
open_lowband_16 = jsondata['open_lowband_16'],
open_midleband_16 = jsondata['open_midleband_16'],
open_jaw_16 = jsondata['open_jaw_16'],
open_lips_16 = jsondata['open_lips_16'],
open_teeth_16 = jsondata['open_teeth_16'],
open_volume_16 = jsondata['open_volume_16'],
open_close_17 = jsondata['open_close_17'],
open_open_17 = jsondata['open_open_17'],
open_high_17 = jsondata['open_high_17'],
open_low_17 = jsondata['open_low_17'],
open_upband_17 = jsondata['open_upband_17'],
open_lowband_17 = jsondata['open_lowband_17'],
open_midleband_17 = jsondata['open_midleband_17'],
open_jaw_17 = jsondata['open_jaw_17'],
open_lips_17 = jsondata['open_lips_17'],
open_teeth_17 = jsondata['open_teeth_17'],
open_volume_17 = jsondata['open_volume_17'],
open_close_18 = jsondata['open_close_18'],
open_open_18 = jsondata['open_open_18'],
open_high_18 = jsondata['open_high_18'],
open_low_18 = jsondata['open_low_18'],
open_upband_18 = jsondata['open_upband_18'],
open_lowband_18 = jsondata['open_lowband_18'],
open_midleband_18 = jsondata['open_midleband_18'],
open_jaw_18 = jsondata['open_jaw_18'],
open_lips_18 = jsondata['open_lips_18'],
open_teeth_18 = jsondata['open_teeth_18'],
open_volume_18 = jsondata['open_volume_18'],
open_close_19 = jsondata['open_close_19'],
open_open_19 = jsondata['open_open_19'],
open_high_19 = jsondata['open_high_19'],
open_low_19 = jsondata['open_low_19'],
open_upband_19 = jsondata['open_upband_19'],
open_lowband_19 = jsondata['open_lowband_19'],
open_midleband_19 = jsondata['open_midleband_19'],
open_jaw_19 = jsondata['open_jaw_19'],
open_lips_19 = jsondata['open_lips_19'],
open_teeth_19 = jsondata['open_teeth_19'],
open_volume_19 = jsondata['open_volume_19'],
open_close_20 = jsondata['open_close_20'],
open_open_20 = jsondata['open_open_20'],
open_high_20 = jsondata['open_high_20'],
open_low_20 = jsondata['open_low_20'],
open_upband_20 = jsondata['open_upband_20'],
open_lowband_20 = jsondata['open_lowband_20'],
open_midleband_20 = jsondata['open_midleband_20'],
open_jaw_20 = jsondata['open_jaw_20'],
open_lips_20 = jsondata['open_lips_20'],
open_teeth_20 = jsondata['open_teeth_20'],
open_volume_20 = jsondata['open_volume_20'],
open_close_21 = jsondata['open_close_21'],
open_open_21 = jsondata['open_open_21'],
open_high_21 = jsondata['open_high_21'],
open_low_21 = jsondata['open_low_21'],
open_upband_21 = jsondata['open_upband_21'],
open_lowband_21 = jsondata['open_lowband_21'],
open_midleband_21 = jsondata['open_midleband_21'],
open_jaw_21 = jsondata['open_jaw_21'],
open_lips_21 = jsondata['open_lips_21'],
open_teeth_21 = jsondata['open_teeth_21'],
open_volume_21 = jsondata['open_volume_21'],
open_close_22 = jsondata['open_close_22'],
open_open_22 = jsondata['open_open_22'],
open_high_22 = jsondata['open_high_22'],
open_low_22 = jsondata['open_low_22'],
open_upband_22 = jsondata['open_upband_22'],
open_lowband_22 = jsondata['open_lowband_22'],
open_midleband_22 = jsondata['open_midleband_22'],
open_jaw_22 = jsondata['open_jaw_22'],
open_lips_22 = jsondata['open_lips_22'],
open_teeth_22 = jsondata['open_teeth_22'],
open_volume_22 = jsondata['open_volume_22'],
open_close_23 = jsondata['open_close_23'],
open_open_23 = jsondata['open_open_23'],
open_high_23 = jsondata['open_high_23'],
open_low_23 = jsondata['open_low_23'],
open_upband_23 = jsondata['open_upband_23'],
open_lowband_23 = jsondata['open_lowband_23'],
open_midleband_23 = jsondata['open_midleband_23'],
open_jaw_23 = jsondata['open_jaw_23'],
open_lips_23 = jsondata['open_lips_23'],
open_teeth_23 = jsondata['open_teeth_23'],
open_volume_23 = jsondata['open_volume_23'],
open_close_24 = jsondata['open_close_24'],
open_open_24 = jsondata['open_open_24'],
open_high_24 = jsondata['open_high_24'],
open_low_24 = jsondata['open_low_24'],
open_upband_24 = jsondata['open_upband_24'],
open_lowband_24 = jsondata['open_lowband_24'],
open_midleband_24 = jsondata['open_midleband_24'],
open_jaw_24 = jsondata['open_jaw_24'],
open_lips_24 = jsondata['open_lips_24'],
open_teeth_24 = jsondata['open_teeth_24'],
open_volume_24 = jsondata['open_volume_24']
)
DataSet.objects.filter(open_magicnum=jsondata['magicnum']).update(
close_close_1 = jsondata['close_close_1'],
close_open_1 = jsondata['close_open_1'],
close_high_1 = jsondata['close_high_1'],
close_low_1 = jsondata['close_low_1'],
close_upband_1 = jsondata['close_upband_1'],
close_lowband_1 = jsondata['close_lowband_1'],
close_midleband_1 = jsondata['close_midleband_1'],
close_jaw_1 = jsondata['close_jaw_1'],
close_lips_1 = jsondata['close_lips_1'],
close_teeth_1 = jsondata['close_teeth_1'],
close_volume_1 = jsondata['close_volume_1'],
close_close_2 = jsondata['close_close_2'],
close_open_2 = jsondata['close_open_2'],
close_high_2 = jsondata['close_high_2'],
close_low_2 = jsondata['close_low_2'],
close_upband_2 = jsondata['close_upband_2'],
close_lowband_2 = jsondata['close_lowband_2'],
close_midleband_2 = jsondata['close_midleband_2'],
close_jaw_2 = jsondata['close_jaw_2'],
close_lips_2 = jsondata['close_lips_2'],
close_teeth_2 = jsondata['close_teeth_2'],
close_volume_2 = jsondata['close_volume_2'],
close_close_3 = jsondata['close_close_3'],
close_open_3 = jsondata['close_open_3'],
close_high_3 = jsondata['close_high_3'],
close_low_3 = jsondata['close_low_3'],
close_upband_3 = jsondata['close_upband_3'],
close_lowband_3 = jsondata['close_lowband_3'],
close_midleband_3 = jsondata['close_midleband_3'],
close_jaw_3 = jsondata['close_jaw_3'],
close_lips_3 = jsondata['close_lips_3'],
close_teeth_3 = jsondata['close_teeth_3'],
close_volume_3 = jsondata['close_volume_3'],
close_close_4 = jsondata['close_close_4'],
close_open_4 = jsondata['close_open_4'],
close_high_4 = jsondata['close_high_4'],
close_low_4 = jsondata['close_low_4'],
close_upband_4 = jsondata['close_upband_4'],
close_lowband_4 = jsondata['close_lowband_4'],
close_midleband_4 = jsondata['close_midleband_4'],
close_jaw_4 = jsondata['close_jaw_4'],
close_lips_4 = jsondata['close_lips_4'],
close_teeth_4 = jsondata['close_teeth_4'],
close_volume_4 = jsondata['close_volume_4'],
close_close_5 = jsondata['close_close_5'],
close_open_5 = jsondata['close_open_5'],
close_high_5 = jsondata['close_high_5'],
close_low_5 = jsondata['close_low_5'],
close_upband_5 = jsondata['close_upband_5'],
close_lowband_5 = jsondata['close_lowband_5'],
close_midleband_5 = jsondata['close_midleband_5'],
close_jaw_5 = jsondata['close_jaw_5'],
close_lips_5 = jsondata['close_lips_5'],
close_teeth_5 = jsondata['close_teeth_5'],
close_volume_5 = jsondata['close_volume_5'],
close_close_6 = jsondata['close_close_6'],
close_open_6 = jsondata['close_open_6'],
close_high_6 = jsondata['close_high_6'],
close_low_6 = jsondata['close_low_6'],
close_upband_6 = jsondata['close_upband_6'],
close_lowband_6 = jsondata['close_lowband_6'],
close_midleband_6 = jsondata['close_midleband_6'],
close_jaw_6 = jsondata['close_jaw_6'],
close_lips_6 = jsondata['close_lips_6'],
close_teeth_6 = jsondata['close_teeth_6'],
close_volume_6 = jsondata['close_volume_6'],
close_close_7 = jsondata['close_close_7'],
close_open_7 = jsondata['close_open_7'],
close_high_7 = jsondata['close_high_7'],
close_low_7 = jsondata['close_low_7'],
close_upband_7 = jsondata['close_upband_7'],
close_lowband_7 = jsondata['close_lowband_7'],
close_midleband_7 = jsondata['close_midleband_7'],
close_jaw_7 = jsondata['close_jaw_7'],
close_lips_7 = jsondata['close_lips_7'],
close_teeth_7 = jsondata['close_teeth_7'],
close_volume_7 = jsondata['close_volume_7'],
close_close_8 = jsondata['close_close_8'],
close_open_8 = jsondata['close_open_8'],
close_high_8 = jsondata['close_high_8'],
close_low_8 = jsondata['close_low_8'],
close_upband_8 = jsondata['close_upband_8'],
close_lowband_8 = jsondata['close_lowband_8'],
close_midleband_8 = jsondata['close_midleband_8'],
close_jaw_8 = jsondata['close_jaw_8'],
close_lips_8 = jsondata['close_lips_8'],
close_teeth_8 = jsondata['close_teeth_8'],
close_volume_8 = jsondata['close_volume_8'],
close_close_9 = jsondata['close_close_9'],
close_open_9 = jsondata['close_open_9'],
close_high_9 = jsondata['close_high_9'],
close_low_9 = jsondata['close_low_9'],
close_upband_9 = jsondata['close_upband_9'],
close_lowband_9 = jsondata['close_lowband_9'],
close_midleband_9 = jsondata['close_midleband_9'],
close_jaw_9 = jsondata['close_jaw_9'],
close_lips_9 = jsondata['close_lips_9'],
close_teeth_9 = jsondata['close_teeth_9'],
close_volume_9 = jsondata['close_volume_9'],
close_close_10 = jsondata['close_close_10'],
close_open_10 = jsondata['close_open_10'],
close_high_10 = jsondata['close_high_10'],
close_low_10 = jsondata['close_low_10'],
close_upband_10 = jsondata['close_upband_10'],
close_lowband_10 = jsondata['close_lowband_10'],
close_midleband_10 = jsondata['close_midleband_10'],
close_jaw_10 = jsondata['close_jaw_10'],
close_lips_10 = jsondata['close_lips_10'],
close_teeth_10 = jsondata['close_teeth_10'],
close_volume_10 = jsondata['close_volume_10'],
close_close_11 = jsondata['close_close_11'],
close_open_11 = jsondata['close_open_11'],
close_high_11 = jsondata['close_high_11'],
close_low_11 = jsondata['close_low_11'],
close_upband_11 = jsondata['close_upband_11'],
close_lowband_11 = jsondata['close_lowband_11'],
close_midleband_11 = jsondata['close_midleband_11'],
close_jaw_11 = jsondata['close_jaw_11'],
close_lips_11 = jsondata['close_lips_11'],
close_teeth_11 = jsondata['close_teeth_11'],
close_volume_11 = jsondata['close_volume_11'],
close_close_12 = jsondata['close_close_12'],
close_open_12 = jsondata['close_open_12'],
close_high_12 = jsondata['close_high_12'],
close_low_12 = jsondata['close_low_12'],
close_upband_12 = jsondata['close_upband_12'],
close_lowband_12 = jsondata['close_lowband_12'],
close_midleband_12 = jsondata['close_midleband_12'],
close_jaw_12 = jsondata['close_jaw_12'],
close_lips_12 = jsondata['close_lips_12'],
close_teeth_12 = jsondata['close_teeth_12'],
close_volume_12 = jsondata['close_volume_12'],
)
DataSet.objects.filter(open_magicnum=jsondata['magicnum']).update(
close_close_13 = jsondata['close_close_13'],
close_open_13 = jsondata['close_open_13'],
close_high_13 = jsondata['close_high_13'],
close_low_13 = jsondata['close_low_13'],
close_upband_13 = jsondata['close_upband_13'],
close_lowband_13 = jsondata['close_lowband_13'],
close_midleband_13 = jsondata['close_midleband_13'],
close_jaw_13 = jsondata['close_jaw_13'],
close_lips_13 = jsondata['close_lips_13'],
close_teeth_13 = jsondata['close_teeth_13'],
close_volume_13 = jsondata['close_volume_13'],
close_close_14 = jsondata['close_close_14'],
close_open_14 = jsondata['close_open_14'],
close_high_14 = jsondata['close_high_14'],
close_low_14 = jsondata['close_low_14'],
close_upband_14 = jsondata['close_upband_14'],
close_lowband_14 = jsondata['close_lowband_14'],
close_midleband_14 = jsondata['close_midleband_14'],
close_jaw_14 = jsondata['close_jaw_14'],
close_lips_14 = jsondata['close_lips_14'],
close_teeth_14 = jsondata['close_teeth_14'],
close_volume_14 = jsondata['close_volume_14'],
close_close_15 = jsondata['close_close_15'],
close_open_15 = jsondata['close_open_15'],
close_high_15 = jsondata['close_high_15'],
close_low_15 = jsondata['close_low_15'],
close_upband_15 = jsondata['close_upband_15'],
close_lowband_15 = jsondata['close_lowband_15'],
close_midleband_15 = jsondata['close_midleband_15'],
close_jaw_15 = jsondata['close_jaw_15'],
close_lips_15 = jsondata['close_lips_15'],
close_teeth_15 = jsondata['close_teeth_15'],
close_volume_15 = jsondata['close_volume_15'],
close_close_16 = jsondata['close_close_16'],
close_open_16 = jsondata['close_open_16'],
close_high_16 = jsondata['close_high_16'],
close_low_16 = jsondata['close_low_16'],
close_upband_16 = jsondata['close_upband_16'],
close_lowband_16 = jsondata['close_lowband_16'],
close_midleband_16 = jsondata['close_midleband_16'],
close_jaw_16 = jsondata['close_jaw_16'],
close_lips_16 = jsondata['close_lips_16'],
close_teeth_16 = jsondata['close_teeth_16'],
close_volume_16 = jsondata['close_volume_16'],
close_close_17 = jsondata['close_close_17'],
close_open_17 = jsondata['close_open_17'],
close_high_17 = jsondata['close_high_17'],
close_low_17 = jsondata['close_low_17'],
close_upband_17 = jsondata['close_upband_17'],
close_lowband_17 = jsondata['close_lowband_17'],
close_midleband_17 = jsondata['close_midleband_17'],
close_jaw_17 = jsondata['close_jaw_17'],
close_lips_17 = jsondata['close_lips_17'],
close_teeth_17 = jsondata['close_teeth_17'],
close_volume_17 = jsondata['close_volume_17'],
close_close_18 = jsondata['close_close_18'],
close_open_18 = jsondata['close_open_18'],
close_high_18 = jsondata['close_high_18'],
close_low_18 = jsondata['close_low_18'],
close_upband_18 = jsondata['close_upband_18'],
close_lowband_18 = jsondata['close_lowband_18'],
close_midleband_18 = jsondata['close_midleband_18'],
close_jaw_18 = jsondata['close_jaw_18'],
close_lips_18 = jsondata['close_lips_18'],
close_teeth_18 = jsondata['close_teeth_18'],
close_volume_18 = jsondata['close_volume_18'],
close_close_19 = jsondata['close_close_19'],
close_open_19 = jsondata['close_open_19'],
close_high_19 = jsondata['close_high_19'],
close_low_19 = jsondata['close_low_19'],
close_upband_19 = jsondata['close_upband_19'],
close_lowband_19 = jsondata['close_lowband_19'],
close_midleband_19 = jsondata['close_midleband_19'],
close_jaw_19 = jsondata['close_jaw_19'],
close_lips_19 = jsondata['close_lips_19'],
close_teeth_19 = jsondata['close_teeth_19'],
close_volume_19 = jsondata['close_volume_19'],
close_close_20 = jsondata['close_close_20'],
close_open_20 = jsondata['close_open_20'],
close_high_20 = jsondata['close_high_20'],
close_low_20 = jsondata['close_low_20'],
close_upband_20 = jsondata['close_upband_20'],
close_lowband_20 = jsondata['close_lowband_20'],
close_midleband_20 = jsondata['close_midleband_20'],
close_jaw_20 = jsondata['close_jaw_20'],
close_lips_20 = jsondata['close_lips_20'],
close_teeth_20 = jsondata['close_teeth_20'],
close_volume_20 = jsondata['close_volume_20'],
close_close_21 = jsondata['close_close_21'],
close_open_21 = jsondata['close_open_21'],
close_high_21 = jsondata['close_high_21'],
close_low_21 = jsondata['close_low_21'],
close_upband_21 = jsondata['close_upband_21'],
close_lowband_21 = jsondata['close_lowband_21'],
close_midleband_21 = jsondata['close_midleband_21'],
close_jaw_21 = jsondata['close_jaw_21'],
close_lips_21 = jsondata['close_lips_21'],
close_teeth_21 = jsondata['close_teeth_21'],
close_volume_21 = jsondata['close_volume_21'],
close_close_22 = jsondata['close_close_22'],
close_open_22 = jsondata['close_open_22'],
close_high_22 = jsondata['close_high_22'],
close_low_22 = jsondata['close_low_22'],
close_upband_22 = jsondata['close_upband_22'],
close_lowband_22 = jsondata['close_lowband_22'],
close_midleband_22 = jsondata['close_midleband_22'],
close_jaw_22 = jsondata['close_jaw_22'],
close_lips_22 = jsondata['close_lips_22'],
close_teeth_22 = jsondata['close_teeth_22'],
close_volume_22 = jsondata['close_volume_22'],
close_close_23 = jsondata['close_close_23'],
close_open_23 = jsondata['close_open_23'],
close_high_23 = jsondata['close_high_23'],
close_low_23 = jsondata['close_low_23'],
close_upband_23 = jsondata['close_upband_23'],
close_lowband_23 = jsondata['close_lowband_23'],
close_midleband_23 = jsondata['close_midleband_23'],
close_jaw_23 = jsondata['close_jaw_23'],
close_lips_23 = jsondata['close_lips_23'],
close_teeth_23 = jsondata['close_teeth_23'],
close_volume_23 = jsondata['close_volume_23'],
close_close_24 = jsondata['close_close_24'],
close_open_24 = jsondata['close_open_24'],
close_high_24 = jsondata['close_high_24'],
close_low_24 = jsondata['close_low_24'],
close_upband_24 = jsondata['close_upband_24'],
close_lowband_24 = jsondata['close_lowband_24'],
close_midleband_24 = jsondata['close_midleband_24'],
close_jaw_24 = jsondata['close_jaw_24'],
close_lips_24 = jsondata['close_lips_24'],
close_teeth_24 = jsondata['close_teeth_24'],
close_volume_24 = jsondata['close_volume_24'],
close_result = jsondata['result'],
close_effectivnes = effectivnes,
close_neuron_name = jsondata['neuron_name'],
close_closeprice = jsondata['closeprice'],
close_time = DP.parse(jsondata['orderclosetime'])
)
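# A possible refactor sketch (assumption: every open_*/close_* key follows the
# exact '<prefix>_<field>_<n>' naming used above). The repetitive keyword maps
# could be generated instead of spelled out:
#
#     FIELDS = ['close', 'open', 'high', 'low', 'upband', 'lowband',
#               'midleband', 'jaw', 'lips', 'teeth', 'volume']
#
#     def bar_kwargs(jsondata, prefix, start, end):
#         return {'%s_%s_%d' % (prefix, f, n): jsondata['%s_%s_%d' % (prefix, f, n)]
#                 for f in FIELDS for n in range(start, end + 1)}
#
#     DataSet.objects.filter(open_magicnum=jsondata['magicnum']).update(
#         **bar_kwargs(jsondata, 'close', 1, 24))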
|
gpl-3.0
| -1,601,113,315,527,009,800 | 49.413008 | 135 | 0.546768 | false | 3.238692 | false | false | false |
diego-d5000/MisValesMd
|
env/lib/python2.7/site-packages/django/core/checks/model_checks.py
|
1
|
2454
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import inspect
import types
from django.apps import apps
from django.core.checks import Error, Tags, register
@register(Tags.models)
def check_all_models(app_configs=None, **kwargs):
errors = []
for model in apps.get_models():
if app_configs is None or model._meta.app_config in app_configs:
if not inspect.ismethod(model.check):
errors.append(
Error(
"The '%s.check()' class method is "
"currently overridden by %r." % (
model.__name__, model.check),
hint=None,
obj=model,
id='models.E020'
)
)
else:
errors.extend(model.check(**kwargs))
return errors
@register(Tags.models, Tags.signals)
def check_model_signals(app_configs=None, **kwargs):
"""
Ensure lazily referenced model signals senders are installed.
"""
# Avoid circular import
from django.db import models
errors = []
for name in dir(models.signals):
obj = getattr(models.signals, name)
if isinstance(obj, models.signals.ModelSignal):
for reference, receivers in obj.unresolved_references.items():
for receiver, _, _ in receivers:
# The receiver is either a function or an instance of class
# defining a `__call__` method.
if isinstance(receiver, types.FunctionType):
description = "The '%s' function" % receiver.__name__
else:
description = "An instance of the '%s' class" % receiver.__class__.__name__
errors.append(
Error(
"%s was connected to the '%s' signal "
"with a lazy reference to the '%s' sender, "
"which has not been installed." % (
description, name, '.'.join(reference)
),
obj=receiver.__module__,
hint=None,
id='signals.E001'
)
)
return errors
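# A minimal sketch of a project-level custom check using the same machinery
# (illustrative only; the check id and rule below are invented):
#
#     @register(Tags.models)
#     def check_model_name_length(app_configs=None, **kwargs):
#         errors = []
#         for model in apps.get_models():
#             if len(model.__name__) > 100:
#                 errors.append(Error(
#                     "Model name '%s' is too long." % model.__name__,
#                     hint=None, obj=model, id='myapp.E001'))
#         return errors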
|
mit
| 6,105,422,011,354,093,000 | 36.34375 | 99 | 0.467808 | false | 5.155462 | false | false | false |
ahmetcemturan/SFACT
|
skeinforge_application/skeinforge_plugins/craft_plugins/limit.py
|
1
|
8282
|
#! /usr/bin/env python
"""
This page is in the table of contents.
This plugin limits the feed rate of the tool head, so that the stepper motors are not driven too fast and skip steps.
The limit manual page is at:
http://fabmetheus.crsndoo.com/wiki/index.php/Skeinforge_Limit
The maximum z feed rate is defined in speed.
==Operation==
The default 'Activate Limit' checkbox is on. When it is on, the functions described below will work, when it is off, nothing will be done.
==Settings==
===Maximum Initial Feed Rate===
Default is one millimeter per second.
Defines the maximum speed of the inital tool head move.
==Examples==
The following examples limit the file Screw Holder Bottom.stl. The examples are run in a terminal in the folder which contains Screw Holder Bottom.stl and limit.py.
> python limit.py
This brings up the limit dialog.
> python limit.py Screw Holder Bottom.stl
The limit tool is parsing the file:
Screw Holder Bottom.stl
..
The limit tool has created the file:
.. Screw Holder Bottom_limit.gcode
"""
#Init has to be imported first because it has code to workaround the python bug where relative imports don't work if the module is imported as a main module.
import __init__
from datetime import date
from fabmetheus_utilities.fabmetheus_tools import fabmetheus_interpret
from fabmetheus_utilities.vector3 import Vector3
from fabmetheus_utilities import archive
from fabmetheus_utilities import euclidean
from fabmetheus_utilities import gcodec
from fabmetheus_utilities import intercircle
from fabmetheus_utilities import settings
from skeinforge_application.skeinforge_utilities import skeinforge_craft
from skeinforge_application.skeinforge_utilities import skeinforge_polyfile
from skeinforge_application.skeinforge_utilities import skeinforge_profile
import math
import os
import sys
__author__ = 'Enrique Perez (perez_enrique@yahoo.com)'
__date__ = '$Date: 2008/28/04 $'
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
def getCraftedText(fileName, gcodeText='', repository=None):
'Limit a gcode file or text.'
return getCraftedTextFromText( archive.getTextIfEmpty(fileName, gcodeText), repository )
def getCraftedTextFromText(gcodeText, repository=None):
'Limit a gcode text.'
if gcodec.isProcedureDoneOrFileIsEmpty(gcodeText, 'limit'):
return gcodeText
if repository == None:
repository = settings.getReadRepository(LimitRepository())
if not repository.activateLimit.value:
return gcodeText
return LimitSkein().getCraftedGcode(gcodeText, repository)
def getNewRepository():
'Get new repository.'
return LimitRepository()
def writeOutput(fileName, shouldAnalyze=True):
'Limit a gcode file.'
skeinforge_craft.writeChainTextWithNounMessage(fileName, 'limit', shouldAnalyze)
class LimitRepository:
'A class to handle the limit settings.'
def __init__(self):
'Set the default settings, execute title & settings fileName.'
skeinforge_profile.addListsToCraftTypeRepository('skeinforge_application.skeinforge_plugins.craft_plugins.limit.html', self )
self.fileNameInput = settings.FileNameInput().getFromFileName( fabmetheus_interpret.getGNUTranslatorGcodeFileTypeTuples(), 'Open File for Limit', self, '')
self.openWikiManualHelpPage = settings.HelpPage().getOpenFromAbsolute('http://fabmetheus.crsndoo.com/wiki/index.php/Skeinforge_Limit')
self.activateLimit = settings.BooleanSetting().getFromValue('Activate Limit', self, False)
self.maximumInitialFeedRate = settings.FloatSpin().getFromValue(0.5, 'Maximum Initial Feed Rate (mm/s):', self, 10.0, 1.0)
self.executeTitle = 'Limit'
def execute(self):
'Limit button has been clicked.'
fileNames = skeinforge_polyfile.getFileOrDirectoryTypesUnmodifiedGcode(self.fileNameInput.value, fabmetheus_interpret.getImportPluginFileNames(), self.fileNameInput.wasCancelled)
for fileName in fileNames:
writeOutput(fileName)
class LimitSkein:
'A class to limit a skein of extrusions.'
def __init__(self):
self.distanceFeedRate = gcodec.DistanceFeedRate()
self.feedRateMinute = None
self.lineIndex = 0
self.maximumZDrillFeedRatePerSecond = 987654321.0
self.maximumZFeedRatePerSecond = 2.0
self.oldLocation = None
def getCraftedGcode(self, gcodeText, repository):
'Parse gcode text and store the limit gcode.'
self.repository = repository
self.lines = archive.getTextLines(gcodeText)
self.parseInitialization()
self.maximumZDrillFeedRatePerSecond = min(self.maximumZDrillFeedRatePerSecond, self.maximumZFeedRatePerSecond)
self.maximumZCurrentFeedRatePerSecond = self.maximumZFeedRatePerSecond
for lineIndex in xrange(self.lineIndex, len(self.lines)):
self.parseLine( lineIndex )
return self.distanceFeedRate.output.getvalue()
def getLimitedInitialMovement(self, line, splitLine):
'Get a limited linear movement.'
if self.oldLocation == None:
line = self.distanceFeedRate.getLineWithFeedRate(60.0 * self.repository.maximumInitialFeedRate.value, line, splitLine)
return line
def getZLimitedLine(self, deltaZ, distance, line, splitLine):
'Get a replaced z limited gcode movement line.'
zFeedRateSecond = self.feedRateMinute * deltaZ / distance / 60.0
if zFeedRateSecond <= self.maximumZCurrentFeedRatePerSecond:
return line
limitedFeedRateMinute = self.feedRateMinute * self.maximumZCurrentFeedRatePerSecond / zFeedRateSecond
return self.distanceFeedRate.getLineWithFeedRate(limitedFeedRateMinute, line, splitLine)
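	# Worked example (assumed numbers): with feedRateMinute = 600 mm/min,
	# deltaZ = 1 mm and distance = 10 mm, zFeedRateSecond = 600 * 1 / 10 / 60
	# = 1.0 mm/s. If the z limit is 0.5 mm/s, the move is rescaled to
	# limitedFeedRateMinute = 600 * 0.5 / 1.0 = 300 mm/min.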
def getZLimitedLineArc(self, line, splitLine):
'Get a replaced z limited gcode arc movement line.'
self.feedRateMinute = gcodec.getFeedRateMinute(self.feedRateMinute, splitLine)
if self.feedRateMinute == None or self.oldLocation == None:
return line
relativeLocation = gcodec.getLocationFromSplitLine(self.oldLocation, splitLine)
self.oldLocation += relativeLocation
deltaZ = abs(relativeLocation.z)
distance = gcodec.getArcDistance(relativeLocation, splitLine)
return self.getZLimitedLine(deltaZ, distance, line, splitLine)
def getZLimitedLineLinear(self, line, location, splitLine):
'Get a replaced z limited gcode linear movement line.'
self.feedRateMinute = gcodec.getFeedRateMinute(self.feedRateMinute, splitLine)
if location == self.oldLocation:
return ''
if self.feedRateMinute == None or self.oldLocation == None:
return line
deltaZ = abs(location.z - self.oldLocation.z)
distance = abs(location - self.oldLocation)
return self.getZLimitedLine(deltaZ, distance, line, splitLine)
def parseInitialization(self):
'Parse gcode initialization and store the parameters.'
for self.lineIndex in xrange(len(self.lines)):
line = self.lines[self.lineIndex]
splitLine = gcodec.getSplitLineBeforeBracketSemicolon(line)
firstWord = gcodec.getFirstWord(splitLine)
self.distanceFeedRate.parseSplitLine(firstWord, splitLine)
if firstWord == '(</extruderInitialization>)':
self.distanceFeedRate.addTagBracketedProcedure('limit')
return
elif firstWord == '(<maximumZDrillFeedRatePerSecond>':
self.maximumZDrillFeedRatePerSecond = float(splitLine[1])
elif firstWord == '(<maximumZFeedRatePerSecond>':
self.maximumZFeedRatePerSecond = float(splitLine[1])
self.distanceFeedRate.addLine(line)
def parseLine( self, lineIndex ):
'Parse a gcode line and add it to the limit skein.'
line = self.lines[lineIndex].lstrip()
splitLine = gcodec.getSplitLineBeforeBracketSemicolon(line)
if len(splitLine) < 1:
return
firstWord = gcodec.getFirstWord(splitLine)
if firstWord == 'G1':
location = gcodec.getLocationFromSplitLine(self.oldLocation, splitLine)
line = self.getLimitedInitialMovement(line, splitLine)
line = self.getZLimitedLineLinear(line, location, splitLine)
self.oldLocation = location
elif firstWord == 'G2' or firstWord == 'G3':
line = self.getZLimitedLineArc(line, splitLine)
elif firstWord == 'M101':
self.maximumZCurrentFeedRatePerSecond = self.maximumZDrillFeedRatePerSecond
elif firstWord == 'M103':
self.maximumZCurrentFeedRatePerSecond = self.maximumZFeedRatePerSecond
self.distanceFeedRate.addLine(line)
def main():
'Display the limit dialog.'
if len(sys.argv) > 1:
writeOutput(' '.join(sys.argv[1 :]))
else:
settings.startMainLoopFromConstructor(getNewRepository())
if __name__ == '__main__':
main()
|
agpl-3.0
| 246,727,834,341,910,940 | 40 | 180 | 0.781574 | false | 3.328778 | false | false | false |
mfnch/pyrtist
|
old/web/in/examples/create_example.py
|
1
|
2754
|
import sys, os, os.path, commands, re
usage = "USAGE: python create_example.py box.example"
if len(sys.argv) != 2:
    raise ValueError("Expected one argument.\n" + usage)
example_file = sys.argv[1]
print "Working on '%s'..." % example_file
# Default values for variables which may be changed inside example_file
in_directory = ".."
box = "box -l g"
convert = "convert"
convert_opts = ""
highlight = "%s/../katehighlight/bin/highlight" % in_directory
rst_skeleton = "skeleton"
rst_out = None
title = None
description = None
figure_caption = None
box_source = None
out_eps = None
out_png = None
_f = open(example_file)
exec(_f)
_f.close()
if title is None:
title = "Box example: %s" % crumb
print "Removing old figure if present..."
if out_eps and os.access(out_eps, os.W_OK):
try:
os.remove(out_eps)
except:
print "Failed to remove the figure: continuing anyway..."
print "Executing the Box program..."
print commands.getoutput("%s %s" % (box, box_source))
have_figure = False
if out_eps and os.access(out_eps, os.R_OK):
print "Adjusting eps figure..."
out_png = os.path.splitext(out_eps)[0] + ".png"
print commands.getoutput("%s %s %s %s" %
(convert, convert_opts, out_eps, out_png))
print out_png
have_figure = os.access(out_png, os.R_OK)
if not have_figure:
    raise RuntimeError("The figure '%s' has not been produced: stopping here!" % out_png)
print "Highlighting the Box source..."
highlighted_source = "/tmp/h.html"
print commands.getoutput("%s Box %s %s" % (highlight, box_source, highlighted_source))
f = open(highlighted_source, "r")
htmlized_box_program = f.read()
f.close()
print "Opening the skeleton..."
f = open(rst_skeleton, "r")
data_skeleton = f.read()
f.close()
vars_dict = {
'title': title,
'description': description,
'crumb': crumb,
'box_file':box_source,
'figure_caption':figure_caption,
'image': out_png,
'htmlized_box_program': htmlized_box_program
}
r = re.compile("[$][^$]*[$]")
def substitutor(var):
try:
var_name = var.group(0)[1:-1]
except:
raise "Error when substituting variable."
if vars_dict.has_key(var_name):
return str(vars_dict[var_name])
print "WARNING: Variable '%s' not found!" % var_name
return var.group(0)
print "Filling the skeleton..."
out = re.sub(r, substitutor, data_skeleton)
f = open(rst_out, "w")
f.write(out)
f.close()
print "Output produced (%s)" % rst_out
print "Generating thumbnail..."
html_out = os.path.splitext(out_png)[0] + ".html"
out_thumb_png = "small_" + out_png
scale_opts = "-scale 100"
print commands.getoutput("%s %s %s %s"
% (convert, scale_opts, out_png, out_thumb_png))
f = open("thumbnails.dat", "a")
f.write("%s, %s\n" % (html_out, out_thumb_png))
f.close()
|
lgpl-2.1
| 1,265,988,056,238,007,300 | 24.738318 | 86 | 0.649601 | false | 2.932907 | false | false | false |
tkwon/dj-stripe
|
djstripe/migrations/0025_auto_20170322_0428.py
|
1
|
3906
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-22 04:28
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('djstripe', '0024_auto_20170308_0757'),
]
operations = [
migrations.AlterField(
model_name='account',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='account',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='charge',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='charge',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='customer',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='customer',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='event',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='event',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='eventprocessingexception',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='eventprocessingexception',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='invoice',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='invoice',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='invoiceitem',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='invoiceitem',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='plan',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='plan',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='stripesource',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='stripesource',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='subscription',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='subscription',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='transfer',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='transfer',
name='modified',
field=models.DateTimeField(auto_now=True),
),
]
|
mit
| 8,493,379,797,407,598,000 | 30.248 | 58 | 0.536354 | false | 4.746051 | false | false | false |
eqcorrscan/ci.testing
|
eqcorrscan/utils/stacking.py
|
1
|
6254
|
"""
Utility module of the EQcorrscan package to allow for different methods of \
stacking of seismic signal in one place.
:copyright:
EQcorrscan developers.
:license:
GNU Lesser General Public License, Version 3
(https://www.gnu.org/copyleft/lesser.html)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
from scipy.signal import hilbert
from copy import deepcopy
from eqcorrscan.core.match_filter import normxcorr2
def linstack(streams, normalize=True):
"""
Compute the linear stack of a series of seismic streams of \
multiplexed data.
:type streams: list
:param streams: List of streams to stack
:type normalize: bool
:param normalize: Normalize traces before stacking, normalizes by the RMS \
amplitude.
:returns: stacked data
:rtype: :class:`obspy.core.stream.Stream`
"""
stack = streams[np.argmax([len(stream) for stream in streams])].copy()
if normalize:
for tr in stack:
tr.data = tr.data / np.sqrt(np.mean(np.square(tr.data)))
tr.data = np.nan_to_num(tr.data)
for i in range(1, len(streams)):
for tr in stack:
matchtr = streams[i].select(station=tr.stats.station,
channel=tr.stats.channel)
if matchtr:
# Normalize the data before stacking
if normalize:
norm = matchtr[0].data /\
np.sqrt(np.mean(np.square(matchtr[0].data)))
norm = np.nan_to_num(norm)
else:
norm = matchtr[0].data
tr.data = np.sum((norm, tr.data), axis=0)
return stack
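# Example usage (a minimal sketch; the file paths are placeholders):
#
#     from obspy import read
#     streams = [read('/path/to/event1.ms'), read('/path/to/event2.ms')]
#     stack = linstack(streams, normalize=True)  # returns a single Stream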
def PWS_stack(streams, weight=2, normalize=True):
"""
Compute the phase weighted stack of a series of streams.
.. note:: It is recommended to align the traces before stacking.
:type streams: list
:param streams: List of :class:`obspy.core.stream.Stream` to stack.
:type weight: float
:param weight: Exponent to the phase stack used for weighting.
:type normalize: bool
:param normalize: Normalize traces before stacking.
:return: Stacked stream.
:rtype: :class:`obspy.core.stream.Stream`
"""
# First get the linear stack which we will weight by the phase stack
Linstack = linstack(streams)
# Compute the instantaneous phase
instaphases = []
print("Computing instantaneous phase")
for stream in streams:
instaphase = stream.copy()
for tr in instaphase:
analytic = hilbert(tr.data)
envelope = np.sqrt(np.sum((np.square(analytic),
np.square(tr.data)), axis=0))
tr.data = analytic / envelope
instaphases.append(instaphase)
# Compute the phase stack
print("Computing the phase stack")
Phasestack = linstack(instaphases, normalize=normalize)
# Compute the phase-weighted stack
for tr in Phasestack:
tr.data = Linstack.select(station=tr.stats.station)[0].data *\
np.abs(tr.data ** weight)
return Phasestack
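# Example usage (a minimal sketch; traces should be aligned first, e.g. with
# align_traces below):
#
#     pws = PWS_stack(streams, weight=2, normalize=True)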
def align_traces(trace_list, shift_len, master=False, positive=False,
plot=False):
"""
Align traces relative to each other based on their cross-correlation value.
Uses the :func:`obspy.signal.cross_correlation.xcorr` function to find the
optimum shift to align traces relative to a master event. Either uses a
given master to align traces, or uses the first trace in the list.
.. Note::
The cross-correlation function may yield an error/warning
about shift_len being too large: this is raised by the
:func:`obspy.signal.cross_correlation.xcorr` routine when the shift_len
is greater than half the length of either master or a trace, then
the correlation will not be robust. We may switch to a different
correlation routine later.
:type trace_list: list
:param trace_list: List of traces to align
:type shift_len: int
:param shift_len: Length to allow shifting within in samples
:type master: obspy.core.trace.Trace
:param master: Master trace to align to, if set to False will align to \
the largest amplitude trace (default)
:type positive: bool
:param positive: Return the maximum positive cross-correlation, or the \
absolute maximum, defaults to False (absolute maximum).
:type plot: bool
:param plot: If true, will plot each trace aligned with the master.
:returns: list of shifts and correlations for best alignment in seconds.
:rtype: list
"""
from eqcorrscan.utils.plotting import xcorr_plot
traces = deepcopy(trace_list)
if not master:
# Use trace with largest MAD amplitude as master
master = traces[0]
MAD_master = np.median(np.abs(master.data))
for i in range(1, len(traces)):
            if np.median(np.abs(traces[i].data)) > MAD_master:
master = traces[i]
MAD_master = np.median(np.abs(master.data))
else:
print('Using master given by user')
shifts = []
ccs = []
for i in range(len(traces)):
if not master.stats.sampling_rate == traces[i].stats.sampling_rate:
raise ValueError('Sampling rates not the same')
cc_vec = normxcorr2(template=traces[i].data.
astype(np.float32)[shift_len:-shift_len],
image=master.data.astype(np.float32))
cc_vec = cc_vec[0]
shift = np.abs(cc_vec).argmax()
cc = cc_vec[shift]
if plot:
xcorr_plot(template=traces[i].data.
astype(np.float32)[shift_len:-shift_len],
image=master.data.astype(np.float32), shift=shift,
cc=cc)
shift -= shift_len
if cc < 0 and positive:
cc = cc_vec.max()
shift = cc_vec.argmax() - shift_len
shifts.append(shift / master.stats.sampling_rate)
ccs.append(cc)
return shifts, ccs
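# Example usage (a minimal sketch; tr1..tr3 are obspy Trace objects sharing a
# sampling rate, shift_len is in samples):
#
#     shifts, ccs = align_traces([tr1, tr2, tr3], shift_len=50)
#     # shifts are returned in seconds relative to the chosen master trace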
if __name__ == "__main__":
import doctest
doctest.testmod()
|
lgpl-3.0
| 6,233,989,075,923,252,000 | 35.573099 | 79 | 0.624081 | false | 3.948232 | false | false | false |
gspilio/nova
|
nova/network/quantumv2/api.py
|
1
|
41934
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved
# Copyright (c) 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import time
from oslo.config import cfg
from nova.compute import instance_types
from nova import conductor
from nova import context
from nova.db import base
from nova import exception
from nova.network import api as network_api
from nova.network import model as network_model
from nova.network import quantumv2
from nova.network.security_group import openstack_driver
from nova.openstack.common import excutils
from nova.openstack.common import log as logging
from nova.openstack.common import uuidutils
quantum_opts = [
cfg.StrOpt('quantum_url',
default='http://127.0.0.1:9696',
help='URL for connecting to quantum'),
cfg.IntOpt('quantum_url_timeout',
default=30,
help='timeout value for connecting to quantum in seconds'),
cfg.StrOpt('quantum_admin_username',
help='username for connecting to quantum in admin context'),
cfg.StrOpt('quantum_admin_password',
help='password for connecting to quantum in admin context',
secret=True),
cfg.StrOpt('quantum_admin_tenant_name',
help='tenant name for connecting to quantum in admin context'),
cfg.StrOpt('quantum_region_name',
help='region name for connecting to quantum in admin context'),
cfg.StrOpt('quantum_admin_auth_url',
default='http://localhost:5000/v2.0',
help='auth url for connecting to quantum in admin context'),
cfg.BoolOpt('quantum_api_insecure',
default=False,
help='if set, ignore any SSL validation issues'),
cfg.StrOpt('quantum_auth_strategy',
default='keystone',
help='auth strategy for connecting to '
'quantum in admin context'),
# TODO(berrange) temporary hack until Quantum can pass over the
# name of the OVS bridge it is configured with
cfg.StrOpt('quantum_ovs_bridge',
default='br-int',
help='Name of Integration Bridge used by Open vSwitch'),
cfg.IntOpt('quantum_extension_sync_interval',
default=600,
help='Number of seconds before querying quantum for'
' extensions'),
]
CONF = cfg.CONF
CONF.register_opts(quantum_opts)
CONF.import_opt('default_floating_pool', 'nova.network.floating_ips')
CONF.import_opt('flat_injected', 'nova.network.manager')
LOG = logging.getLogger(__name__)
NET_EXTERNAL = 'router:external'
refresh_cache = network_api.refresh_cache
update_instance_info_cache = network_api.update_instance_cache_with_nw_info
class API(base.Base):
"""API for interacting with the quantum 2.x API."""
conductor_api = conductor.API()
security_group_api = openstack_driver.get_openstack_security_group_driver()
def __init__(self):
super(API, self).__init__()
self.last_quantum_extension_sync = None
self.extensions = {}
def setup_networks_on_host(self, context, instance, host=None,
teardown=False):
"""Setup or teardown the network structures."""
def _get_available_networks(self, context, project_id,
net_ids=None):
"""Return a network list available for the tenant.
The list contains networks owned by the tenant and public networks.
If net_ids specified, it searches networks with requested IDs only.
"""
quantum = quantumv2.get_client(context)
# If user has specified to attach instance only to specific
# networks, add them to **search_opts
# (1) Retrieve non-public network list owned by the tenant.
search_opts = {"tenant_id": project_id, 'shared': False}
if net_ids:
search_opts['id'] = net_ids
nets = quantum.list_networks(**search_opts).get('networks', [])
# (2) Retrieve public network list.
search_opts = {'shared': True}
if net_ids:
search_opts['id'] = net_ids
nets += quantum.list_networks(**search_opts).get('networks', [])
_ensure_requested_network_ordering(
lambda x: x['id'],
nets,
net_ids)
return nets
@refresh_cache
def allocate_for_instance(self, context, instance, **kwargs):
"""Allocate network resources for the instance.
TODO(someone): document the rest of these parameters.
:param macs: None or a set of MAC addresses that the instance
should use. macs is supplied by the hypervisor driver (contrast
with requested_networks which is user supplied).
NB: QuantumV2 currently assigns hypervisor supplied MAC addresses
to arbitrary networks, which requires openflow switches to
function correctly if more than one network is being used with
the bare metal hypervisor (which is the only one known to limit
MAC addresses).
"""
hypervisor_macs = kwargs.get('macs', None)
available_macs = None
if hypervisor_macs is not None:
# Make a copy we can mutate: records macs that have not been used
# to create a port on a network. If we find a mac with a
# pre-allocated port we also remove it from this set.
available_macs = set(hypervisor_macs)
quantum = quantumv2.get_client(context)
LOG.debug(_('allocate_for_instance() for %s'),
instance['display_name'])
if not instance['project_id']:
msg = _('empty project id for instance %s')
raise exception.InvalidInput(
reason=msg % instance['display_name'])
requested_networks = kwargs.get('requested_networks')
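        # Each entry of requested_networks is a (network_id, fixed_ip, port_id)
        # tuple; any element may be None.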
ports = {}
fixed_ips = {}
net_ids = []
if requested_networks:
for network_id, fixed_ip, port_id in requested_networks:
if port_id:
port = quantum.show_port(port_id)['port']
if hypervisor_macs is not None:
if port['mac_address'] not in hypervisor_macs:
raise exception.PortNotUsable(port_id=port_id,
instance=instance['display_name'])
else:
# Don't try to use this MAC if we need to create a
# port on the fly later. Identical MACs may be
# configured by users into multiple ports so we
# discard rather than popping.
available_macs.discard(port['mac_address'])
network_id = port['network_id']
ports[network_id] = port
elif fixed_ip and network_id:
fixed_ips[network_id] = fixed_ip
if network_id:
net_ids.append(network_id)
nets = self._get_available_networks(context, instance['project_id'],
net_ids)
security_groups = kwargs.get('security_groups', [])
security_group_ids = []
# TODO(arosen) Should optimize more to do direct query for security
# group if len(security_groups) == 1
if len(security_groups):
search_opts = {'tenant_id': instance['project_id']}
user_security_groups = quantum.list_security_groups(
**search_opts).get('security_groups')
for security_group in security_groups:
name_match = None
uuid_match = None
for user_security_group in user_security_groups:
if user_security_group['name'] == security_group:
if name_match:
msg = (_("Multiple security groups found matching"
" '%s'. Use an ID to be more specific."),
security_group)
raise exception.NoUniqueMatch(msg)
name_match = user_security_group['id']
if user_security_group['id'] == security_group:
uuid_match = user_security_group['id']
# If a user names the security group the same as
# another's security groups uuid, the name takes priority.
                if not name_match and not uuid_match:
                    raise exception.SecurityGroupNotFound(
                        security_group_id=security_group)
                elif name_match:
                    security_group_ids.append(name_match)
                elif uuid_match:
                    security_group_ids.append(uuid_match)
touched_port_ids = []
created_port_ids = []
for network in nets:
# If security groups are requested on an instance then the
            # network must have a subnet associated with it. Some plugins
# implement the port-security extension which requires
# 'port_security_enabled' to be True for security groups.
# That is why True is returned if 'port_security_enabled'
# is not found.
if (security_groups and not (
network['subnets']
and network.get('port_security_enabled', True))):
raise exception.SecurityGroupCannotBeApplied()
network_id = network['id']
zone = 'compute:%s' % instance['availability_zone']
port_req_body = {'port': {'device_id': instance['uuid'],
'device_owner': zone}}
try:
port = ports.get(network_id)
if port:
quantum.update_port(port['id'], port_req_body)
touched_port_ids.append(port['id'])
else:
fixed_ip = fixed_ips.get(network_id)
if fixed_ip:
port_req_body['port']['fixed_ips'] = [{'ip_address':
fixed_ip}]
port_req_body['port']['network_id'] = network_id
port_req_body['port']['admin_state_up'] = True
port_req_body['port']['tenant_id'] = instance['project_id']
if security_group_ids:
port_req_body['port']['security_groups'] = (
security_group_ids)
if available_macs is not None:
if not available_macs:
raise exception.PortNotFree(
instance=instance['display_name'])
mac_address = available_macs.pop()
port_req_body['port']['mac_address'] = mac_address
self._populate_quantum_extension_values(instance,
port_req_body)
created_port_ids.append(
quantum.create_port(port_req_body)['port']['id'])
except Exception:
with excutils.save_and_reraise_exception():
for port_id in touched_port_ids:
port_in_server = quantum.show_port(port_id).get('port')
if not port_in_server:
raise Exception(_('Port not found'))
port_req_body = {'port': {'device_id': None}}
quantum.update_port(port_id, port_req_body)
for port_id in created_port_ids:
try:
quantum.delete_port(port_id)
except Exception as ex:
msg = _("Fail to delete port %(portid)s with"
" failure: %(exception)s")
LOG.debug(msg, {'portid': port_id,
'exception': ex})
self.trigger_security_group_members_refresh(context, instance)
self.trigger_instance_add_security_group_refresh(context, instance)
nw_info = self._get_instance_nw_info(context, instance, networks=nets)
# NOTE(danms): Only return info about ports we created in this run.
# In the initial allocation case, this will be everything we created,
# and in later runs will only be what was created that time. Thus,
# this only affects the attach case, not the original use for this
# method.
return network_model.NetworkInfo([port for port in nw_info
if port['id'] in created_port_ids +
touched_port_ids])
def _refresh_quantum_extensions_cache(self):
if (not self.last_quantum_extension_sync or
((time.time() - self.last_quantum_extension_sync)
>= CONF.quantum_extension_sync_interval)):
quantum = quantumv2.get_client(context.get_admin_context())
extensions_list = quantum.list_extensions()['extensions']
self.last_quantum_extension_sync = time.time()
self.extensions.clear()
self.extensions = dict((ext['name'], ext)
for ext in extensions_list)
def _populate_quantum_extension_values(self, instance, port_req_body):
self._refresh_quantum_extensions_cache()
if 'nvp-qos' in self.extensions:
instance_type = instance_types.extract_instance_type(instance)
rxtx_factor = instance_type.get('rxtx_factor')
port_req_body['port']['rxtx_factor'] = rxtx_factor
def deallocate_for_instance(self, context, instance, **kwargs):
"""Deallocate all network resources related to the instance."""
LOG.debug(_('deallocate_for_instance() for %s'),
instance['display_name'])
search_opts = {'device_id': instance['uuid']}
data = quantumv2.get_client(context).list_ports(**search_opts)
ports = data.get('ports', [])
for port in ports:
try:
quantumv2.get_client(context).delete_port(port['id'])
except Exception as ex:
LOG.exception(_("Failed to delete quantum port %(portid)s ")
% {'portid': port['id']})
self.trigger_security_group_members_refresh(context, instance)
self.trigger_instance_remove_security_group_refresh(context, instance)
@refresh_cache
def allocate_port_for_instance(self, context, instance, port_id,
network_id=None, requested_ip=None,
conductor_api=None):
return self.allocate_for_instance(context, instance,
requested_networks=[(network_id, requested_ip, port_id)],
conductor_api=conductor_api)
@refresh_cache
def deallocate_port_for_instance(self, context, instance, port_id,
conductor_api=None):
try:
quantumv2.get_client(context).delete_port(port_id)
except Exception as ex:
LOG.exception(_("Failed to delete quantum port %(port_id)s ") %
locals())
self.trigger_security_group_members_refresh(context, instance)
self.trigger_instance_remove_security_group_refresh(context, instance)
return self._get_instance_nw_info(context, instance)
def list_ports(self, context, **search_opts):
return quantumv2.get_client(context).list_ports(**search_opts)
def show_port(self, context, port_id):
return quantumv2.get_client(context).show_port(port_id)
def get_instance_nw_info(self, context, instance, conductor_api=None,
networks=None):
result = self._get_instance_nw_info(context, instance, networks)
update_instance_info_cache(self, context, instance, result,
conductor_api)
return result
def _get_instance_nw_info(self, context, instance, networks=None):
LOG.debug(_('get_instance_nw_info() for %s'),
instance['display_name'])
nw_info = self._build_network_info_model(context, instance, networks)
return network_model.NetworkInfo.hydrate(nw_info)
@refresh_cache
def add_fixed_ip_to_instance(self, context, instance, network_id,
conductor_api=None):
"""Add a fixed ip to the instance from specified network."""
search_opts = {'network_id': network_id}
data = quantumv2.get_client(context).list_subnets(**search_opts)
ipam_subnets = data.get('subnets', [])
if not ipam_subnets:
raise exception.NetworkNotFoundForInstance(
instance_id=instance['uuid'])
zone = 'compute:%s' % instance['availability_zone']
search_opts = {'device_id': instance['uuid'],
'device_owner': zone,
'network_id': network_id}
data = quantumv2.get_client(context).list_ports(**search_opts)
ports = data['ports']
for p in ports:
for subnet in ipam_subnets:
fixed_ips = p['fixed_ips']
fixed_ips.append({'subnet_id': subnet['id']})
port_req_body = {'port': {'fixed_ips': fixed_ips}}
try:
quantumv2.get_client(context).update_port(p['id'],
port_req_body)
return
except Exception as ex:
msg = _("Unable to update port %(portid)s on subnet "
"%(subnet_id)s with failure: %(exception)s")
LOG.debug(msg, {'portid': p['id'],
'subnet_id': subnet['id'],
'exception': ex})
raise exception.NetworkNotFoundForInstance(
instance_id=instance['uuid'])
@refresh_cache
def remove_fixed_ip_from_instance(self, context, instance, address,
conductor_api=None):
"""Remove a fixed ip from the instance."""
zone = 'compute:%s' % instance['availability_zone']
search_opts = {'device_id': instance['uuid'],
'device_owner': zone,
'fixed_ips': 'ip_address=%s' % address}
data = quantumv2.get_client(context).list_ports(**search_opts)
ports = data['ports']
for p in ports:
fixed_ips = p['fixed_ips']
new_fixed_ips = []
for fixed_ip in fixed_ips:
if fixed_ip['ip_address'] != address:
new_fixed_ips.append(fixed_ip)
port_req_body = {'port': {'fixed_ips': new_fixed_ips}}
try:
quantumv2.get_client(context).update_port(p['id'],
port_req_body)
except Exception as ex:
msg = _("Unable to update port %(portid)s with"
" failure: %(exception)s")
LOG.debug(msg, {'portid': p['id'], 'exception': ex})
return
raise exception.FixedIpNotFoundForSpecificInstance(
instance_uuid=instance['uuid'], ip=address)
def validate_networks(self, context, requested_networks):
"""Validate that the tenant can use the requested networks."""
LOG.debug(_('validate_networks() for %s'),
requested_networks)
if not requested_networks:
return
net_ids = []
for (net_id, _i, port_id) in requested_networks:
if not port_id:
net_ids.append(net_id)
continue
port = quantumv2.get_client(context).show_port(port_id).get('port')
if not port:
raise exception.PortNotFound(port_id=port_id)
if port.get('device_id', None):
raise exception.PortInUse(port_id=port_id)
net_id = port['network_id']
if net_id in net_ids:
raise exception.NetworkDuplicated(network_id=net_id)
net_ids.append(net_id)
nets = self._get_available_networks(context, context.project_id,
net_ids)
if len(nets) != len(net_ids):
            requested_netid_set = set(net_ids)
            returned_netid_set = set([net['id'] for net in nets])
            lostid_set = requested_netid_set - returned_netid_set
id_str = ''
for _id in lostid_set:
id_str = id_str and id_str + ', ' + _id or _id
raise exception.NetworkNotFound(network_id=id_str)
def _get_instance_uuids_by_ip(self, context, address):
"""Retrieve instance uuids associated with the given ip address.
:returns: A list of dicts containing the uuids keyed by 'instance_uuid'
e.g. [{'instance_uuid': uuid}, ...]
"""
search_opts = {"fixed_ips": 'ip_address=%s' % address}
data = quantumv2.get_client(context).list_ports(**search_opts)
ports = data.get('ports', [])
return [{'instance_uuid': port['device_id']} for port in ports
if port['device_id']]
def get_instance_uuids_by_ip_filter(self, context, filters):
"""Return a list of dicts in the form of
[{'instance_uuid': uuid}] that matched the ip filter.
"""
# filters['ip'] is composed as '^%s$' % fixed_ip.replace('.', '\\.')
ip = filters.get('ip')
# we remove ^$\ in the ip filer
if ip[0] == '^':
ip = ip[1:]
if ip[-1] == '$':
ip = ip[:-1]
ip = ip.replace('\\.', '.')
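        # e.g. the regex filter '^10\.0\.0\.2$' is reduced to the plain
        # address '10.0.0.2' before the port lookup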
return self._get_instance_uuids_by_ip(context, ip)
def trigger_instance_add_security_group_refresh(self, context,
instance_ref):
admin_context = context.elevated()
for group in instance_ref['security_groups']:
self.conductor_api.security_groups_trigger_handler(context,
'instance_add_security_group', instance_ref, group['name'])
def trigger_instance_remove_security_group_refresh(self, context,
instance_ref):
admin_context = context.elevated()
for group in instance_ref['security_groups']:
self.conductor_api.security_groups_trigger_handler(context,
'instance_remove_security_group', instance_ref, group['name'])
def trigger_security_group_members_refresh(self, context, instance_ref):
admin_context = context.elevated()
group_ids = [group['id'] for group in instance_ref['security_groups']]
self.conductor_api.security_groups_trigger_members_refresh(
admin_context, group_ids)
self.conductor_api.security_groups_trigger_handler(admin_context,
'security_group_members', group_ids)
def _get_port_id_by_fixed_address(self, client,
instance, address):
zone = 'compute:%s' % instance['availability_zone']
search_opts = {'device_id': instance['uuid'],
'device_owner': zone}
data = client.list_ports(**search_opts)
ports = data['ports']
port_id = None
for p in ports:
for ip in p['fixed_ips']:
if ip['ip_address'] == address:
port_id = p['id']
break
if not port_id:
raise exception.FixedIpNotFoundForAddress(address=address)
return port_id
@refresh_cache
def associate_floating_ip(self, context, instance,
floating_address, fixed_address,
affect_auto_assigned=False):
"""Associate a floating ip with a fixed ip."""
# Note(amotoki): 'affect_auto_assigned' is not respected
        # since it is not used anywhere in nova code and I could not
        # find why this parameter exists.
client = quantumv2.get_client(context)
port_id = self._get_port_id_by_fixed_address(client, instance,
fixed_address)
fip = self._get_floating_ip_by_address(client, floating_address)
param = {'port_id': port_id,
'fixed_ip_address': fixed_address}
client.update_floatingip(fip['id'], {'floatingip': param})
def get_all(self, context):
client = quantumv2.get_client(context)
networks = client.list_networks().get('networks') or {}
for network in networks:
network['label'] = network['name']
return networks
def get(self, context, network_uuid):
client = quantumv2.get_client(context)
network = client.show_network(network_uuid).get('network') or {}
network['label'] = network['name']
return network
def delete(self, context, network_uuid):
raise NotImplementedError()
def disassociate(self, context, network_uuid):
raise NotImplementedError()
def get_fixed_ip(self, context, id):
raise NotImplementedError()
def get_fixed_ip_by_address(self, context, address):
uuid_maps = self._get_instance_uuids_by_ip(context, address)
if len(uuid_maps) == 1:
return uuid_maps[0]
elif not uuid_maps:
raise exception.FixedIpNotFoundForAddress(address=address)
else:
raise exception.FixedIpAssociatedWithMultipleInstances(
address=address)
def _setup_net_dict(self, client, network_id):
if not network_id:
return {}
pool = client.show_network(network_id)['network']
return {pool['id']: pool}
def _setup_port_dict(self, client, port_id):
if not port_id:
return {}
port = client.show_port(port_id)['port']
return {port['id']: port}
def _setup_pools_dict(self, client):
pools = self._get_floating_ip_pools(client)
return dict([(i['id'], i) for i in pools])
def _setup_ports_dict(self, client, project_id=None):
search_opts = {'tenant_id': project_id} if project_id else {}
ports = client.list_ports(**search_opts)['ports']
return dict([(p['id'], p) for p in ports])
def get_floating_ip(self, context, id):
client = quantumv2.get_client(context)
fip = client.show_floatingip(id)['floatingip']
pool_dict = self._setup_net_dict(client,
fip['floating_network_id'])
port_dict = self._setup_port_dict(client, fip['port_id'])
return self._format_floating_ip_model(fip, pool_dict, port_dict)
def _get_floating_ip_pools(self, client, project_id=None):
search_opts = {NET_EXTERNAL: True}
if project_id:
search_opts.update({'tenant_id': project_id})
data = client.list_networks(**search_opts)
return data['networks']
def get_floating_ip_pools(self, context):
client = quantumv2.get_client(context)
pools = self._get_floating_ip_pools(client)
return [{'name': n['name'] or n['id']} for n in pools]
def _format_floating_ip_model(self, fip, pool_dict, port_dict):
pool = pool_dict[fip['floating_network_id']]
result = {'id': fip['id'],
'address': fip['floating_ip_address'],
'pool': pool['name'] or pool['id'],
'project_id': fip['tenant_id'],
# In Quantum v2, an exact fixed_ip_id does not exist.
'fixed_ip_id': fip['port_id'],
}
# In Quantum v2 API fixed_ip_address and instance uuid
# (= device_id) are known here, so pass it as a result.
result['fixed_ip'] = {'address': fip['fixed_ip_address']}
if fip['port_id']:
instance_uuid = port_dict[fip['port_id']]['device_id']
result['instance'] = {'uuid': instance_uuid}
else:
result['instance'] = None
return result
def get_floating_ip_by_address(self, context, address):
client = quantumv2.get_client(context)
fip = self._get_floating_ip_by_address(client, address)
pool_dict = self._setup_net_dict(client,
fip['floating_network_id'])
port_dict = self._setup_port_dict(client, fip['port_id'])
return self._format_floating_ip_model(fip, pool_dict, port_dict)
def get_floating_ips_by_project(self, context):
client = quantumv2.get_client(context)
project_id = context.project_id
fips = client.list_floatingips(tenant_id=project_id)['floatingips']
pool_dict = self._setup_pools_dict(client)
port_dict = self._setup_ports_dict(client, project_id)
return [self._format_floating_ip_model(fip, pool_dict, port_dict)
for fip in fips]
def get_floating_ips_by_fixed_address(self, context, fixed_address):
return []
def get_instance_id_by_floating_address(self, context, address):
"""Returns the instance id a floating ip's fixed ip is allocated to."""
client = quantumv2.get_client(context)
fip = self._get_floating_ip_by_address(client, address)
if not fip['port_id']:
return None
port = client.show_port(fip['port_id'])['port']
return port['device_id']
def get_vifs_by_instance(self, context, instance):
raise NotImplementedError()
def get_vif_by_mac_address(self, context, mac_address):
raise NotImplementedError()
def _get_floating_ip_pool_id_by_name_or_id(self, client, name_or_id):
search_opts = {NET_EXTERNAL: True, 'fields': 'id'}
if uuidutils.is_uuid_like(name_or_id):
search_opts.update({'id': name_or_id})
else:
search_opts.update({'name': name_or_id})
data = client.list_networks(**search_opts)
nets = data['networks']
if len(nets) == 1:
return nets[0]['id']
elif len(nets) == 0:
raise exception.FloatingIpPoolNotFound()
else:
msg = (_("Multiple floating IP pools matches found for name '%s'")
% name_or_id)
raise exception.NovaException(message=msg)
def allocate_floating_ip(self, context, pool=None):
"""Add a floating ip to a project from a pool."""
client = quantumv2.get_client(context)
pool = pool or CONF.default_floating_pool
pool_id = self._get_floating_ip_pool_id_by_name_or_id(client, pool)
# TODO(amotoki): handle exception during create_floatingip()
# At this timing it is ensured that a network for pool exists.
# quota error may be returned.
param = {'floatingip': {'floating_network_id': pool_id}}
fip = client.create_floatingip(param)
return fip['floatingip']['floating_ip_address']
def _get_floating_ip_by_address(self, client, address):
"""Get floatingip from floating ip address."""
data = client.list_floatingips(floating_ip_address=address)
fips = data['floatingips']
if len(fips) == 0:
raise exception.FloatingIpNotFoundForAddress(address=address)
elif len(fips) > 1:
raise exception.FloatingIpMultipleFoundForAddress(address=address)
return fips[0]
def _get_floating_ips_by_fixed_and_port(self, client, fixed_ip, port):
"""Get floatingips from fixed ip and port."""
data = client.list_floatingips(fixed_ip_address=fixed_ip, port_id=port)
return data['floatingips']
def release_floating_ip(self, context, address,
affect_auto_assigned=False):
"""Remove a floating ip with the given address from a project."""
# Note(amotoki): We cannot handle a case where multiple pools
# have overlapping IP address range. In this case we cannot use
# 'address' as a unique key.
# This is a limitation of the current nova.
# Note(amotoki): 'affect_auto_assigned' is not respected
        # since it is not used anywhere in nova code and I could not
        # find why this parameter exists.
client = quantumv2.get_client(context)
fip = self._get_floating_ip_by_address(client, address)
if fip['port_id']:
raise exception.FloatingIpAssociated(address=address)
client.delete_floatingip(fip['id'])
@refresh_cache
def disassociate_floating_ip(self, context, instance, address,
affect_auto_assigned=False):
"""Disassociate a floating ip from the instance."""
# Note(amotoki): 'affect_auto_assigned' is not respected
        # since it is not used anywhere in nova code and I could not
        # find why this parameter exists.
client = quantumv2.get_client(context)
fip = self._get_floating_ip_by_address(client, address)
client.update_floatingip(fip['id'], {'floatingip': {'port_id': None}})
def migrate_instance_start(self, context, instance, migration):
"""Start to migrate the network of an instance."""
# NOTE(wenjianhn): just pass to make migrate instance doesn't
# raise for now.
pass
def migrate_instance_finish(self, context, instance, migration):
"""Finish migrating the network of an instance."""
# NOTE(wenjianhn): just pass to make migrate instance doesn't
# raise for now.
pass
def add_network_to_project(self, context, project_id, network_uuid=None):
"""Force add a network to the project."""
raise NotImplementedError()
def _build_network_info_model(self, context, instance, networks=None):
search_opts = {'tenant_id': instance['project_id'],
'device_id': instance['uuid'], }
client = quantumv2.get_client(context, admin=True)
data = client.list_ports(**search_opts)
ports = data.get('ports', [])
if networks is None:
networks = self._get_available_networks(context,
instance['project_id'])
else:
# ensure ports are in preferred network order
_ensure_requested_network_ordering(
lambda x: x['network_id'],
ports,
[n['id'] for n in networks])
nw_info = network_model.NetworkInfo()
for port in ports:
network_name = None
for net in networks:
if port['network_id'] == net['id']:
network_name = net['name']
break
if network_name is None:
raise exception.NotFound(_('Network %(net)s for '
'port %(port_id)s not found!') %
{'net': port['network_id'],
'port': port['id']})
network_IPs = []
for fixed_ip in port['fixed_ips']:
fixed = network_model.FixedIP(address=fixed_ip['ip_address'])
floats = self._get_floating_ips_by_fixed_and_port(
client, fixed_ip['ip_address'], port['id'])
for ip in floats:
fip = network_model.IP(address=ip['floating_ip_address'],
type='floating')
fixed.add_floating_ip(fip)
network_IPs.append(fixed)
subnets = self._get_subnets_from_port(context, port)
for subnet in subnets:
subnet['ips'] = [fixed_ip for fixed_ip in network_IPs
if fixed_ip.is_in_subnet(subnet)]
bridge = None
ovs_interfaceid = None
# Network model metadata
should_create_bridge = None
vif_type = port.get('binding:vif_type')
# TODO(berrange) Quantum should pass the bridge name
# in another binding metadata field
if vif_type == network_model.VIF_TYPE_OVS:
bridge = CONF.quantum_ovs_bridge
ovs_interfaceid = port['id']
elif vif_type == network_model.VIF_TYPE_BRIDGE:
bridge = "brq" + port['network_id']
should_create_bridge = True
if bridge is not None:
bridge = bridge[:network_model.NIC_NAME_LEN]
devname = "tap" + port['id']
devname = devname[:network_model.NIC_NAME_LEN]
network = network_model.Network(
id=port['network_id'],
bridge=bridge,
injected=CONF.flat_injected,
label=network_name,
tenant_id=net['tenant_id']
)
network['subnets'] = subnets
if should_create_bridge is not None:
network['should_create_bridge'] = should_create_bridge
nw_info.append(network_model.VIF(
id=port['id'],
address=port['mac_address'],
network=network,
type=port.get('binding:vif_type'),
ovs_interfaceid=ovs_interfaceid,
devname=devname))
return nw_info
def _get_subnets_from_port(self, context, port):
"""Return the subnets for a given port."""
fixed_ips = port['fixed_ips']
# No fixed_ips for the port means there is no subnet associated
# with the network the port is created on.
# Since list_subnets(id=[]) returns all subnets visible for the
# current tenant, returned subnets may contain subnets which is not
# related to the port. To avoid this, the method returns here.
if not fixed_ips:
return []
search_opts = {'id': [ip['subnet_id'] for ip in fixed_ips]}
data = quantumv2.get_client(context).list_subnets(**search_opts)
ipam_subnets = data.get('subnets', [])
subnets = []
for subnet in ipam_subnets:
subnet_dict = {'cidr': subnet['cidr'],
'gateway': network_model.IP(
address=subnet['gateway_ip'],
type='gateway'),
}
# attempt to populate DHCP server field
search_opts = {'network_id': subnet['network_id'],
'device_owner': 'network:dhcp'}
data = quantumv2.get_client(context).list_ports(**search_opts)
dhcp_ports = data.get('ports', [])
for p in dhcp_ports:
for ip_pair in p['fixed_ips']:
if ip_pair['subnet_id'] == subnet['id']:
subnet_dict['dhcp_server'] = ip_pair['ip_address']
break
subnet_object = network_model.Subnet(**subnet_dict)
for dns in subnet.get('dns_nameservers', []):
subnet_object.add_dns(
network_model.IP(address=dns, type='dns'))
# TODO(gongysh) get the routes for this subnet
subnets.append(subnet_object)
return subnets
def get_dns_domains(self, context):
"""Return a list of available dns domains.
These can be used to create DNS entries for floating ips.
"""
raise NotImplementedError()
def add_dns_entry(self, context, address, name, dns_type, domain):
"""Create specified DNS entry for address."""
raise NotImplementedError()
def modify_dns_entry(self, context, name, address, domain):
"""Create specified DNS entry for address."""
raise NotImplementedError()
def delete_dns_entry(self, context, name, domain):
"""Delete the specified dns entry."""
raise NotImplementedError()
def delete_dns_domain(self, context, domain):
"""Delete the specified dns domain."""
raise NotImplementedError()
def get_dns_entries_by_address(self, context, address, domain):
"""Get entries for address and domain."""
raise NotImplementedError()
def get_dns_entries_by_name(self, context, name, domain):
"""Get entries for name and domain."""
raise NotImplementedError()
def create_private_dns_domain(self, context, domain, availability_zone):
"""Create a private DNS domain with nova availability zone."""
raise NotImplementedError()
def create_public_dns_domain(self, context, domain, project=None):
"""Create a private DNS domain with optional nova project."""
raise NotImplementedError()
def _ensure_requested_network_ordering(accessor, unordered, preferred):
"""Sort a list with respect to the preferred network ordering."""
if preferred:
unordered.sort(key=lambda i: preferred.index(accessor(i)))
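# Example (a sketch of the helper above): reorder fetched networks to match
# the requested IDs.
#
#     nets = [{'id': 'net-b'}, {'id': 'net-a'}]
#     _ensure_requested_network_ordering(lambda n: n['id'], nets,
#                                        ['net-a', 'net-b'])
#     # nets is now [{'id': 'net-a'}, {'id': 'net-b'}]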
|
apache-2.0
| 8,649,179,692,638,891,000 | 43.374603 | 79 | 0.563147 | false | 4.311536 | false | false | false |
Chealion/yycbike
|
archive/weatherLoad.py
|
1
|
6271
|
#! /usr/bin/python
# :set tabstop=4 shiftwidth=4 expandtab
# Downloads Environment Canada data and sends the data to Graphite. Additionally logs the data to a file we can use to import later
import csv
import time
import graphitesend
import urllib2
from datetime import date, timedelta
import datetime
graphitesend.init(graphite_server='localhost',prefix='yycbike',system_name='')
metriclog = open('/home/ubuntu/devmetriclog.log', 'a')
# Watch out for timezones - this script fails to function past 5 PM MST.
yesterday = date.today() - timedelta(1)
year = yesterday.strftime('%Y')
month = yesterday.strftime('%m')
day = yesterday.strftime('%d')
#Installations
# URLs per ftp://ftp.tor.ec.gc.ca/Pub/Get_More_Data_Plus_de_donnees/Readme.txt
HOURLY_URL='http://climate.weather.gc.ca/climate_data/bulk_data_e.html?format=csv&stationID=50430&Year=' + year + '&Month=' + month + '&Day=' + day + '&submit=Download+Data&timeframe=1'
DAILY_URL= 'http://climate.weather.gc.ca/climate_data/bulk_data_e.html?format=csv&stationID=50430&Year=' + year + '&Month=' + month + '&Day=' + day + '&submit=Download+Data&timeframe=2'
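# e.g. with yesterday = 2017-03-14, HOURLY_URL ends in
# "...stationID=50430&Year=2017&Month=03&Day=14&submit=Download+Data&timeframe=1"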
## HOURLY
url = HOURLY_URL
print 'Loading Hourly Weather Data...'
response = urllib2.urlopen(url)
csv_data = response.read()
# Delete first 17 lines - up to and including the header line
cleaned_data = '\n'.join(csv_data.split('\n')[17:])
# split into list, and use non unicode field names
csv_reader = csv.DictReader(cleaned_data.split('\n'), fieldnames=['Date', 'Year', 'Month', 'Day', 'Time', 'Quality', 'Temp', 'TempFlag', 'DewPoint', 'DewPointFlag', 'Humidity', 'HumFlag', 'WindDir', 'WindFlag', 'WindSpd', 'WindFlg', 'Visibility', 'VisFlag', 'Pressure', 'PressFlag', 'Humidex', 'HmdxFlag', 'WindChill', 'WindChillFlag', 'Weather'])
for row in csv_reader:
#Create timestamp
timestamp = time.mktime(datetime.datetime.strptime(row['Date'], "%Y-%m-%d %H:%M").timetuple())
yesterday_timestamp = float(yesterday.strftime('%s'))
#Ignore any data "newer" than yesterday. Data that doesn't exist yet.
if timestamp > yesterday_timestamp:
break
else:
timestamp = str(int(timestamp))
#print row
# Data Cleaning - Wind Chill or Humidex - merge
if row['Temp'] is None or row['Temp'] == '':
continue
if row['Humidex'] == '' and row['WindChill'] == '':
feelslike = row['Temp']
elif row['Humidex'] == '':
feelslike = row['WindChill']
else:
feelslike = row['Humidex']
if row['WindSpd'] == '':
row['WindSpd'] = 0
if row['WindDir'] == '':
row['WindDir'] = 0
metric_string = 'weather.hourly.temp ' + str(row['Temp']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.hourly.temp', str(row['Temp']), timestamp)
metric_string = 'weather.hourly.windspeed ' + str(row['WindSpd']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.hourly.windspeed', str(row['WindSpd']), timestamp)
metric_string = 'weather.hourly.winddir ' + str(row['WindDir']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.hourly.winddir', str(row['WindDir']), timestamp)
metric_string = 'weather.hourly.humidity ' + str(row['Humidity']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.hourly.humidity', str(row['Humidity']), timestamp)
metric_string = 'weather.hourly.feelslike ' + str(feelslike) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.hourly.feelslike', str(feelslike), timestamp)
## DAILY
url = DAILY_URL
print 'Loading Daily Weather Data...'
response = urllib2.urlopen(url)
csv_data = response.read()
# Delete first 26 lines - up to and including header line
cleaned_data = '\n'.join(csv_data.split('\n')[26:])
# split into list, and use non unicode field names
# Note: the ground-snow flag column gets its own field name so it does not
# collide with the 'SnowFlag' key earlier in the row.
csv_reader = csv.DictReader(cleaned_data.split('\n'), fieldnames=['Date', 'Year', 'Month', 'Day', 'Quality', 'Max', 'MaxFlag', 'Min', 'MinFlag', 'Mean', 'MeanFlag', 'Heat1', 'Heat2', 'Heat3', 'Heat4', 'Rain', 'RainFlag', 'Snow', 'SnowFlag', 'TotalPrecip', 'PrecipFlag', 'SnowonGround', 'SnowGroundFlag', 'Wind1', 'Wind2', 'Wind3', 'Wind4'])
for row in csv_reader:
#Create timestamp
timestamp = time.mktime(datetime.datetime.strptime(row['Date'], "%Y-%m-%d").timetuple())
yesterday_timestamp = float(yesterday.strftime('%s'))
#Ignore any data "newer" than yesterday. Data that doesn't exist yet.
if timestamp > yesterday_timestamp:
break
else:
timestamp = str(int(timestamp))
#print row
if row['Max'] is None or row['Max'] == '' or row['Min'] == '':
continue
metric_string = 'weather.daily.high ' + str(row['Max']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.daily.high', str(row['Max']), timestamp)
metric_string = 'weather.daily.low ' + str(row['Min']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.daily.low', str(row['Min']), timestamp)
metric_string = 'weather.daily.mean ' + str(row['Mean']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.daily.mean', str(row['Mean']), timestamp)
# Data Cleaning
if row['TotalPrecip'] == '':
row['TotalPrecip'] = 0
metric_string = 'weather.daily.precip ' + str(row['TotalPrecip']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.daily.precip', str(row['TotalPrecip']), timestamp)
# Data Cleaning
if row['SnowonGround'] == '':
row['SnowonGround'] = 0
metric_string = 'weather.daily.snowamt ' + str(row['SnowonGround']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.daily.snowamt', str(row['SnowonGround']), timestamp)
# OUTPUT FORMAT:
# <metric path> <metric value> <metric timestamp>
# yycbike.peacebridge.north.trips 5 123456789
metriclog.close()
print 'Done.'
|
mit
| 2,237,672,961,989,469,700 | 39.986928 | 346 | 0.635784 | false | 3.267848 | false | false | false |
Ziqi-Li/bknqgis
|
bokeh/bokeh/server/server.py
|
1
|
10467
|
''' Provides a Server which instantiates Application instances as clients connect
'''
from __future__ import absolute_import, print_function
import atexit
import logging
log = logging.getLogger(__name__)
import signal
import tornado
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from tornado import netutil
from .tornado import BokehTornado
from bokeh import __version__
from bokeh.application import Application
from bokeh.resources import DEFAULT_SERVER_PORT
def _create_hosts_whitelist(host_list, port):
if not host_list:
return ['localhost:' + str(port)]
hosts = []
for host in host_list:
if '*' in host:
log.warning(
"Host wildcard %r will allow websocket connections originating "
"from multiple (or possibly all) hostnames or IPs. Use non-wildcard "
"values to restrict access explicitly", host)
if host == '*':
# do not append the :80 port suffix in that case: any port is
# accepted
hosts.append(host)
continue
parts = host.split(':')
if len(parts) == 1:
if parts[0] == "":
raise ValueError("Empty host value")
hosts.append(host+":80")
elif len(parts) == 2:
try:
int(parts[1])
except ValueError:
raise ValueError("Invalid port in host value: %s" % host)
if parts[0] == "":
raise ValueError("Empty host value")
hosts.append(host)
else:
raise ValueError("Invalid host value: %s" % host)
return hosts
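# Example (a sketch): hosts given without an explicit port default to :80.
#
#     _create_hosts_whitelist(['foo.com', 'bar.com:8080'], 5006)
#     # -> ['foo.com:80', 'bar.com:8080']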
def _bind_sockets(address, port):
'''Like tornado.netutil.bind_sockets(), but also returns the
assigned port number.
'''
ss = netutil.bind_sockets(port=port or 0, address=address)
assert len(ss)
ports = {s.getsockname()[1] for s in ss}
assert len(ports) == 1, "Multiple ports assigned??"
actual_port = ports.pop()
if port:
assert actual_port == port
return ss, actual_port
class Server(object):
''' A Server which creates a new Session for each connection, using an Application to initialize each Session.
Args:
applications (dict of str: bokeh.application.Application) or bokeh.application.Application:
mapping from URL paths to Application instances, or a single Application to put at the root URL
The Application is a factory for Document, with a new Document initialized for each Session.
Each application should be identified by a path meant to go in a URL, like "/" or "/foo"
Kwargs:
        num_procs (int):
            Number of worker processes for an app. Defaults to one. Using 0 will autodetect number of cores
tornado_server_kwargs (dict):
Additional arguments passed to tornado.httpserver.HTTPServer. E.g. max_buffer_size to
specify the maximum upload size. More details can be found at:
http://www.tornadoweb.org/en/stable/httpserver.html#http-server
'''
def __init__(self, applications, io_loop=None, tornado_server_kwargs=None, **kwargs):
log.info("Starting Bokeh server version %s (running on Tornado %s)" % (__version__, tornado.version))
if isinstance(applications, Application):
self._applications = { '/' : applications }
else:
self._applications = applications
tornado_kwargs = { key: kwargs[key] for key in ['extra_patterns',
'secret_key',
'sign_sessions',
'generate_session_ids',
'keep_alive_milliseconds',
'check_unused_sessions_milliseconds',
'unused_session_lifetime_milliseconds',
'stats_log_frequency_milliseconds',
]
if key in kwargs }
prefix = kwargs.get('prefix')
if prefix is None:
prefix = ""
prefix = prefix.strip("/")
if prefix:
prefix = "/" + prefix
self._prefix = prefix
self._started = False
self._stopped = False
port = kwargs.get('port', DEFAULT_SERVER_PORT)
self._address = kwargs.get('address') or None
if tornado_server_kwargs is None:
tornado_server_kwargs = {}
tornado_server_kwargs.setdefault('xheaders', kwargs.get('use_xheaders', False))
self._num_procs = kwargs.get('num_procs', 1)
if self._num_procs != 1:
assert all(app.safe_to_fork for app in self._applications.values()), (
                'User code has run before attempting to run multiple '
'processes. This is considered an unsafe operation.')
sockets, self._port = _bind_sockets(self._address, port)
try:
tornado_kwargs['extra_websocket_origins'] = _create_hosts_whitelist(kwargs.get('allow_websocket_origin'), self._port)
tornado_kwargs['use_index'] = kwargs.get('use_index', True)
tornado_kwargs['redirect_root'] = kwargs.get('redirect_root', True)
self._tornado = BokehTornado(self._applications, self.prefix, **tornado_kwargs)
self._http = HTTPServer(self._tornado, **tornado_server_kwargs)
self._http.start(self._num_procs)
self._http.add_sockets(sockets)
except Exception:
for s in sockets:
s.close()
raise
# Can only instantiate the IO loop after HTTPServer.start() was
# called because of `num_procs`, see issue #5524
if io_loop is None:
io_loop = IOLoop.current()
self._loop = io_loop
self._tornado.initialize(io_loop=io_loop, **tornado_kwargs)
@property
def port(self):
'''The actual port number the server is listening on for HTTP
requests.
'''
return self._port
@property
def address(self):
'''The address the server is listening on for HTTP requests
(may be empty or None).
'''
return self._address
@property
def prefix(self):
return self._prefix
@property
def io_loop(self):
return self._loop
def start(self):
''' Start the Bokeh Server and its background tasks.
Notes:
This method does not block and does not affect the state of
the Tornado I/O loop. You must start and stop the loop yourself.
'''
assert not self._started, "Already started"
self._started = True
self._tornado.start()
def stop(self, wait=True):
''' Stop the Bokeh Server.
Args:
            wait (boolean): whether to wait for orderly cleanup (default: True)
Returns:
None
'''
assert not self._stopped, "Already stopped"
self._stopped = True
self._tornado.stop(wait)
self._http.stop()
def run_until_shutdown(self):
''' Run the Bokeh Server until shutdown is requested by the user,
either via a Keyboard interrupt (Ctrl-C) or SIGTERM.
'''
if not self._started:
self.start()
# Install shutdown hooks
atexit.register(self._atexit)
signal.signal(signal.SIGTERM, self._sigterm)
try:
self._loop.start()
except KeyboardInterrupt:
print("\nInterrupted, shutting down")
self.stop()
_atexit_ran = False
def _atexit(self):
if self._atexit_ran:
return
self._atexit_ran = True
log.debug("Shutdown: cleaning up")
if not self._stopped:
self.stop(wait=False)
def _sigterm(self, signum, frame):
print("Received signal %d, shutting down" % (signum,))
# Tell self._loop.start() to return.
self._loop.add_callback_from_signal(self._loop.stop)
def unlisten(self):
'''Stop listening on ports (Server will no longer be usable after calling this)
Returns:
None
'''
self._http.close_all_connections()
self._http.stop()
def get_session(self, app_path, session_id):
'''Gets a session by name (session must already exist)'''
return self._tornado.get_session(app_path, session_id)
def get_sessions(self, app_path=None):
'''Gets all live sessions for an application.'''
if app_path is not None:
return self._tornado.get_sessions(app_path)
all_sessions = []
for path in self._tornado.app_paths:
all_sessions += self._tornado.get_sessions(path)
return all_sessions
def show(self, app_path, browser=None, new='tab'):
''' Opens an app in a browser window or tab.
Useful for testing server applications on your local desktop but
        should not be called when running bokeh-server on an actual server.
Args:
app_path (str) : the app path to open
The part of the URL after the hostname:port, with leading slash.
browser (str, optional) : browser to show with (default: None)
For systems that support it, the **browser** argument allows
specifying which browser to display in, e.g. "safari", "firefox",
"opera", "windows-default" (see the ``webbrowser`` module
documentation in the standard lib for more details).
new (str, optional) : window or tab (default: "tab")
If ``new`` is 'tab', then opens a new tab.
If ``new`` is 'window', then opens a new window.
Returns:
None
'''
if not app_path.startswith("/"):
raise ValueError("app_path must start with a /")
address_string = 'localhost'
if self.address is not None and self.address != '':
address_string = self.address
url = "http://%s:%d%s%s" % (address_string, self.port, self.prefix, app_path)
from bokeh.util.browser import view
view(url, browser=browser, new=new)
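# Example usage (a minimal sketch, not part of this module; make_doc is a
# placeholder application callback):
#
#     from bokeh.application import Application
#     from bokeh.application.handlers import FunctionHandler
#
#     def make_doc(doc):
#         pass  # add plots and widgets to the Document here
#
#     server = Server({'/': Application(FunctionHandler(make_doc))}, port=5006)
#     server.run_until_shutdown()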
|
gpl-2.0
| 7,007,244,162,705,073,000 | 35.217993 | 129 | 0.572179 | false | 4.513583 | false | false | false |
jelly/calibre
|
src/calibre/db/cli/cmd_catalog.py
|
2
|
3866
|
#!/usr/bin/env python2
# vim:fileencoding=utf-8
# License: GPLv3 Copyright: 2017, Kovid Goyal <kovid at kovidgoyal.net>
from __future__ import absolute_import, division, print_function, unicode_literals
import os
from calibre.customize.ui import available_catalog_formats, plugin_for_catalog_format
from calibre.db.cli import integers_from_string
readonly = True
version = 0 # change this if you change signature of implementation()
needs_srv_ctx = True
no_remote = True
def implementation(db, notify_changes, ctx):
raise NotImplementedError()
def option_parser(get_parser, args): # {{{
def add_plugin_parser_options(fmt, parser):
# Fetch the extension-specific CLI options from the plugin
# library.catalogs.<format>.py
plugin = plugin_for_catalog_format(fmt)
p = parser.add_option_group(_('{} OPTIONS').format(fmt.upper()))
for option in plugin.cli_options:
if option.action:
p.add_option(
option.option,
default=option.default,
dest=option.dest,
action=option.action,
help=option.help
)
else:
p.add_option(
option.option,
default=option.default,
dest=option.dest,
help=option.help
)
# Entry point
parser = get_parser(
_(
'''\
%prog catalog /path/to/destination.(csv|epub|mobi|xml...) [options]
Export a catalog in format specified by path/to/destination extension.
Options control how entries are displayed in the generated catalog output.
Note that different catalog formats support different sets of options.
'''
)
)
# Add options common to all catalog plugins
parser.add_option(
'-i',
'--ids',
default=None,
dest='ids',
help=_(
"Comma-separated list of database IDs to catalog.\n"
"If declared, --search is ignored.\n"
"Default: all"
)
)
parser.add_option(
'-s',
'--search',
default=None,
dest='search_text',
help=_(
"Filter the results by the search query. "
"For the format of the search query, please see "
"the search-related documentation in the User Manual.\n"
"Default: no filtering"
)
)
parser.add_option(
'-v',
'--verbose',
default=False,
action='store_true',
dest='verbose',
help=_('Show detailed output information. Useful for debugging')
)
fmt = 'epub'
if args and '.' in args[0]:
fmt = args[0].rpartition('.')[-1].lower()
if fmt not in available_catalog_formats():
fmt = 'epub'
# Add options specific to fmt plugin
add_plugin_parser_options(fmt, parser)
return parser
# }}}
def main(opts, args, dbctx):
if len(args) < 1:
raise SystemExit(_('You must specify a catalog output file'))
if opts.ids:
opts.ids = list(integers_from_string(opts.ids))
fmt = args[0].rpartition('.')[-1]
if fmt not in available_catalog_formats():
raise SystemExit(
_('Cannot generate a catalog in the {} format').format(fmt.upper())
)
# No support for connected device in CLI environment
# Parallel initialization in calibre.gui2.tools:generate_catalog()
opts.connected_device = {
'is_device_connected': False,
'kind': None,
'name': None,
'save_template': None,
'serial': None,
'storage': None,
}
dest = os.path.abspath(os.path.expanduser(args[0]))
plugin = plugin_for_catalog_format(fmt)
with plugin:
plugin.run(dest, opts, dbctx.db)
return 0
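# Usage sketch (invocations are illustrative): the output format is
# inferred from the destination file's extension, so the same command
# routes to different catalog plugins:
#
#     calibredb catalog /tmp/catalog.csv --search "tag:fiction"
#     calibredb catalog /tmp/catalog.epub --ids 1,2,3 --verbose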
|
gpl-3.0
| 3,301,390,566,288,786,000 | 28.51145 | 85 | 0.579152 | false | 4.170442 | false | false | false |
geotagx/geotagx-pybossa-archive
|
pybossa/auth/task.py
|
1
|
1535
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from flask.ext.login import current_user
import pybossa.model as model
from pybossa.core import db
def create(task=None):
    if current_user.is_anonymous():
        return False
    app = db.session.query(model.App).filter_by(id=task.app_id).one()
    return app.owner_id == current_user.id or current_user.admin is True
def read(task=None):
return True
def update(task):
    if current_user.is_anonymous():
        return False
    app = db.session.query(model.App).filter_by(id=task.app_id).one()
    return app.owner_id == current_user.id or current_user.admin is True
def delete(task):
return update(task)
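# Usage sketch: these predicates are consulted before acting on a task.
# The dispatch helper below is an illustrative assumption, not part of
# PyBossa's API:
#
#     from flask import abort
#     import pybossa.auth.task as task_auth
#
#     def ensure_authorized(action, task):
#         check = {'create': task_auth.create, 'read': task_auth.read,
#                  'update': task_auth.update, 'delete': task_auth.delete}[action]
#         if not check(task):
#             abort(403)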
|
agpl-3.0
| -5,745,328,043,428,878,000 | 29.098039 | 77 | 0.683388 | false | 3.725728 | false | false | false |
MasterGowen/moonrain
|
moonrain/accounts/models.py
|
1
|
2939
|
from django.db import models
from django.contrib.auth.models import BaseUserManager, AbstractBaseUser
from ..projects.models import Project
class UserManager(BaseUserManager):
def create_user(self, email, username, password=None):
        if not email:
            raise ValueError('An email address is required')
user = self.model(
email=UserManager.normalize_email(email),
username=username,
)
user.set_password(password)
user.save(using=self._db)
return user
    def create_superuser(self, email, username, password):
        user = self.create_user(email,
                                password=password,
                                username=username)
        user.is_admin = True
        user.is_superuser = True  # the model defines this flag; set it for superusers
        user.save(using=self._db)
        return user
class User(AbstractBaseUser):
'''
    User
'''
    email = models.EmailField(
        verbose_name='Email address',
        max_length=32,
        unique=True,
        db_index=True,
    )
    username = models.CharField(
        verbose_name='Username',
        blank=False,
        max_length=32,
        unique=True,
    )
    avatar = models.ImageField(
        verbose_name='Avatar',
        upload_to='images/%Y/%m',
        blank=True,
    )
    first_name = models.CharField(
        verbose_name='First name',
        max_length=16,
        blank=True,
    )
    last_name = models.CharField(
        verbose_name='Last name',
        max_length=32,
        blank=True,
    )
    department = models.CharField(
        verbose_name='Department',
        max_length=255,
        blank=True,
    )
    is_admin = models.BooleanField(
        verbose_name='Is administrator?',
        default=False,
    )
    is_superuser = models.BooleanField(
        verbose_name='Is superuser?',
        default=False,
    )
    projects = models.ManyToManyField(Project, verbose_name='Projects',
                                      blank=True,
                                      help_text='Projects the user participates in',)
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = ['username']
objects = UserManager()
def get_full_name(self):
return '%s %s' % (self.last_name,
self.first_name,)
def get_short_name(self):
return self.username
def __str__(self):
return self.email
def has_perm(self, perm, obj=None):
return True
def has_module_perms(self, app_label):
return True
@property
def is_staff(self):
return self.is_admin
class Meta:
        verbose_name = ('User')
        verbose_name_plural = ('Users')
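# Usage sketch (requires a configured Django project; values are
# illustrative):
#
#     user = User.objects.create_user(
#         email='user@example.com', username='user', password='secret')
#     admin = User.objects.create_superuser(
#         email='admin@example.com', username='admin', password='secret')
#     assert admin.is_staff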
|
gpl-2.0
| 7,740,447,189,795,986,000 | 23.070796 | 93 | 0.573005 | false | 3.433081 | false | false | false |
agaveapi/SC17-container-tutorial
|
content/images/jupyter/examples/setvars.py
|
1
|
2421
|
# Here we define some utility commands to simplify interaction with the shell.
# You don't need to read or understand this, but it's here in case you want to.
import re
import os
def repvar(v):
"""
repvar() is short for "Replace Variables." The idea is that this
function looks for strings of the form $VAR or ${VAR} or even
$(CMD) in the input string and replaces them, either with
the contents of os.environ[VAR] or os.pipe(CMD), mimicking the
behavior of bash. If a backslace precedes the $, then the backslash
will be removed but the string will not be evaluated. Thus:
${HOME} becomes "/home/user"
$HOME becomes "/home/usr"
$(echo Hello) becomes "Hello"
\$HOME becomes $HOME
"""
epos = 0
buf = ''
for g in re.finditer(r'\$((\w+)|\{([^}]*)\}|\(([^())]*)\))|(\\+\$)',v):
if g:
i = 2
while g.group(i) == None:
i += 1
p = g.start(0)
buf += v[epos:p]
epos = p + len(g.group(0))
if i == 4:
fh = os.popen(g.group(i),"r")
c = repvar(fh.read())
fh.close()
elif i == 5:
c = '$'
else:
if not g.group(i) in os.environ:
raise Exception("no such environment variable: "+g.group(i))
c = repvar(os.environ[g.group(i)])
buf += c
else:
break
buf += v[epos:]
return buf.strip()
def setvar(e):
"""
setvar() emulates the ability of BASH to set environment variables.
Thus, NAME=VALUE will set os.environ["NAME"]="VALUE". Bash-style
comments will be stripped, and bash-line continuations will be processed.
"""
e = re.sub(r'#[^\r\n]*','',e)
e = re.sub(r'\\\n\s*','',e)
for m in re.finditer(r'(?m)(\w+)=(.*)',e):
k = m.group(1)
v = repvar(m.group(2))
print(k+"="+v)
os.environ[k]=v
def readfile(f):
"""
Reads in a file. repvar() will be applied to the file name.
"""
n = repvar(f)
print("Reading file `"+n+"'")
fh = open(n)
c = fh.read()
fh.close()
return c
def writefile(f,c):
"""
Writes out a file. repvar() will be applied both to the file name
and the file contents.
"""
n = repvar(f)
print("Writing file `"+n+"'")
fh = open(n,"w")
fh.write(repvar(c))
fh.close()
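# A short, runnable demonstration of the helpers above. The variable
# names are illustrative; the $(...) example shells out to `echo`,
# which is assumed to be on PATH.
if __name__ == "__main__":
    setvar("GREETING=Hello")
    setvar("MESSAGE=$GREETING world  # the comment is stripped")
    print(repvar("${MESSAGE}!"))           # -> Hello world!
    print(repvar("$(echo subshell) \\$x"))  # -> subshell $x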
|
bsd-3-clause
| 6,554,591,777,941,709,000 | 31.28 | 80 | 0.523337 | false | 3.3625 | false | false | false |
Tilo15/PhotoFiddle2
|
PF2/Tools/HueEqualiser.py
|
1
|
5526
|
import cv2
import numpy
import Tool
class HueEqualiser(Tool.Tool):
def on_init(self):
self.id = "hueequaliser"
self.name = "Hue Equaliser"
self.icon_path = "ui/PF2_Icons/HueEqualiser.png"
self.properties = [
Tool.Property("header", "Hue Equaliser", "Header", None, has_toggle=False, has_button=False),
Tool.Property("bleed", "Hue Bleed", "Slider", 0.5, max=2.0, min=0.01),
Tool.Property("neighbour_bleed", "Neighbour Bleed", "Slider", 0.25, max=2.0, min=0.0),
# Red
Tool.Property("header_red", "Red", "Header", None, has_toggle=False, has_button=False),
Tool.Property("red_value", "Value", "Slider", 0, max=50, min=-50),
Tool.Property("red_saturation", "Saturation", "Slider", 0, max=50, min=-50),
# Yellow
Tool.Property("header_yellow", "Yellow", "Header", None, has_toggle=False, has_button=False),
Tool.Property("yellow_value", "Value", "Slider", 0, max=50, min=-50),
Tool.Property("yellow_saturation", "Saturation", "Slider", 0, max=50, min=-50),
# Green
Tool.Property("header_green", "Green", "Header", None, has_toggle=False, has_button=False),
Tool.Property("green_value", "Value", "Slider", 0, max=50, min=-50),
Tool.Property("green_saturation", "Saturation", "Slider", 0, max=50, min=-50),
# Cyan
Tool.Property("header_cyan", "Cyan", "Header", None, has_toggle=False, has_button=False),
Tool.Property("cyan_value", "Value", "Slider", 0, max=50, min=-50),
Tool.Property("cyan_saturation", "Saturation", "Slider", 0, max=50, min=-50),
# Blue
Tool.Property("header_blue", "Blue", "Header", None, has_toggle=False, has_button=False),
Tool.Property("blue_value", "Value", "Slider", 0, max=50, min=-50),
Tool.Property("blue_saturation", "Saturation", "Slider", 0, max=50, min=-50),
# Violet
Tool.Property("header_violet", "Violet", "Header", None, has_toggle=False, has_button=False),
Tool.Property("violet_value", "Value", "Slider", 0, max=50, min=-50),
Tool.Property("violet_saturation", "Saturation", "Slider", 0, max=50, min=-50),
]
def on_update(self, image):
hues = {
"red": 0,
"yellow": 60,
"green": 120,
"cyan": 180,
"blue": 240,
"violet": 300,
"_red": 360,
}
out = image
if(not self.is_default()):
bleed = self.props["bleed"].get_value()
neighbour_bleed = self.props["neighbour_bleed"].get_value()
out = out.astype(numpy.float32)
# Convert to HSV colorspace
out = cv2.cvtColor(out, cv2.COLOR_BGR2HSV)
# Bits per pixel
bpp = float(str(image.dtype).replace("uint", "").replace("float", ""))
# Pixel value range
np = float(2 ** bpp - 1)
imhue = out[0:, 0:, 0]
imsat = out[0:, 0:, 1]
imval = out[0:, 0:, 2]
for hue in hues:
hsat = self.props["%s_saturation" % hue.replace('_', '')].get_value()
hval = self.props["%s_value" % hue.replace('_', '')].get_value()
isHue = self._is_hue(imhue, hues[hue], (3.5/bleed))
isHue = self._neighbour_bleed(isHue, neighbour_bleed)
imsat = imsat + ((hsat / 10000) * 255) * isHue
imval = imval + ((hval / 1000) * np) * isHue
# Clip any values out of bounds
imval[imval < 0.0] = 0.0
imval[imval > np] = np
imsat[imsat < 0.0] = 0.0
imsat[imsat > 1.0] = 1.0
out[0:, 0:, 1] = imsat
out[0:, 0:, 2] = imval
# Convert back to BGR colorspace
out = cv2.cvtColor(out, cv2.COLOR_HSV2BGR)
out = out.astype(image.dtype)
return out
def _is_hue(self, image, hue_value, bleed_value = 3.5):
mif = hue_value - 30
mir = hue_value + 30
if (mir > 360):
mir = 360
if (mif < 0):
mif = 0
bleed = float(360 / bleed_value)
icopy = image.copy()
if(mif != 0):
icopy[icopy < mif - bleed] = 0.0
icopy[icopy > mir + bleed] = 0.0
icopy[(icopy < mif) * (icopy != 0.0)] = (((mif - (icopy[(icopy < mif) * (icopy != 0.0)]))/360.0) / (bleed/360.0)) * -1 + 1
icopy[(icopy > mir) * (icopy != 0.0)] = ((((icopy[(icopy > mir) * (icopy != 0.0)]) - mir)/360.0) / (bleed/360.0)) * -1 + 1
icopy[(icopy >= mif) * (icopy <= mir)] = 1.0
if(mif == 0):
icopy[icopy > mir + bleed] = 0.0
icopy[(icopy > mir) * (icopy != 0.0)] = ((((icopy[(icopy > mir) * (icopy != 0.0)]) - mir) / 360.0) / (bleed/360.0)) * -1 + 1
return icopy
def _neighbour_bleed(self, map, bleed):
strength = bleed*30
if (strength > 0):
height, width = map.shape[:2]
size = (height * width)
            mul = numpy.math.sqrt(size) / 1064.416  # normalise against a ~1,132,982 px reference image (sqrt(1132982) ~= 1064.416)
map = map*255
blur_size = abs(2 * round((round(strength * mul) + 1) / 2) - 1)
im = cv2.blur(map, (int(blur_size), int(blur_size)))
return im/255.0
return map
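# Worked example for _is_hue (numbers are illustrative): with
# hue_value=120 (green) and the default bleed_value=3.5, the core
# window is [90, 150] and the ramp width is 360/3.5 ~= 102.9 degrees.
# A pixel hue of 120 (or 90, or 150) gets weight 1.0; a hue of 60 gets
# roughly 1 - 30/102.9 ~= 0.71; hues past the end of the ramp get 0.0.
# The per-hue saturation/value offsets in on_update are scaled by this
# mask, so adjustments fade smoothly across neighbouring hues.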
|
gpl-3.0
| 7,938,162,124,587,179,000 | 35.361842 | 136 | 0.500181 | false | 3.139773 | false | false | false |
OCA/sale-workflow
|
sale_product_set/wizard/product_set_add.py
|
1
|
3428
|
# Copyright 2015 Anybox S.A.S
# Copyright 2016-2018 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import models, fields, api, exceptions, _
import odoo.addons.decimal_precision as dp
class ProductSetAdd(models.TransientModel):
_name = 'product.set.add'
_rec_name = 'product_set_id'
_description = "Wizard model to add product set into a quotation"
order_id = fields.Many2one(
'sale.order', 'Sale Order', required=True,
default=lambda self: self.env.context.get('active_id'),
ondelete='cascade'
)
partner_id = fields.Many2one(
related='order_id.partner_id',
ondelete='cascade'
)
product_set_id = fields.Many2one(
'product.set', 'Product set',
required=True,
ondelete='cascade'
)
quantity = fields.Float(
digits=dp.get_precision('Product Unit of Measure'), required=True,
default=1)
skip_existing_products = fields.Boolean(
default=False,
help='Enable this to not add new lines '
'for products already included in SO lines.'
)
def _check_partner(self):
if self.product_set_id.partner_id:
if self.product_set_id.partner_id != self.order_id.partner_id:
raise exceptions.ValidationError(_(
"Select a product set assigned to "
"the same partner of the order."
))
@api.multi
def add_set(self):
""" Add product set, multiplied by quantity in sale order line """
self._check_partner()
order_lines = self._prepare_order_lines()
if order_lines:
self.order_id.write({
"order_line": order_lines
})
return order_lines
def _prepare_order_lines(self):
max_sequence = self._get_max_sequence()
order_lines = []
for set_line in self._get_lines():
order_lines.append(
(0, 0,
self.prepare_sale_order_line_data(
set_line, max_sequence=max_sequence))
)
return order_lines
def _get_max_sequence(self):
max_sequence = 0
if self.order_id.order_line:
max_sequence = max([
line.sequence for line in self.order_id.order_line
])
return max_sequence
def _get_lines(self):
# hook here to take control on used lines
so_product_ids = self.order_id.order_line.mapped('product_id').ids
for set_line in self.product_set_id.set_line_ids:
if (self.skip_existing_products
and set_line.product_id.id in so_product_ids):
continue
yield set_line
@api.multi
def prepare_sale_order_line_data(self, set_line,
max_sequence=0):
self.ensure_one()
sale_line = self.env['sale.order.line'].new({
'order_id': self.order_id.id,
'product_id': set_line.product_id.id,
'product_uom_qty': set_line.quantity * self.quantity,
'product_uom': set_line.product_id.uom_id.id,
'sequence': max_sequence + set_line.sequence,
'discount': set_line.discount,
})
sale_line.product_id_change()
line_values = sale_line._convert_to_write(sale_line._cache)
return line_values
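# Usage sketch (illustrative; assumes an existing sale order `so` and
# product set `pset` inside an Odoo environment):
#
#     wizard = env['product.set.add'].with_context(active_id=so.id).create({
#         'product_set_id': pset.id,
#         'quantity': 2,
#         'skip_existing_products': True,
#     })
#     wizard.add_set()  # appends one order line per set line, quantities multiplied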
|
agpl-3.0
| 4,705,311,278,881,153,000 | 34.340206 | 74 | 0.574096 | false | 3.86036 | false | false | false |
wolfelee/luokr.com
|
www.luokr.com/app/ctrls/admin/posts.py
|
1
|
10035
|
#coding=utf-8
from admin import admin, AdminCtrl
class Admin_PostsCtrl(AdminCtrl):
@admin
def get(self):
pager = {}
pager['qnty'] = min(int(self.input('qnty', 10)), 50)
pager['page'] = max(int(self.input('page', 1)), 1)
pager['list'] = 0;
cur_posts = self.dbase('posts').cursor()
cur_users = self.dbase('users').cursor()
cur_posts.execute('select * from posts order by post_id desc limit ? offset ?', (pager['qnty'], (pager['page']-1)*pager['qnty'], ))
posts = cur_posts.fetchall()
psers = {}
if posts:
pager['list'] = len(posts)
cur_users.execute('select * from users where user_id in (' + ','.join(str(i['user_id']) for i in posts) + ')')
psers = self.utils().array_keyto(cur_users.fetchall(), 'user_id')
cur_posts.close()
cur_users.close()
self.render('admin/posts.html', pager = pager, posts = posts, psers = psers)
class Admin_PostHiddenCtrl(AdminCtrl):
@admin
def post(self):
try:
post_id = self.input('post_id')
con = self.dbase('posts')
cur = con.cursor()
cur.execute('update posts set post_stat = 0 where post_id = ?', (post_id, ))
con.commit()
cur.close()
self.flash(1)
except:
self.flash(0)
class Admin_PostCreateCtrl(AdminCtrl):
@admin
def get(self):
cur = self.dbase('terms').cursor()
cur.execute('select * from terms order by term_id desc, term_refc desc limit 9')
terms = cur.fetchall()
cur.close()
mode = self.input('mode', None)
self.render('admin/post-create.html', mode = mode, terms = terms)
@admin
def post(self):
try:
user = self.current_user
post_type = self.input('post_type', 'blog')
post_title = self.input('post_title')
post_descp = self.input('post_descp')
post_author = self.input('post_author')
post_source = self.input('post_source')
post_summary = self.input('post_summary')
post_content = self.input('post_content')
post_rank = self.input('post_rank')
post_stat = self.input('post_stat', 0)
post_ptms = int(self.timer().mktime(self.timer().strptime(self.input('post_ptms'), '%Y-%m-%d %H:%M:%S')))
post_ctms = self.stime()
post_utms = post_ctms
term_list = []
for term_name in self.input('term_list').split(' '):
if term_name == '':
continue
term_list.append(term_name)
if len(term_list) > 10:
                self.flash(0, {'msg': 'The number of tags cannot exceed 10'})
return
con_posts = self.dbase('posts')
cur_posts = con_posts.cursor()
con_terms = self.dbase('terms')
cur_terms = con_terms.cursor()
term_imap = {}
term_ctms = self.stime()
for term_name in term_list:
cur_terms.execute('select term_id from terms where term_name = ?', (term_name ,))
term_id = cur_terms.fetchone()
if term_id:
term_id = term_id['term_id']
else:
cur_terms.execute('insert or ignore into terms (term_name, term_ctms) values (?, ?)', (term_name , term_ctms, ))
if cur_terms.lastrowid:
term_id = cur_terms.lastrowid
if term_id:
term_imap[term_id] = term_name
cur_posts.execute('insert into posts (user_id, post_type, post_title, post_descp, post_author, post_source, post_summary, post_content,post_stat, post_rank, post_ptms, post_ctms, post_utms) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', \
(user['user_id'], post_type, post_title, post_descp, post_author, post_source, post_summary, post_content, post_stat, post_rank, post_ptms, post_ctms, post_utms ,))
post_id = cur_posts.lastrowid
if term_imap:
for term_id in term_imap:
cur_posts.execute('insert or ignore into post_terms (post_id, term_id) values (' + str(post_id) + ',' + str(term_id) + ')')
if term_imap:
cur_terms.execute('update terms set term_refc = term_refc + 1 where term_id in (' + ','.join([str(i) for i in term_imap.keys()]) + ')')
con_posts.commit()
cur_posts.close()
con_terms.commit()
con_terms.close()
            self.model('alogs').add(self.dbase('alogs'), 'Created post: ' + str(post_id), user_ip = self.request.remote_ip, user_id = user['user_id'], user_name = user['user_name'])
self.flash(1, {'url': '/admin/post?post_id=' + str(post_id)})
except:
self.flash(0)
class Admin_PostCtrl(AdminCtrl):
@admin
def get(self):
post_id = self.input('post_id')
con_posts = self.dbase('posts')
cur_posts = con_posts.cursor()
cur_posts.execute('select * from posts where post_id = ?', (post_id, ))
post = cur_posts.fetchone()
if not post:
cur_posts.close()
return self.send_error(404)
mode = self.input('mode', None)
con_terms = self.dbase('terms')
cur_terms = con_terms.cursor()
cur_terms.execute('select * from terms order by term_id desc, term_refc desc limit 9')
terms = cur_terms.fetchall()
ptids = {}
ptags = {}
cur_posts.execute('select post_id,term_id from post_terms where post_id = ?', (post_id, ))
ptids = cur_posts.fetchall()
if ptids:
cur_terms.execute('select * from terms where term_id in (' + ','.join(str(i['term_id']) for i in ptids) + ')')
ptags = cur_terms.fetchall()
if ptags:
ptids = self.utils().array_group(ptids, 'post_id')
ptags = self.utils().array_keyto(ptags, 'term_id')
cur_posts.close()
cur_terms.close()
self.render('admin/post.html', mode = mode, post = post, terms = terms, ptids = ptids, ptags = ptags)
@admin
def post(self):
try:
user = self.current_user
post_id = self.input('post_id')
post_title = self.input('post_title')
post_descp = self.input('post_descp')
post_author = self.input('post_author')
post_source = self.input('post_source')
post_summary = self.input('post_summary')
post_content = self.input('post_content')
post_rank = self.input('post_rank')
post_stat = self.input('post_stat', 0)
post_ptms = int(self.timer().mktime(self.timer().strptime(self.input('post_ptms'), '%Y-%m-%d %H:%M:%S')))
post_utms = self.stime()
term_list = []
for term_name in self.input('term_list').split(' '):
if term_name == '':
continue
term_list.append(term_name)
if len(term_list) > 10:
                self.flash(0, {'msg': 'The number of tags cannot exceed 10'})
return
con_posts = self.dbase('posts')
cur_posts = con_posts.cursor()
con_terms = self.dbase('terms')
cur_terms = con_terms.cursor()
cur_posts.execute('select * from posts where post_id = ?', (post_id, ))
post = cur_posts.fetchone()
if not post:
cur_posts.close()
cur_terms.close()
                self.flash(0, 'No post ID specified')
return
term_imap = {}
term_ctms = self.stime()
for term_name in term_list:
cur_terms.execute('select term_id from terms where term_name = ?', (term_name ,))
term_id = cur_terms.fetchone()
if term_id:
term_id = term_id['term_id']
else:
cur_terms.execute('insert or ignore into terms (term_name, term_ctms) values (?, ?)', (term_name , term_ctms, ))
if cur_terms.lastrowid:
term_id = cur_terms.lastrowid
if term_id:
term_imap[term_id] = term_name
cur_posts.execute('select term_id from post_terms where post_id = ?', (post_id, ))
post_tids = cur_posts.fetchall()
cur_posts.execute('update posts set user_id=?,post_title=?,post_descp=?,post_author=?,post_source=?,post_summary=?,post_content=?,post_stat=?,post_rank=?,post_ptms=?,post_utms=? where post_id=?', \
(user['user_id'], post_title, post_descp, post_author, post_source, post_summary, post_content, post_stat, post_rank, post_ptms, post_utms, post_id,))
cur_posts.execute('delete from post_terms where post_id = ?', (post_id,))
if term_imap:
for term_id in term_imap:
cur_posts.execute('insert or ignore into post_terms (post_id, term_id) values (' + str(post_id) + ',' + str(term_id) + ')')
if post_tids:
cur_terms.execute('update terms set term_refc = term_refc - 1 where term_id in (' + ','.join([str(i['term_id']) for i in post_tids]) + ')')
if term_imap:
cur_terms.execute('update terms set term_refc = term_refc + 1 where term_id in (' + ','.join([str(i) for i in term_imap.keys()]) + ')')
con_posts.commit()
cur_posts.close()
con_terms.commit()
cur_terms.close()
            self.model('alogs').add(self.dbase('alogs'), 'Updated post: ' + str(post_id), user_ip = self.request.remote_ip, user_id = user['user_id'], user_name = user['user_name'])
self.flash(1)
except:
self.flash(0)
|
bsd-3-clause
| -2,849,066,017,734,671,000 | 38.519841 | 252 | 0.520835 | false | 3.517838 | false | false | false |
advancedplotting/aplot
|
python/plotserv/api_annotations.py
|
1
|
8009
|
# Copyright (c) 2014-2015, Heliosphere Research LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
Handles VIs in "api_annotations".
"""
import numpy as np
from matplotlib import pyplot as plt
from .core import resource
from .terminals import remove_none
from . import filters
from . import errors
@resource('text')
def text(ctx, a):
""" Display text on the plot """
plotid = a.plotid()
x = a.float('x')
y = a.float('y')
s = a.string('s')
relative = a.bool('coordinates')
textprops = a.text()
display = a.display()
ctx.set(plotid)
ax = plt.gca()
    # Non-finite values here mean we skip the plot
if x is None or y is None:
return
k = textprops._k()
k.update(display._k())
k['clip_on'] = True
if relative:
k['transform'] = ax.transAxes
remove_none(k)
plt.text(x, y, s, **k)
@resource('hline')
def hline(ctx, a):
""" Plot a horizontal line """
plotid = a.plotid()
y = a.float('y')
xmin = a.float('xmin')
xmax = a.float('xmax')
line = a.line()
display = a.display()
ctx.set(plotid)
ctx.fail_if_polar()
# Non-finite value provided
if y is None:
return
k = { 'xmin': xmin,
'xmax': xmax,
'linewidth': line.width,
'linestyle': line.style,
'color': line.color if line.color is not None else 'k', }
k.update(display._k())
remove_none(k)
plt.axhline(y, **k)
@resource('vline')
def vline(ctx, a):
""" Plot a vertical line """
plotid = a.plotid()
x = a.float('x')
ymin = a.float('ymin')
ymax = a.float('ymax')
line = a.line()
display = a.display()
ctx.set(plotid)
ctx.fail_if_polar()
# Non-finite value provided
if x is None:
return
k = { 'ymin': ymin,
'ymax': ymax,
'linewidth': line.width,
'linestyle': line.style,
'color': line.color if line.color is not None else 'k', }
k.update(display._k())
remove_none(k)
plt.axvline(x, **k)
@resource('colorbar')
def colorbar(ctx, a):
""" Display a colorbar """
plotid = a.plotid()
label = a.string('label')
ticks = a.dbl_1d('ticks')
ticklabels = a.string_1d('ticklabels')
ctx.set(plotid)
# If no colormapped object has been plotted, MPL complains.
# We permit this, and simply don't add the colorbar.
if ctx.mappable is None:
return
c = plt.colorbar(ctx.mappable)
# Don't bother setting an empty label
if len(label) > 0:
c.set_label(label)
# Both specified
if len(ticks) > 0 and len(ticklabels) > 0:
ticks, ticklabels = filters.filter_1d(ticks, ticklabels)
c.set_ticks(ticks)
c.set_ticklabels(ticklabels)
# Just ticks specified
elif len(ticks) > 0:
ticks = ticks[np.isfinite(ticks)]
c.set_ticks(ticks)
# Just ticklabels specified
else:
# Providing zero-length "ticks" array invokes auto-ticking, in which
# case any ticklabels are ignored.
pass
@resource('legend')
def legend(ctx, a):
""" Represents Legend.vi.
Note that there is no Positions enum on the Python side; the MPL
values are hard-coded into the LabView control.
"""
POSITIONS = { 0: 0,
1: 1,
2: 9,
3: 2,
4: 6,
5: 3,
6: 8,
7: 4,
8: 7,
9: 10 }
plotid = a.plotid()
position = a.enum('position', POSITIONS)
ctx.set(plotid)
k = {'loc': position, 'fontsize': 'medium'}
remove_none(k)
if len(ctx.legend_entries) > 0:
objects, labels = zip(*ctx.legend_entries)
plt.legend(objects, labels, **k)
@resource('label')
def label(ctx, a):
""" Title, X axis and Y axis labels. """
LOCATIONS = {0: 'title', 1: 'xlabel', 2: 'ylabel'}
plotid = a.plotid()
location = a.enum('kind', LOCATIONS)
label = a.string('label')
text = a.text()
ctx.set(plotid)
k = text._k()
if location == 'title':
plt.title(label, **k)
elif location == 'xlabel':
plt.xlabel(label, **k)
elif location == 'ylabel':
ctx.fail_if_polar()
plt.ylabel(label, **k)
else:
pass
@resource('circle')
def circle(ctx, a):
""" Draw a circle on a rectangular plot """
plotid = a.plotid()
x = a.float('x')
y = a.float('y')
radius = a.float('radius')
color = a.color('color')
line = a.line()
display = a.display()
f = ctx.set(plotid)
ctx.fail_if_polar()
ctx.fail_if_log_symlog()
    # Like Text.vi, if any critical input is NaN we do nothing
if x is None or y is None or radius is None:
return
# Catch this before MPL complains
if radius <= 0:
return
k = { 'edgecolor': line.color,
'linestyle': line.style,
'linewidth': line.width,
'facecolor': color if color is not None else '#bbbbbb', }
k.update(display._k())
remove_none(k)
c = plt.Circle((x,y), radius, **k)
f.gca().add_artist(c)
@resource('rectangle')
def rectangle(ctx, a):
""" Draw a rectangle """
plotid = a.plotid()
x = a.float('x')
y = a.float('y')
width = a.float('width')
height = a.float('height')
color = a.color('color')
line = a.line()
display = a.display()
f = ctx.set(plotid)
ctx.fail_if_symlog()
    # Like Text.vi, if any critical input is NaN we do nothing
if x is None or y is None or width is None or height is None:
return
if width == 0 or height == 0:
return
k = { 'edgecolor': line.color,
'linestyle': line.style,
'linewidth': line.width,
'facecolor': color if color is not None else '#bbbbbb', }
k.update(display._k())
remove_none(k)
r = plt.Rectangle((x,y), width, height, **k)
f.gca().add_artist(r)
|
bsd-3-clause
| 3,550,723,003,300,049,000 | 25.611296 | 77 | 0.558122 | false | 3.680607 | false | false | false |
henriquegemignani/randovania
|
randovania/gui/main_window.py
|
1
|
25113
|
import functools
import json
import logging
import os
import platform
import subprocess
from functools import partial
from pathlib import Path
from typing import Optional, List
from PySide2 import QtCore, QtWidgets, QtGui
from PySide2.QtCore import QUrl, Signal, Qt
from qasync import asyncSlot
from randovania import VERSION
from randovania.game_description.resources.trick_resource_info import TrickResourceInfo
from randovania.games.game import RandovaniaGame
from randovania.gui.generated.main_window_ui import Ui_MainWindow
from randovania.gui.lib import common_qt_lib, async_dialog, theme
from randovania.gui.lib.trick_lib import used_tricks, difficulties_for_trick
from randovania.gui.lib.window_manager import WindowManager
from randovania.interface_common import update_checker
from randovania.interface_common.enum_lib import iterate_enum
from randovania.interface_common.options import Options
from randovania.interface_common.preset_manager import PresetManager
from randovania.layout.layout_description import LayoutDescription
from randovania.layout.trick_level import LayoutTrickLevel
from randovania.resolver import debug
_DISABLE_VALIDATION_WARNING = """
<html><head/><body>
<p>While it sometimes throws errors, the validation is what guarantees that your seed is completable.<br/>
Do <span style=" font-weight:600;">not</span> disable if you're uncomfortable with possibly unbeatable seeds.
</p><p align="center">Are you sure you want to disable validation?</p></body></html>
"""
def _update_label_on_show(label: QtWidgets.QLabel, text: str):
def showEvent(_):
if label._delayed_text is not None:
label.setText(label._delayed_text)
label._delayed_text = None
label._delayed_text = text
label.showEvent = showEvent
class MainWindow(WindowManager, Ui_MainWindow):
newer_version_signal = Signal(str, str)
options_changed_signal = Signal()
_is_preview_mode: bool = False
menu_new_version: Optional[QtWidgets.QAction] = None
_current_version_url: Optional[str] = None
_options: Options
_data_visualizer: Optional[QtWidgets.QWidget] = None
_map_tracker: QtWidgets.QWidget
_preset_manager: PresetManager
GameDetailsSignal = Signal(LayoutDescription)
InitPostShowSignal = Signal()
@property
def _tab_widget(self):
return self.main_tab_widget
@property
def preset_manager(self) -> PresetManager:
return self._preset_manager
@property
def main_window(self) -> QtWidgets.QMainWindow:
return self
@property
def is_preview_mode(self) -> bool:
return self._is_preview_mode
def __init__(self, options: Options, preset_manager: PresetManager,
network_client, preview: bool):
super().__init__()
self.setupUi(self)
self.setWindowTitle("Randovania {}".format(VERSION))
self._is_preview_mode = preview
self.setAcceptDrops(True)
common_qt_lib.set_default_window_icon(self)
# Remove all hardcoded link color
about_document: QtGui.QTextDocument = self.about_text_browser.document()
about_document.setHtml(about_document.toHtml().replace("color:#0000ff;", ""))
self.browse_racetime_label.setText(self.browse_racetime_label.text().replace("color:#0000ff;", ""))
self.intro_label.setText(self.intro_label.text().format(version=VERSION))
self._preset_manager = preset_manager
self.network_client = network_client
if preview:
debug.set_level(2)
# Signals
self.newer_version_signal.connect(self.display_new_version)
self.options_changed_signal.connect(self.on_options_changed)
self.GameDetailsSignal.connect(self._open_game_details)
self.InitPostShowSignal.connect(self.initialize_post_show)
self.intro_play_now_button.clicked.connect(lambda: self.welcome_tab_widget.setCurrentWidget(self.tab_play))
self.open_faq_button.clicked.connect(self._open_faq)
self.open_database_viewer_button.clicked.connect(partial(self._open_data_visualizer_for_game,
RandovaniaGame.PRIME2))
for game in RandovaniaGame:
self.hint_item_names_game_combo.addItem(game.long_name, game)
self.hint_location_game_combo.addItem(game.long_name, game)
self.hint_item_names_game_combo.currentIndexChanged.connect(self._update_hints_text)
self.hint_location_game_combo.currentIndexChanged.connect(self._update_hint_locations)
self.import_permalink_button.clicked.connect(self._import_permalink)
self.import_game_file_button.clicked.connect(self._import_spoiler_log)
self.browse_racetime_button.clicked.connect(self._browse_racetime)
self.create_new_seed_button.clicked.connect(
lambda: self.welcome_tab_widget.setCurrentWidget(self.tab_create_seed))
# Menu Bar
for action, game in ((self.menu_action_prime_1_data_visualizer, RandovaniaGame.PRIME1),
(self.menu_action_prime_2_data_visualizer, RandovaniaGame.PRIME2),
(self.menu_action_prime_3_data_visualizer, RandovaniaGame.PRIME3)):
action.triggered.connect(partial(self._open_data_visualizer_for_game, game))
for action, game in ((self.menu_action_edit_prime_1, RandovaniaGame.PRIME1),
(self.menu_action_edit_prime_2, RandovaniaGame.PRIME2),
(self.menu_action_edit_prime_3, RandovaniaGame.PRIME3)):
action.triggered.connect(partial(self._open_data_editor_for_game, game))
self.menu_action_item_tracker.triggered.connect(self._open_item_tracker)
self.menu_action_map_tracker.triggered.connect(self._on_menu_action_map_tracker)
self.menu_action_edit_existing_database.triggered.connect(self._open_data_editor_prompt)
self.menu_action_validate_seed_after.triggered.connect(self._on_validate_seed_change)
self.menu_action_timeout_generation_after_a_time_limit.triggered.connect(self._on_generate_time_limit_change)
self.menu_action_dark_mode.triggered.connect(self._on_menu_action_dark_mode)
self.menu_action_open_auto_tracker.triggered.connect(self._open_auto_tracker)
self.menu_action_previously_generated_games.triggered.connect(self._on_menu_action_previously_generated_games)
self.menu_action_layout_editor.triggered.connect(self._on_menu_action_layout_editor)
self.menu_prime_1_trick_details.aboutToShow.connect(self._create_trick_details_prime_1)
self.menu_prime_2_trick_details.aboutToShow.connect(self._create_trick_details_prime_2)
self.menu_prime_3_trick_details.aboutToShow.connect(self._create_trick_details_prime_3)
# Setting this event only now, so all options changed trigger only once
options.on_options_changed = self.options_changed_signal.emit
self._options = options
self.main_tab_widget.setCurrentIndex(0)
def closeEvent(self, event):
self.generate_seed_tab.stop_background_process()
super().closeEvent(event)
def dragEnterEvent(self, event: QtGui.QDragEnterEvent):
from randovania.layout.preset_migration import VersionedPreset
valid_extensions = [
LayoutDescription.file_extension(),
VersionedPreset.file_extension(),
]
valid_extensions_with_dot = {
f".{extension}"
for extension in valid_extensions
}
for url in event.mimeData().urls():
ext = os.path.splitext(url.toLocalFile())[1]
if ext in valid_extensions_with_dot:
event.acceptProposedAction()
return
def dropEvent(self, event: QtGui.QDropEvent):
from randovania.layout.preset_migration import VersionedPreset
for url in event.mimeData().urls():
path = Path(url.toLocalFile())
if path.suffix == f".{LayoutDescription.file_extension()}":
self.open_game_details(LayoutDescription.from_file(path))
return
elif path.suffix == f".{VersionedPreset.file_extension()}":
self.main_tab_widget.setCurrentWidget(self.welcome_tab)
self.welcome_tab_widget.setCurrentWidget(self.tab_create_seed)
self.generate_seed_tab.import_preset_file(path)
return
def showEvent(self, event: QtGui.QShowEvent):
self.InitPostShowSignal.emit()
# Delayed Initialization
@asyncSlot()
async def initialize_post_show(self):
self.InitPostShowSignal.disconnect(self.initialize_post_show)
logging.info("Will initialize things in post show")
await self._initialize_post_show_body()
logging.info("Finished initializing post show")
async def _initialize_post_show_body(self):
logging.info("Will load OnlineInteractions")
from randovania.gui.main_online_interaction import OnlineInteractions
logging.info("Creating OnlineInteractions...")
self.online_interactions = OnlineInteractions(self, self.preset_manager, self.network_client, self,
self._options)
logging.info("Will load GenerateSeedTab")
from randovania.gui.generate_seed_tab import GenerateSeedTab
logging.info("Creating GenerateSeedTab...")
self.generate_seed_tab = GenerateSeedTab(self, self, self._options)
logging.info("Running GenerateSeedTab.setup_ui")
self.generate_seed_tab.setup_ui()
# Update hints text
logging.info("Will _update_hints_text")
self._update_hints_text()
logging.info("Will hide hint locations combo")
self.hint_location_game_combo.setVisible(False)
self.hint_location_game_combo.setCurrentIndex(1)
logging.info("Will update for modified options")
with self._options:
self.on_options_changed()
def _update_hints_text(self):
from randovania.gui.lib import hints_text
hints_text.update_hints_text(self.hint_item_names_game_combo.currentData(), self.hint_item_names_tree_widget)
def _update_hint_locations(self):
from randovania.gui.lib import hints_text
hints_text.update_hint_locations(self.hint_location_game_combo.currentData(), self.hint_tree_widget)
# Generate Seed
def _open_faq(self):
self.main_tab_widget.setCurrentWidget(self.help_tab)
self.help_tab_widget.setCurrentWidget(self.tab_faq)
async def generate_seed_from_permalink(self, permalink):
from randovania.interface_common.status_update_lib import ProgressUpdateCallable
from randovania.gui.dialog.background_process_dialog import BackgroundProcessDialog
def work(progress_update: ProgressUpdateCallable):
from randovania.interface_common import simplified_patcher
layout = simplified_patcher.generate_layout(progress_update=progress_update,
permalink=permalink,
options=self._options)
progress_update(f"Success! (Seed hash: {layout.shareable_hash})", 1)
return layout
new_layout = await BackgroundProcessDialog.open_for_background_task(work, "Creating a game...")
self.open_game_details(new_layout)
@asyncSlot()
async def _import_permalink(self):
from randovania.gui.dialog.permalink_dialog import PermalinkDialog
dialog = PermalinkDialog()
result = await async_dialog.execute_dialog(dialog)
if result == QtWidgets.QDialog.Accepted:
permalink = dialog.get_permalink_from_field()
await self.generate_seed_from_permalink(permalink)
def _import_spoiler_log(self):
json_path = common_qt_lib.prompt_user_for_input_game_log(self)
if json_path is not None:
layout = LayoutDescription.from_file(json_path)
self.open_game_details(layout)
@asyncSlot()
async def _browse_racetime(self):
from randovania.gui.dialog.racetime_browser_dialog import RacetimeBrowserDialog
dialog = RacetimeBrowserDialog()
if not await dialog.refresh():
return
result = await async_dialog.execute_dialog(dialog)
if result == QtWidgets.QDialog.Accepted:
await self.generate_seed_from_permalink(dialog.permalink)
def open_game_details(self, layout: LayoutDescription):
self.GameDetailsSignal.emit(layout)
def _open_game_details(self, layout: LayoutDescription):
from randovania.gui.seed_details_window import SeedDetailsWindow
details_window = SeedDetailsWindow(self, self._options)
details_window.update_layout_description(layout)
details_window.show()
self.track_window(details_window)
# Releases info
async def request_new_data(self):
from randovania.interface_common import github_releases_data
await self._on_releases_data(await github_releases_data.get_releases())
async def _on_releases_data(self, releases: Optional[List[dict]]):
import markdown
current_version = update_checker.strict_current_version()
last_changelog = self._options.last_changelog_displayed
all_change_logs, new_change_logs, version_to_display = update_checker.versions_to_display_for_releases(
current_version, last_changelog, releases)
if version_to_display is not None:
self.display_new_version(version_to_display)
if all_change_logs:
changelog_tab = QtWidgets.QWidget()
changelog_tab.setObjectName("changelog_tab")
changelog_tab_layout = QtWidgets.QVBoxLayout(changelog_tab)
changelog_tab_layout.setContentsMargins(0, 0, 0, 0)
changelog_tab_layout.setObjectName("changelog_tab_layout")
changelog_scroll_area = QtWidgets.QScrollArea(changelog_tab)
changelog_scroll_area.setWidgetResizable(True)
changelog_scroll_area.setObjectName("changelog_scroll_area")
changelog_scroll_contents = QtWidgets.QWidget()
changelog_scroll_contents.setGeometry(QtCore.QRect(0, 0, 489, 337))
changelog_scroll_contents.setObjectName("changelog_scroll_contents")
changelog_scroll_layout = QtWidgets.QVBoxLayout(changelog_scroll_contents)
changelog_scroll_layout.setObjectName("changelog_scroll_layout")
for entry in all_change_logs:
changelog_label = QtWidgets.QLabel(changelog_scroll_contents)
_update_label_on_show(changelog_label, markdown.markdown(entry))
changelog_label.setObjectName("changelog_label")
changelog_label.setWordWrap(True)
changelog_scroll_layout.addWidget(changelog_label)
changelog_scroll_area.setWidget(changelog_scroll_contents)
changelog_tab_layout.addWidget(changelog_scroll_area)
self.help_tab_widget.addTab(changelog_tab, "Change Log")
if new_change_logs:
await async_dialog.message_box(self, QtWidgets.QMessageBox.Information,
"What's new", markdown.markdown("\n".join(new_change_logs)))
with self._options as options:
options.last_changelog_displayed = current_version
def display_new_version(self, version: update_checker.VersionDescription):
if self.menu_new_version is None:
self.menu_new_version = QtWidgets.QAction("", self)
self.menu_new_version.triggered.connect(self.open_version_link)
self.menu_bar.addAction(self.menu_new_version)
self.menu_new_version.setText("New version available: {}".format(version.tag_name))
self._current_version_url = version.html_url
def open_version_link(self):
if self._current_version_url is None:
raise RuntimeError("Called open_version_link, but _current_version_url is None")
QtGui.QDesktopServices.openUrl(QUrl(self._current_version_url))
# Options
def on_options_changed(self):
self.menu_action_validate_seed_after.setChecked(self._options.advanced_validate_seed_after)
self.menu_action_timeout_generation_after_a_time_limit.setChecked(
self._options.advanced_timeout_during_generation)
self.menu_action_dark_mode.setChecked(self._options.dark_mode)
self.generate_seed_tab.on_options_changed(self._options)
theme.set_dark_theme(self._options.dark_mode)
# Menu Actions
def _open_data_visualizer_for_game(self, game: RandovaniaGame):
self.open_data_visualizer_at(None, None, game)
def open_data_visualizer_at(self,
world_name: Optional[str],
area_name: Optional[str],
game: RandovaniaGame = RandovaniaGame.PRIME2,
):
from randovania.gui.data_editor import DataEditorWindow
data_visualizer = DataEditorWindow.open_internal_data(game, False)
self._data_visualizer = data_visualizer
if world_name is not None:
data_visualizer.focus_on_world(world_name)
if area_name is not None:
data_visualizer.focus_on_area(area_name)
self._data_visualizer.show()
def _open_data_editor_for_game(self, game: RandovaniaGame):
from randovania.gui.data_editor import DataEditorWindow
self._data_editor = DataEditorWindow.open_internal_data(game, True)
self._data_editor.show()
def _open_data_editor_prompt(self):
from randovania.gui.data_editor import DataEditorWindow
database_path = common_qt_lib.prompt_user_for_database_file(self)
if database_path is None:
return
with database_path.open("r") as database_file:
self._data_editor = DataEditorWindow(json.load(database_file), database_path, False, True)
self._data_editor.show()
@asyncSlot()
async def _on_menu_action_map_tracker(self):
dialog = QtWidgets.QInputDialog(self)
dialog.setWindowTitle("Map Tracker")
dialog.setLabelText("Select preset used for the tracker.")
dialog.setComboBoxItems([preset.name for preset in self._preset_manager.all_presets])
dialog.setTextValue(self._options.selected_preset_name)
result = await async_dialog.execute_dialog(dialog)
if result == QtWidgets.QDialog.Accepted:
preset = self._preset_manager.preset_for_name(dialog.textValue())
self.open_map_tracker(preset.get_preset().configuration)
def open_map_tracker(self, configuration: "EchoesConfiguration"):
from randovania.gui.tracker_window import TrackerWindow, InvalidLayoutForTracker
try:
self._map_tracker = TrackerWindow(self._options.tracker_files_path, configuration)
except InvalidLayoutForTracker as e:
QtWidgets.QMessageBox.critical(
self,
"Unsupported configuration for Tracker",
str(e)
)
return
self._map_tracker.show()
def _open_item_tracker(self):
# Importing this at root level seems to crash linux tests :(
from PySide2.QtWebEngineWidgets import QWebEngineView
tracker_window = QtWidgets.QMainWindow()
tracker_window.setWindowTitle("Item Tracker")
tracker_window.resize(370, 380)
web_view = QWebEngineView(tracker_window)
tracker_window.setCentralWidget(web_view)
self.web_view = web_view
def update_window_icon():
tracker_window.setWindowIcon(web_view.icon())
web_view.iconChanged.connect(update_window_icon)
web_view.load(QUrl("https://spaghettitoastbook.github.io/echoes/tracker/"))
tracker_window.show()
self._item_tracker_window = tracker_window
# Difficulties stuff
def _exec_trick_details(self, popup: "TrickDetailsPopup"):
self._trick_details_popup = popup
self._trick_details_popup.setWindowModality(Qt.WindowModal)
self._trick_details_popup.open()
def _open_trick_details_popup(self, game, trick: TrickResourceInfo, level: LayoutTrickLevel):
from randovania.gui.dialog.trick_details_popup import TrickDetailsPopup
self._exec_trick_details(TrickDetailsPopup(self, self, game, trick, level))
def _create_trick_details_prime_1(self):
self.menu_prime_1_trick_details.aboutToShow.disconnect(self._create_trick_details_prime_1)
self._setup_difficulties_menu(RandovaniaGame.PRIME1, self.menu_prime_1_trick_details)
def _create_trick_details_prime_2(self):
self.menu_prime_2_trick_details.aboutToShow.disconnect(self._create_trick_details_prime_2)
self._setup_difficulties_menu(RandovaniaGame.PRIME2, self.menu_prime_2_trick_details)
def _create_trick_details_prime_3(self):
self.menu_prime_3_trick_details.aboutToShow.disconnect(self._create_trick_details_prime_3)
self._setup_difficulties_menu(RandovaniaGame.PRIME3, self.menu_prime_3_trick_details)
def _setup_difficulties_menu(self, game: RandovaniaGame, menu: QtWidgets.QMenu):
from randovania.game_description import default_database
game = default_database.game_description_for(game)
tricks_in_use = used_tricks(game)
menu.clear()
for trick in sorted(game.resource_database.trick, key=lambda _trick: _trick.long_name):
if trick not in tricks_in_use:
continue
trick_menu = QtWidgets.QMenu(self)
trick_menu.setTitle(trick.long_name)
menu.addAction(trick_menu.menuAction())
used_difficulties = difficulties_for_trick(game, trick)
for i, trick_level in enumerate(iterate_enum(LayoutTrickLevel)):
if trick_level in used_difficulties:
difficulty_action = QtWidgets.QAction(self)
difficulty_action.setText(trick_level.long_name)
trick_menu.addAction(difficulty_action)
difficulty_action.triggered.connect(
functools.partial(self._open_trick_details_popup, game, trick, trick_level))
# ==========
@asyncSlot()
async def _on_validate_seed_change(self):
old_value = self._options.advanced_validate_seed_after
new_value = self.menu_action_validate_seed_after.isChecked()
if old_value and not new_value:
box = QtWidgets.QMessageBox(self)
box.setWindowTitle("Disable validation?")
box.setText(_DISABLE_VALIDATION_WARNING)
box.setIcon(QtWidgets.QMessageBox.Warning)
box.setStandardButtons(QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No)
box.setDefaultButton(QtWidgets.QMessageBox.No)
user_response = await async_dialog.execute_dialog(box)
if user_response != QtWidgets.QMessageBox.Yes:
self.menu_action_validate_seed_after.setChecked(True)
return
with self._options as options:
options.advanced_validate_seed_after = new_value
def _on_generate_time_limit_change(self):
is_checked = self.menu_action_timeout_generation_after_a_time_limit.isChecked()
with self._options as options:
options.advanced_timeout_during_generation = is_checked
def _on_menu_action_dark_mode(self):
with self._options as options:
options.dark_mode = self.menu_action_dark_mode.isChecked()
def _open_auto_tracker(self):
from randovania.gui.auto_tracker_window import AutoTrackerWindow
self.auto_tracker_window = AutoTrackerWindow(common_qt_lib.get_game_connection(), self._options)
self.auto_tracker_window.show()
def _on_menu_action_previously_generated_games(self):
path = self._options.data_dir.joinpath("game_history")
try:
if platform.system() == "Windows":
os.startfile(path)
elif platform.system() == "Darwin":
subprocess.run(["open", path])
else:
subprocess.run(["xdg-open", path])
except OSError:
print("Exception thrown :)")
box = QtWidgets.QMessageBox(QtWidgets.QMessageBox.Information, "Game History",
f"Previously generated games can be found at:\n{path}",
QtWidgets.QMessageBox.Ok, self)
box.setTextInteractionFlags(Qt.TextSelectableByMouse)
box.show()
def _on_menu_action_layout_editor(self):
from randovania.gui.corruption_layout_editor import CorruptionLayoutEditor
self.corruption_editor = CorruptionLayoutEditor()
self.corruption_editor.show()
|
gpl-3.0
| -1,612,572,667,298,678,800 | 44.330325 | 118 | 0.669454 | false | 3.87367 | false | false | false |
maltsev/LatexWebOffice
|
app/views/document.py
|
1
|
15983
|
# -*- coding: utf-8 -*-
"""
* Purpose : Document and project management interface
* Creation Date : 19-11-2014
* Last Modified : Di 24 Feb 2015 15:46:51 CET
* Author : mattis
* Coauthors : christian, ingo, Kirill
* Sprintnumber : 2, 5
* Backlog entry : TEK1, 3ED9, DOK8, DO14, KOL1
"""
import os
from django.contrib.auth.decorators import login_required
from django.views.decorators.http import require_http_methods
from django.shortcuts import render
from django.views.static import serve
import settings
from app.common import util
from app.common.constants import ERROR_MESSAGES
from app.views import file, folder, project, template
from app.models.projecttemplate import ProjectTemplate
from app.models.file.file import File
from app.models.file.texfile import TexFile
from app.models.file.plaintextfile import PlainTextFile
from app.models.file.pdf import PDF
from app.models.project import Project
from app.models.folder import Folder
globalparas = {
'id': {'name': 'id', 'type': int},
'content': {'name': 'content', 'type': str},
'folderid': {'name': 'folderid', 'type': int},
'name': {'name': 'name', 'type': str},
'formatid': {'name': 'formatid', 'type': int},
# 'compilerid': {'name': 'compilerid', 'type': int},
'forcecompile': {'name': 'forcecompile', 'type': int}
}
# dictionary of available commands and their corresponding actions;
# the corresponding methods live in:
# '/app/views/project.py', '/app/views/file.py', '/app/views/folder.py' and '/app/views/collaboration.py'
available_commands = {
'projectcreate': {
'command': project.projectCreate,
'parameters': [{'para': globalparas['name'], 'stringcheck': True}]
},
'projectclone': {
'command': project.projectClone,
'parameters': [{'para': globalparas['id'], 'type': Project, 'requirerights': ['owner', 'collaborator']},
{'para': globalparas['name'], 'stringcheck': True}]
},
'projectrm': {
'command': project.projectRm,
'parameters': [{'para': globalparas['id'], 'type': Project}]
},
'projectrename': {
'command': project.projectRename,
'parameters': [{'para': globalparas['id'], 'type': Project},
{'para': globalparas['name'], 'stringcheck': True}]
},
'listprojects': {
'command': project.listProjects,
'parameters': []
},
'importzip': {
'command': project.importZip,
'parameters': []
},
'exportzip': {
'command': project.exportZip,
'parameters': [{'para': globalparas['id']}]
},
'inviteuser': {
'command': project.inviteUser,
'parameters': [{'para': globalparas['id'], 'type': Project},
{'para': globalparas['name'], 'stringcheck': True}]
},
'hasinvitedusers': {
'command': project.hasInvitedUsers,
'parameters': [{'para': globalparas['id'], 'type': Project}]
},
'listinvitedusers': {
'command': project.listInvitedUsers,
'parameters': [{'para': globalparas['id'], 'type': Project}]
},
'listunconfirmedcollaborativeprojects': {
'command': project.listUnconfirmedCollaborativeProjects,
'parameters': []
},
'activatecollaboration': {
'command': project.activateCollaboration,
'parameters': [{'para': globalparas['id'], 'type': Project, 'requirerights': ['owner', 'invitee']}]
},
'quitcollaboration': {
'command': project.quitCollaboration,
'parameters': [
{'para': globalparas['id'], 'type': Project, 'requirerights': ['owner', 'invitee', 'collaborator']}]
},
'cancelcollaboration': {
'command': project.cancelCollaboration,
'parameters': [{'para': globalparas['id'], 'type': Project},
{'para': globalparas['name'], 'stringcheck': True}]
},
'createtex': {
'command': file.createTexFile,
'parameters': [{'para': globalparas['id'], 'type': Folder, 'requirerights': ['owner', 'collaborator']},
{'para': globalparas['name'], 'filenamecheck': True}]
},
'updatefile': {
'command': file.updateFile,
'parameters': [{'para': globalparas['id'], 'type': PlainTextFile,
'requirerights': ['owner', 'collaborator'], 'lockcheck': False},
{'para': globalparas['content']}]
},
'deletefile': {
'command': file.deleteFile,
'parameters': [{'para': globalparas['id'], 'type': File,
'requirerights': ['owner', 'collaborator'], 'lockcheck': True}]
},
'renamefile': {
'command': file.renameFile,
'parameters': [{'para': globalparas['id'], 'type': File,
'requirerights': ['owner', 'collaborator'], 'lockcheck': True},
{'para': globalparas['name'], 'filenamecheck': True}]
},
'movefile': {
'command': file.moveFile,
'parameters': [{'para': globalparas['id'], 'type': File,
'requirerights': ['owner', 'collaborator'], 'lockcheck': True},
{'para': globalparas['folderid'], 'type': Folder, 'requirerights': ['owner', 'collaborator']}]
},
'uploadfiles': {
'command': file.uploadFiles,
'parameters': [{'para': globalparas['id'], 'type': Folder, 'requirerights': ['owner', 'collaborator']}]
},
'downloadfile': {
'command': file.downloadFile,
'parameters': [{'para': globalparas['id']}]
},
'gettext': {
'command': file.getText,
'parameters': [{'para': globalparas['id'], 'type': PlainTextFile, 'requirerights': ['owner', 'collaborator']}]
},
'fileinfo': {
'command': file.fileInfo,
'parameters': [{'para': globalparas['id'], 'type': File, 'requirerights': ['owner', 'collaborator']}]
},
'compile': {
'command': file.latexCompile,
'parameters': [{'para': globalparas['id'], 'type': TexFile,
'requirerights': ['owner', 'collaborator'], 'lockcheck': True},
{'para': globalparas['formatid']},
# {'para': globalparas['compilerid']},
{'para': globalparas['forcecompile']}]
},
'lockfile': {
'command': file.lockFile,
'parameters': [{'para': globalparas['id'], 'type': File, 'requirerights': ['owner', 'collaborator']}]
},
'unlockfile': {
'command': file.unlockFile,
'parameters': [{'para': globalparas['id'], 'type': File, 'requirerights': ['owner', 'collaborator']}]
},
'getlog': {
'command': file.getLog,
'parameters': [{'para': globalparas['id'], 'type': TexFile, 'requirerights': ['owner', 'collaborator']}]
},
'createdir': {
'command': folder.createDir,
'parameters': [{'para': globalparas['id'], 'type': Folder, 'requirerights': ['owner', 'collaborator']},
{'para': globalparas['name'], 'stringcheck': True}]
},
'rmdir': {
'command': folder.rmDir,
'parameters': [{'para': globalparas['id'], 'type': Folder,
'requirerights': ['owner', 'collaborator'], 'lockcheck': True}]
},
'renamedir': {
'command': folder.renameDir,
'parameters': [{'para': globalparas['id'], 'type': Folder,
'requirerights': ['owner', 'collaborator']},
{'para': globalparas['name'], 'stringcheck': True}]
},
'movedir': {
'command': folder.moveDir,
'parameters': [{'para': globalparas['id'], 'type': Folder,
'requirerights': ['owner', 'collaborator'], 'lockcheck': True},
{'para': globalparas['folderid'], 'type': Folder, 'requirerights': ['owner', 'collaborator']}]
},
'listfiles': {
'command': folder.listFiles,
'parameters': [{'para': globalparas['id'], 'type': Folder, 'requirerights': ['owner', 'collaborator']}]
},
'template2project': {
'command': template.template2Project,
'parameters': [{'para': globalparas['id'], 'type': ProjectTemplate},
{'para': globalparas['name'], 'stringcheck': True}]
},
'project2template': {
'command': template.project2Template,
'parameters': [{'para': globalparas['id'], 'type': Project, 'requirerights': ['owner', 'collaborator']},
{'para': globalparas['name'], 'stringcheck': True}]
},
'templaterm': {
'command': template.templateRm,
'parameters': [{'para': globalparas['id'], 'type': ProjectTemplate}]
},
'templaterename': {
'command': template.templateRename,
'parameters': [{'para': globalparas['id'], 'type': ProjectTemplate},
{'para': globalparas['name'], 'stringcheck': True}]
},
'listtemplates': {
'command': template.listTemplates,
'parameters': []
}
}
available_commands_output = {}
for key, value in available_commands.items():
parameters = []
    for paras in value['parameters']:
        globalparainfo = (paras['para']).copy()
        entry = {'para': globalparainfo}
        if globalparainfo.get('type'):
            del globalparainfo['type']
        parameters.append(entry)
if key == 'uploadfiles' or key == 'importzip':
parameters.append({'para': {'name': 'files'}})
available_commands_output.update({key: parameters})
@login_required
def debug(request):
return render(request, 'documentPoster.html')
# Interface function
# provides an interface for communication between client and server:
# it reads the command passed by the client via POST data
# and executes the corresponding method
@login_required
@require_http_methods(['POST', 'GET'])
def execute(request):
if request.method == 'POST' and 'command' in request.POST:
        # fetch the current user
user = request.user
        # if the command key was not found,
        # return an error message
if request.POST['command'] not in available_commands:
return util.jsonErrorResponse(ERROR_MESSAGES['COMMANDNOTFOUND'], request)
args = []
        # the current command
c = available_commands[request.POST['command']]
        # the parameters of this command
paras = c['parameters']
        # loop over all parameters of the command
for para in paras:
            # if the parameter was not found, or a parameter that should hold an id
            # contains non-digit characters, return an error message
if request.POST.get(para['para']['name']) is None:
return util.jsonErrorResponse(ERROR_MESSAGES['MISSINGPARAMETER'] % (para['para']), request)
            elif para['para'].get('type') == int and (not request.POST.get(para['para']['name']).isdigit()):
return util.jsonErrorResponse(ERROR_MESSAGES['MISSINGPARAMETER'] % (para['para']), request)
            # otherwise, append the parameter to the argument list
else:
args.append(request.POST[para['para']['name']])
                # check for invalid strings
if para.get('stringcheck'):
failstring, failurereturn = util.checkObjectForInvalidString(
request.POST.get(para['para']['name']), request)
if not failstring:
return failurereturn
elif para.get('filenamecheck'):
failstring, failurereturn = util.checkFileForInvalidString(
request.POST.get(para['para']['name']), request)
if not failstring:
return failurereturn
            # check that the user has rights on the object with the given id
            # and that the object exists
if para.get('type') and para['para']['type'] == int:
objType = para.get('type')
objId = request.POST.get(para['para']['name'])
requireRights = para.get('requirerights', ['owner'])
lockcheck = para.get('lockcheck', False)
if objType == Project:
rights, failurereturn = util.checkIfProjectExistsAndUserHasRights(objId, user, request,
requireRights)
if not rights:
return failurereturn
elif objType == Folder:
rights, failurereturn = util.checkIfDirExistsAndUserHasRights(objId, user, request, requireRights, lockcheck)
if not rights:
return failurereturn
elif objType == File:
rights, failurereturn = util.checkIfFileExistsAndUserHasRights(objId, user, request, requireRights, lockcheck,
objecttype=File)
if not rights:
return failurereturn
elif objType == TexFile:
rights, failurereturn = util.checkIfFileExistsAndUserHasRights(objId, user, request, requireRights, lockcheck,
objecttype=TexFile)
if not rights:
return failurereturn
elif objType == PlainTextFile:
rights, failurereturn = util.checkIfFileExistsAndUserHasRights(objId, user, request, requireRights, lockcheck,
objecttype=PlainTextFile)
if not rights:
return failurereturn
elif objType == ProjectTemplate:
                # check whether the template exists and the user has rights on it
emptystring, failurereturn = util.checkIfTemplateExistsAndUserHasRights(objId, user, request)
if not emptystring:
return failurereturn
        # execute the given command
return c['command'](request, user, *args)
elif request.method == 'GET' and request.GET.get('command'):
command = request.GET.get('command')
pdfid = request.GET.get('id')
texid = request.GET.get('texid')
        defaultpdfPath = os.path.join(settings.BASE_DIR, 'app', 'static', 'default.pdf')
if (pdfid and not pdfid.isdigit()) or (texid and not texid.isdigit()):
return serve(request, os.path.basename(defaultpdfPath), os.path.dirname(defaultpdfPath))
if command == 'getpdf' and pdfid:
requireRights = ['owner', 'collaborator']
rights, failurereturn = util.checkIfFileExistsAndUserHasRights(pdfid, request.user, request, requireRights, lockcheck=False,
objecttype=PDF)
if not rights:
return serve(request, os.path.basename(defaultpdfPath), os.path.dirname(defaultpdfPath))
return file.getPDF(request, request.user, pdfid=pdfid, default=defaultpdfPath)
elif command == 'getpdf' and texid:
requireRights = ['owner', 'collaborator']
rights, failurereturn = util.checkIfFileExistsAndUserHasRights(texid, request.user, request, requireRights, lockcheck=False,
objecttype=TexFile)
if not rights:
return serve(request, os.path.basename(defaultpdfPath), os.path.dirname(defaultpdfPath))
return file.getPDF(request, request.user, texid=texid, default=defaultpdfPath)
return util.jsonErrorResponse(ERROR_MESSAGES['MISSINGPARAMETER'] % 'unknown', request)
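# --- Illustrative client call (not part of the original module) ---
# A minimal sketch of how a client might drive this dispatcher, assuming the
# URL configuration routes execute() to /execute and the session already
# carries a valid login; the endpoint path, command name and id value below
# are assumptions for illustration only.
#
#   import requests
#   s = requests.Session()  # assumed to hold an authenticated session cookie
#   r = s.post('http://localhost:8000/execute',
#              data={'command': 'fileinfo', 'id': '42'})
#   print(r.json())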
|
gpl-3.0
| 953,337,725,685,583,200 | 42.63388 | 136 | 0.568503 | false | 3.9925 | false | false | false |
psyonara/agonizomai
|
sermons/models.py
|
1
|
5153
|
from __future__ import unicode_literals
from django.db import models
from django.template.defaultfilters import slugify
from bible.models import BibleBook
from useraccounts.models import UserAccount
class Author(models.Model):
name = models.CharField(null=False, blank=False, max_length=50)
name_slug = models.SlugField(max_length=50, null=True, blank=True, db_index=True)
def __str__(self):
return self.name
def save(self, *args, **kwargs):
if self.name_slug is None or self.name_slug == "":
self.name_slug = slugify(self.name)
super(Author, self).save(*args, **kwargs)
class AuthorSetting(models.Model):
"""
Holds user settings specific to an author.
"""
author = models.ForeignKey(Author, on_delete=models.CASCADE)
user = models.ForeignKey("useraccounts.UserAccount", on_delete=models.CASCADE)
name = models.CharField(max_length=30, db_index=True)
value = models.CharField(max_length=50)
class Series(models.Model):
name = models.CharField(null=False, blank=False, max_length=100)
name_slug = models.SlugField(max_length=100, null=True, blank=True, db_index=True)
author = models.ForeignKey(Author, null=False, blank=False, on_delete=models.CASCADE)
complete = models.BooleanField(default=False)
def __str__(self):
return "%s (%s)" % (self.name, self.author.name)
def save(self, *args, **kwargs):
if self.name_slug is None or self.name_slug == "":
self.name_slug = slugify(self.name)
super(Series, self).save(*args, **kwargs)
class Sermon(models.Model):
date_added = models.DateTimeField(auto_now_add=True)
date_preached = models.DateField(null=True, blank=True)
author = models.ForeignKey(Author, related_name="sermons", on_delete=models.CASCADE)
title = models.CharField(null=False, blank=False, max_length=100)
title_slug = models.SlugField(max_length=100, null=True, blank=True, db_index=True)
series = models.ForeignKey(
Series, null=True, blank=True, related_name="sermons", on_delete=models.CASCADE
)
ref = models.CharField(max_length=20, null=True, blank=True)
def get_audio_file(self):
files = self.media_files.filter(media_type=1)
return files[0] if len(files) > 0 else None
def __str__(self):
return "%s (by %s)" % (self.title, self.author.name)
def save(self, *args, **kwargs):
if self.title_slug is None or self.title_slug == "":
self.title_slug = slugify(self.title)
super(Sermon, self).save(*args, **kwargs)
class Meta:
ordering = ["-date_preached"]
class ScriptureRef(models.Model):
sermon = models.ForeignKey(Sermon, related_name="scripture_refs", on_delete=models.CASCADE)
bible_book = models.ForeignKey(BibleBook, on_delete=models.CASCADE)
chapter_begin = models.PositiveSmallIntegerField()
chapter_end = models.PositiveSmallIntegerField()
verse_begin = models.PositiveSmallIntegerField(null=True, blank=True)
verse_end = models.PositiveSmallIntegerField(null=True, blank=True)
def __str__(self):
end_string = ""
if self.chapter_begin == self.chapter_end:
end_string += "%s %s" % (self.bible_book.name, self.chapter_begin)
if self.verse_begin is not None and self.verse_end is not None:
if self.verse_begin == self.verse_end:
end_string += ":%s" % (self.verse_begin)
else:
end_string += ":%s-%s" % (self.verse_begin, self.verse_end)
else:
end_string += "%s %s" % (self.bible_book.name, self.chapter_begin)
if self.verse_begin is None and self.verse_end is None:
end_string += "-%s" % (self.chapter_end)
else:
end_string += ":%s-%s:%s" % (self.verse_begin, self.chapter_end, self.verse_end)
return end_string
class MediaFile(models.Model):
MEDIA_TYPE_CHOICES = ((1, "audio"), (2, "video"), (3, "text"), (4, "pdf"))
LOCATION_TYPE_CHOICES = ((1, "url"),)
sermon = models.ForeignKey(Sermon, related_name="media_files", on_delete=models.CASCADE)
media_type = models.PositiveSmallIntegerField(choices=MEDIA_TYPE_CHOICES, null=False, default=1)
file_size = models.PositiveIntegerField(null=True, blank=True)
location_type = models.PositiveSmallIntegerField(
choices=LOCATION_TYPE_CHOICES, null=False, default=1
)
location = models.CharField(null=False, max_length=250)
def __str__(self):
return "%s (%s)" % (self.location, self.sermon.title)
class SermonSession(models.Model):
sermon = models.ForeignKey(Sermon, related_name="sessions", on_delete=models.CASCADE)
session_started = models.DateTimeField(auto_now_add=True)
session_updated = models.DateTimeField(auto_now=True)
position = models.PositiveSmallIntegerField(default=0) # in seconds from start of file
total_duration = models.PositiveSmallIntegerField(default=0) # in seconds
user = models.ForeignKey(UserAccount, on_delete=models.CASCADE)
completed = models.BooleanField(default=False)
|
mit
| -3,994,879,931,140,667,400 | 39.896825 | 100 | 0.663497 | false | 3.56609 | false | false | false |
funshine/rpidemo
|
mqtt_oled/oled_test_luma.py
|
1
|
1273
|
#!/usr/bin/python
# coding: utf-8
import time
import datetime
from luma.core.interface.serial import i2c, spi
from luma.core.render import canvas
from luma.oled.device import ssd1306, ssd1325, ssd1331, sh1106
def do_nothing(obj):
pass
# rev.1 users set port=0
# substitute spi(device=0, port=0) below if using that interface
# serial = i2c(port=1, address=0x3C)
serial = spi(device=0, port=0)
# substitute ssd1331(...) or sh1106(...) below if using that device
# device = ssd1306(serial, rotate=1)
device = sh1106(serial)
# device.cleanup = do_nothing
print("Testing display Hello World")
with canvas(device) as draw:
draw.rectangle(device.bounding_box, outline="white", fill="black")
draw.text((30, 40), "Hello World", fill="white")
time.sleep(3)
print("Testing display ON/OFF...")
for _ in range(5):
time.sleep(0.5)
device.hide()
time.sleep(0.5)
device.show()
print("Testing clear display...")
time.sleep(2)
device.clear()
print("Testing screen updates...")
time.sleep(2)
for x in range(40):
with canvas(device) as draw:
now = datetime.datetime.now()
draw.text((x, 4), str(now.date()), fill="white")
draw.text((10, 16), str(now.time()), fill="white")
time.sleep(0.1)
print("Quit, cleanup...")
|
mit
| 2,993,683,248,832,655,000 | 23.018868 | 70 | 0.671642 | false | 3.009456 | false | false | false |
jantman/nagios-scripts
|
check_icinga_ido.py
|
1
|
6939
|
#!/usr/bin/env python
"""
Script to check last update of core programstatus
and service checks in Icinga ido2db Postgres database
"""
#
# The latest version of this script lives at:
# <https://github.com/jantman/nagios-scripts/blob/master/check_icinga_ido.py>
#
# Please file bug/feature requests and submit patches through
# the above GitHub repository. Feedback and patches are greatly
# appreciated; patches are preferred as GitHub pull requests, but
# emailed patches are also accepted.
#
# Copyright 2014 Jason Antman <jason@jasonantman.com> all rights reserved.
# See the above git repository's LICENSE file for license terms (GPLv3).
#
import sys
from datetime import datetime
import pytz
import logging
import argparse
from math import ceil
import nagiosplugin
import psycopg2
import pprint
_log = logging.getLogger('nagiosplugin')
utc = pytz.utc
class IdoStatus(nagiosplugin.Resource):
"""Check age of ido2db programstatus and last service check in postgres database"""
def __init__(self, db_host, db_name, db_user, db_pass, db_port=5432):
self.db_host = db_host
self.db_user = db_user
self.db_pass = db_pass
self.db_port = db_port
self.db_name = db_name
def probe(self):
_log.info("connecting to Postgres DB %s on %s" % (self.db_name, self.db_host))
try:
conn_str = "dbname='%s' user='%s' host='%s' password='%s' port='%s' application_name='%s'" % (
self.db_name,
self.db_user,
self.db_host,
self.db_pass,
self.db_port,
"check_icinga_ido_core.py",
)
_log.debug("psycopg2 connect string: %s" % conn_str)
conn = psycopg2.connect(conn_str)
except psycopg2.OperationalError, e:
_log.info("got psycopg2.OperationalError: %s" % e.__str__())
raise nagiosplugin.CheckError(e.__str__())
_log.info("connected to database")
# these queries come from https://wiki.icinga.org/display/testing/Special+IDOUtils+Queries
cur = conn.cursor()
_log.debug("got cursor")
sql = "SELECT EXTRACT(EPOCH FROM (NOW()-status_update_time)) AS age from icinga_programstatus where (UNIX_TIMESTAMP(status_update_time) > UNIX_TIMESTAMP(NOW())-60);"
_log.debug("executing query: %s" % sql)
cur.execute(sql)
row = cur.fetchone()
_log.debug("result: %s" % row)
programstatus_age = ceil(row[0])
sql = "select (UNIX_TIMESTAMP(NOW())-UNIX_TIMESTAMP(ss.status_update_time)) as age from icinga_servicestatus ss join icinga_objects os on os.object_id=ss.service_object_id order by status_update_time desc limit 1;"
_log.debug("executing query: %s" % sql)
cur.execute(sql)
row = cur.fetchone()
_log.debug("result: %s" % row)
last_check_age = ceil(row[0])
return [
nagiosplugin.Metric('programstatus_age', programstatus_age, uom='s', min=0),
nagiosplugin.Metric('last_check_age', last_check_age, uom='s', min=0),
]
class LoadSummary(nagiosplugin.Summary):
"""LoadSummary is used to provide custom outputs to the check"""
def __init__(self, db_name):
self.db_name = db_name
def _human_time(self, seconds):
"""convert an integer seconds into human-readable hms"""
mins, secs = divmod(seconds, 60)
hours, mins = divmod(mins, 60)
return '%02d:%02d:%02d' % (hours, mins, secs)
def _state_marker(self, state):
"""return a textual marker for result states"""
if type(state) == type(nagiosplugin.state.Critical):
return " (Crit)"
if type(state) == type(nagiosplugin.state.Warn):
return " (Warn)"
if type(state) == type(nagiosplugin.state.Unknown):
return " (Unk)"
return ""
def status_line(self, results):
if type(results.most_significant_state) == type(nagiosplugin.state.Unknown):
# won't have perf values, so special handling
return results.most_significant[0].hint.splitlines()[0]
return "Last Programstatus Update %s ago%s; Last Service Status Update %s ago%s (%s)" % (
self._human_time(results['programstatus_age'].metric.value),
self._state_marker(results['programstatus_age'].state),
self._human_time(results['last_check_age'].metric.value),
self._state_marker(results['last_check_age'].state),
self.db_name)
def ok(self, results):
return self.status_line(results)
def problem(self, results):
return self.status_line(results)
@nagiosplugin.guarded
def main():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('-H', '--hostname', dest='hostname',
help='Postgres server hostname')
parser.add_argument('-p', '--port', dest='port',
default='5432',
help='Postgres port (Default: 5432)')
parser.add_argument('-u', '--username', dest='username',
default='icinga-ido',
help='Postgres username (Default: icinga-ido)')
parser.add_argument('-a', '--password', dest='password',
default='icinga',
help='Postgres password (Default: icinga)')
parser.add_argument('-n', '--db-name', dest='db_name',
default='icinga_ido',
help='Postgres database name (Default: icinga_ido)')
parser.add_argument('-w', '--warning', dest='warning',
default='120',
help='warning threshold for age of last programstatus or service status update, in seconds (Default: 120 / 2m)')
parser.add_argument('-c', '--critical', dest='critical',
default='600',
help='critical threshold for age of last programstatus or service status update, in seconds (Default: 600 / 10m)')
parser.add_argument('-v', '--verbose', action='count', default=0,
help='increase output verbosity (use up to 3 times)')
parser.add_argument('-t', '--timeout', dest='timeout',
default=30,
help='timeout (in seconds) for the command (Default: 30)')
args = parser.parse_args()
if not args.hostname:
raise nagiosplugin.CheckError('hostname (-H|--hostname) must be provided')
check = nagiosplugin.Check(
IdoStatus(args.hostname, args.db_name, args.username, args.password, args.port),
nagiosplugin.ScalarContext('programstatus_age', args.warning, args.critical),
nagiosplugin.ScalarContext('last_check_age', args.warning, args.critical),
LoadSummary(args.db_name))
check.main(args.verbose, args.timeout)
if __name__ == '__main__':
main()
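# --- Example invocation (illustrative only) ---
# A hedged sketch of a typical command line; the hostname, credentials and
# thresholds below are placeholders, not values shipped with this script:
#
#   ./check_icinga_ido.py -H db1.example.com -n icinga_ido -u icinga-ido \
#       -a secret -w 120 -c 600 -vv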
|
gpl-3.0
| 4,422,069,438,603,399,000 | 41.833333 | 222 | 0.608157 | false | 3.785597 | false | false | false |
3DLIRIOUS/BlendSCAD
|
examples/example014.scad.py
|
1
|
1763
|
# OpenSCAD example, ported by Michael Mlivoncic
# a beautiful dice...
# an interesting test case, to get the Boolean operations somehow fixed (TODO)
#import sys
#sys.path.append("O:/BlenderStuff")
import blendscad
#import imp
#imp.reload(blendscad)
#imp.reload(blendscad.core)
#imp.reload(blendscad.primitives)
blendscad.initns( globals() ) # try to add BlendSCAD names to current namespace .. as if they would be in this file...
## Clear the open .blend file!!!
clearAllObjects()
###### End of Header ##############################################################################
# OpenSCAD's intersection_for() is only a workaround: since the standard "for" implies a union of its content, this one is a combination of
# for() and intersection() statements.
# Not really needed, as we currently do not support implicit union()s, but it demonstrates how the statement would be rewritten.
# see: http://en.wikibooks.org/wiki/OpenSCAD_User_Manual/The_OpenSCAD_Language#Intersection_For_Loop
# intersection_for(i = [
# [0, 0, 0],
# [10, 20, 300],
# [200, 40, 57],
# [20, 88, 57]
# ])
# rotate(i) cube([100, 20, 20], center = true)
# example 2 - rotation:
#intersection_for(i = [ ]
tmp = None
rnge = [ [ 0, 0, 0],
[ 10, 20, 300],
[200, 40, 57],
[ 20, 88, 57] ]
for i in rnge:
tmp = intersection(
rotate(i ,
cube([100, 20, 20], center = true))
, tmp);
###### Begin of Footer ##############################################################################
color(rands(0,1,3)) # random color last object. to see "FINISH" :-)
# print timestamp and finish - sometimes it is easier to see differences in console then :-)
import time
import datetime
st = datetime.datetime.fromtimestamp( time.time() ).strftime('%Y-%m-%d %H:%M:%S')
echo ("FINISH", st)
|
gpl-3.0
| -6,193,139,217,817,806,000 | 26.546875 | 131 | 0.614861 | false | 3.234862 | false | false | false |
stackunderflow-stackptr/stackptr_web
|
crossbarconnect/client.py
|
1
|
8527
|
###############################################################################
##
## Copyright (C) 2012-2014 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
__all__ = ['Client']
try:
import ssl
_HAS_SSL = True
except ImportError:
_HAS_SSL = False
import sys
_HAS_SSL_CLIENT_CONTEXT = sys.version_info >= (2,7,9)
import json
import hmac
import hashlib
import base64
import random
from datetime import datetime
import six
from six.moves.urllib import parse
from six.moves.http_client import HTTPConnection, HTTPSConnection
def _utcnow():
"""
Get current time in UTC as ISO 8601 string.
:returns str -- Current time as string in ISO 8601 format.
"""
now = datetime.utcnow()
return now.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z"
def _parse_url(url):
"""
Parses a Crossbar.io HTTP bridge URL.
"""
parsed = parse.urlparse(url)
if parsed.scheme not in ["http", "https"]:
raise Exception("invalid Push URL scheme '%s'" % parsed.scheme)
if parsed.port is None or parsed.port == "":
if parsed.scheme == "http":
port = 80
elif parsed.scheme == "https":
port = 443
else:
raise Exception("logic error")
else:
port = int(parsed.port)
if parsed.fragment is not None and parsed.fragment != "":
raise Exception("invalid Push URL: non-empty fragment '%s" % parsed.fragment)
if parsed.query is not None and parsed.query != "":
raise Exception("invalid Push URL: non-empty query string '%s" % parsed.query)
if parsed.path is not None and parsed.path != "":
ppath = parsed.path
path = parse.unquote(ppath)
else:
ppath = "/"
path = ppath
return {'secure': parsed.scheme == "https",
'host': parsed.hostname,
'port': port,
'path': path}
class Client:
"""
Crossbar.io HTTP bridge client.
"""
def __init__(self, url, key = None, secret = None, timeout = 5, context = None):
"""
Create a new Crossbar.io push client.
The only mandatory argument is the Push service endpoint of the Crossbar.io
instance to push to.
For signed pushes, provide authentication key and secret. If those are not
given, unsigned pushes are performed.
:param url: URL of the HTTP bridge of Crossbar.io (e.g. http://example.com:8080/push).
:type url: str
:param key: Optional key to use for signing requests.
:type key: str
:param secret: When using signed request, the secret corresponding to key.
:type secret: str
:param timeout: Timeout for requests.
:type timeout: int
:param context: If the HTTP bridge is running on HTTPS (that is securely over TLS),
then the context provides the SSL settings the client should use (e.g. the
certificate chain against which to verify the server certificate). This parameter
is only available on Python 2.7.9+ and Python 3 (otherwise the parameter is silently
ignored!). See: https://docs.python.org/2/library/ssl.html#ssl.SSLContext
:type context: obj or None
"""
if six.PY2:
if type(url) == str:
url = six.u(url)
if type(key) == str:
key = six.u(key)
if type(secret) == str:
secret = six.u(secret)
assert(type(url) == six.text_type)
assert((key and secret) or (not key and not secret))
assert(key is None or type(key) == six.text_type)
assert(secret is None or type(secret) == six.text_type)
assert(type(timeout) == int)
if _HAS_SSL and _HAS_SSL_CLIENT_CONTEXT:
assert(context is None or isinstance(context, ssl.SSLContext))
self._seq = 1
self._key = key
self._secret = secret
self._endpoint = _parse_url(url)
self._endpoint['headers'] = {
"Content-type": "application/json",
"User-agent": "crossbarconnect-python"
}
if self._endpoint['secure']:
if not _HAS_SSL:
raise Exception("Bridge URL is using HTTPS, but Python SSL module is missing")
if _HAS_SSL_CLIENT_CONTEXT:
self._connection = HTTPSConnection(self._endpoint['host'],
self._endpoint['port'], timeout = timeout, context = context)
else:
self._connection = HTTPSConnection(self._endpoint['host'],
self._endpoint['port'], timeout = timeout)
else:
self._connection = HTTPConnection(self._endpoint['host'],
self._endpoint['port'], timeout = timeout)
def publish(self, topic, *args, **kwargs):
"""
Publish an event to subscribers on specified topic via Crossbar.io HTTP bridge.
The event payload (positional and keyword) can be of any type that can be
serialized to JSON.
If `kwargs` contains an `options` attribute, this is expected to
be a dictionary with the following possible parameters:
* `exclude`: A list of WAMP session IDs to exclude from receivers.
* `eligible`: A list of WAMP session IDs eligible as receivers.
:param topic: Topic to push to.
:type topic: str
:param args: Arbitrary application payload for the event (positional arguments).
:type args: list
:param kwargs: Arbitrary application payload for the event (keyword arguments).
:type kwargs: dict
:returns int -- The event publication ID assigned by the broker.
"""
if six.PY2 and type(topic) == str:
topic = six.u(topic)
assert(type(topic) == six.text_type)
## this will get filled and later serialized into HTTP/POST body
##
event = {
'topic': topic
}
if 'options' in kwargs:
event['options'] = kwargs.pop('options')
assert(type(event['options']) == dict)
if args:
event['args'] = args
if kwargs:
event['kwargs'] = kwargs
try:
body = json.dumps(event, separators = (',',':'))
if six.PY3:
body = body.encode('utf8')
except Exception as e:
raise Exception("invalid event payload - not JSON serializable: {0}".format(e))
params = {
'timestamp': _utcnow(),
'seq': self._seq,
}
if self._key:
## if the request is to be signed, create extra fields and signature
params['key'] = self._key
params['nonce'] = random.randint(0, 9007199254740992)
# HMAC[SHA256]_{secret} (key | timestamp | seq | nonce | body) => signature
hm = hmac.new(self._secret.encode('utf8'), None, hashlib.sha256)
hm.update(params['key'].encode('utf8'))
hm.update(params['timestamp'].encode('utf8'))
hm.update(u"{0}".format(params['seq']).encode('utf8'))
hm.update(u"{0}".format(params['nonce']).encode('utf8'))
hm.update(body)
signature = base64.urlsafe_b64encode(hm.digest())
params['signature'] = signature
self._seq += 1
path = "{0}?{1}".format(parse.quote(self._endpoint['path']), parse.urlencode(params))
## now issue the HTTP/POST
##
self._connection.request('POST', path, body, self._endpoint['headers'])
response = self._connection.getresponse()
response_body = response.read()
if response.status not in [200, 202]:
raise Exception("publication request failed {0} [{1}] - {2}".format(response.status, response.reason, response_body))
try:
res = json.loads(response_body)
except Exception as e:
raise Exception("publication request bogus result - {0}".format(e))
return res['id']
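# --- Usage sketch (illustrative only) ---
# A minimal example of pushing an event through the Crossbar.io HTTP bridge;
# the bridge URL, topic and payload below are assumptions for illustration:
#
#   client = Client(u"http://127.0.0.1:8080/push")
#   publication_id = client.publish(u"com.myapp.topic1", "hello", count=3)
#   print(publication_id)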
|
agpl-3.0
| 3,583,649,226,256,367,000 | 32.108 | 126 | 0.587194 | false | 4.062411 | false | false | false |
VahidooX/DeepCCA
|
objectives.py
|
1
|
2281
|
import theano.tensor as T
def cca_loss(outdim_size, use_all_singular_values):
"""
The main loss function (inner_cca_objective) is wrapped in this function due to
the constraints imposed by Keras on objective functions
"""
def inner_cca_objective(y_true, y_pred):
"""
It is the loss function of CCA as introduced in the original paper. There can be other formulations.
It is implemented by Theano tensor operations, and does not work on Tensorflow backend
y_true is just ignored
"""
r1 = 1e-4
r2 = 1e-4
eps = 1e-12
o1 = o2 = y_pred.shape[1]//2
# unpack (separate) the output of networks for view 1 and view 2
H1 = y_pred[:, 0:o1].T
H2 = y_pred[:, o1:o1+o2].T
m = H1.shape[1]
H1bar = H1 - (1.0 / m) * T.dot(H1, T.ones([m, m]))
H2bar = H2 - (1.0 / m) * T.dot(H2, T.ones([m, m]))
SigmaHat12 = (1.0 / (m - 1)) * T.dot(H1bar, H2bar.T)
SigmaHat11 = (1.0 / (m - 1)) * T.dot(H1bar, H1bar.T) + r1 * T.eye(o1)
SigmaHat22 = (1.0 / (m - 1)) * T.dot(H2bar, H2bar.T) + r2 * T.eye(o2)
# Calculating the root inverse of covariance matrices by using eigen decomposition
[D1, V1] = T.nlinalg.eigh(SigmaHat11)
[D2, V2] = T.nlinalg.eigh(SigmaHat22)
# Added to increase stability
posInd1 = T.gt(D1, eps).nonzero()[0]
D1 = D1[posInd1]
V1 = V1[:, posInd1]
posInd2 = T.gt(D2, eps).nonzero()[0]
D2 = D2[posInd2]
V2 = V2[:, posInd2]
SigmaHat11RootInv = T.dot(T.dot(V1, T.nlinalg.diag(D1 ** -0.5)), V1.T)
SigmaHat22RootInv = T.dot(T.dot(V2, T.nlinalg.diag(D2 ** -0.5)), V2.T)
Tval = T.dot(T.dot(SigmaHat11RootInv, SigmaHat12), SigmaHat22RootInv)
if use_all_singular_values:
# all singular values are used to calculate the correlation
corr = T.sqrt(T.nlinalg.trace(T.dot(Tval.T, Tval)))
else:
# just the top outdim_size singular values are used
[U, V] = T.nlinalg.eigh(T.dot(Tval.T, Tval))
U = U[T.gt(U, eps).nonzero()[0]]
U = U.sort()
corr = T.sum(T.sqrt(U[0:outdim_size]))
return -corr
return inner_cca_objective
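# --- Usage sketch (illustrative only) ---
# How the wrapped objective is typically handed to Keras (Theano backend);
# `model`, the outdim_size value and the optimizer are assumptions, not
# defined in this file:
#
#   model.compile(loss=cca_loss(outdim_size=10, use_all_singular_values=False),
#                 optimizer='rmsprop')
#
# The objective maximizes the total canonical correlation, i.e. the sum of
# the top singular values of T = SigmaHat11^(-1/2) . SigmaHat12 . SigmaHat22^(-1/2),
# so -corr is returned for the minimizer.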
|
mit
| 6,360,399,072,148,163,000 | 34.640625 | 108 | 0.562034 | false | 2.809113 | false | false | false |
googleads/googleads-python-lib
|
examples/ad_manager/v202011/activity_group_service/get_active_activity_groups.py
|
1
|
1957
|
#!/usr/bin/env python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example gets all active activity groups.
"""
# Import appropriate modules from the client library.
from googleads import ad_manager
def main(client):
# Initialize appropriate service.
activity_group_service = client.GetService(
'ActivityGroupService', version='v202011')
# Create a statement to select activity groups.
statement = (ad_manager.StatementBuilder(version='v202011')
.Where('status = :status')
.WithBindVariable('status', 'ACTIVE'))
# Retrieve a small amount of activity groups at a time, paging
# through until all activity groups have been retrieved.
while True:
response = activity_group_service.getActivityGroupsByStatement(
statement.ToStatement())
if 'results' in response and len(response['results']):
for activity_group in response['results']:
# Print out some information for each activity group.
print('Activity group with ID "%d" and name "%s" was found.\n' %
(activity_group['id'], activity_group['name']))
statement.offset += statement.limit
else:
break
print('\nNumber of results found: %s' % response['totalResultSetSize'])
if __name__ == '__main__':
# Initialize client object.
ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
main(ad_manager_client)
|
apache-2.0
| 721,227,215,421,250,300 | 36.634615 | 74 | 0.709249 | false | 4.217672 | false | false | false |
macioosch/dynamo-hard-spheres-sim
|
convergence-plot.py
|
1
|
6346
|
#!/usr/bin/env python2
# encoding=utf-8
from __future__ import division, print_function
from glob import glob
from itertools import izip
from matplotlib import pyplot as plt
import numpy as np
input_files = glob("csv/convergence-256000-0.*.csv")
#input_files = glob("csv/convergence-500000-0.*.csv")
#input_files = glob("csv/convergence-1000188-0.*.csv")
#plotted_parameter = "msds_diffusion"
plotted_parameter = "pressures_collision"
#plotted_parameter = "pressures_virial"
#plotted_parameter = "msds_val"
#plotted_parameter = "times"
legend_names = []
tight_layout = False
show_legend = False
for file_number, file_name in enumerate(sorted(input_files)):
data = np.genfromtxt(file_name, delimiter='\t', names=[
"packings","densities","collisions","n_atoms","pressures_virial",
"pressures_collision","msds_val","msds_diffusion","times",
"std_pressures_virial","std_pressures_collision","std_msds_val",
"std_msds_diffusion","std_times"])
n_atoms = data["n_atoms"][0]
density = data["densities"][0]
equilibrated_collisions = data["collisions"] - 2*data["collisions"][0] \
+ data["collisions"][1]
"""
### 5 graphs: D(CPS) ###
tight_layout = True
skip_points = 0
ax = plt.subplot(3, 2, file_number+1)
plt.fill_between((equilibrated_collisions / n_atoms)[skip_points:],
data[plotted_parameter][skip_points:]
- data["std_" + plotted_parameter][skip_points:],
data[plotted_parameter][skip_points:]
+ data["std_" + plotted_parameter][skip_points:], alpha=0.3)
plt.plot((equilibrated_collisions / n_atoms)[skip_points:],
data[plotted_parameter][skip_points:], lw=2)
if plotted_parameter == "msds_diffusion":
plt.ylim(0.990*data[plotted_parameter][-1],
1.005*data[plotted_parameter][-1])
plt.xlim([0, 1e5])
plt.legend(["Density {}".format(data["densities"][0])], loc="lower right")
ax.yaxis.set_major_formatter(plt.FormatStrFormatter('%.4f'))
plt.xlabel("Collisions per sphere")
plt.ylabel("D")
"""
### 5 graphs: relative D(CPS) ###
tight_layout = True
skip_points = 0
ax = plt.subplot(3, 2, file_number+1)
plt.fill_between((equilibrated_collisions / n_atoms)[skip_points:],
-1 + (data[plotted_parameter][skip_points:]
- data["std_" + plotted_parameter][skip_points:])/data[plotted_parameter][-1],
-1 + (data[plotted_parameter][skip_points:]
+ data["std_" + plotted_parameter][skip_points:])/data[plotted_parameter][-1], alpha=0.3)
plt.plot((equilibrated_collisions / n_atoms)[skip_points:],
-1 + data[plotted_parameter][skip_points:]/data[plotted_parameter][-1], lw=2)
plt.ylim(data["std_" + plotted_parameter][-1]*20*np.array([-1, 1])/data[plotted_parameter][-1])
#plt.xscale("log")
plt.xlim([0, 1e5])
plt.legend(["$\\rho\\sigma^3=\\ {}$".format(data["densities"][0])], loc="lower right")
ax.yaxis.set_major_formatter(plt.FormatStrFormatter('%.2e'))
plt.xlabel("$C/N$")
plt.ylabel("$[Z_{MD}(C) / Z_{MD}(C=10^5 N)] - 1$")
"""
### 1 graph: D(t) ###
show_legend = True
skip_points = 0
plt.title("D(t) for 5 densities")
plt.loglog(data["times"][skip_points:],
data[plotted_parameter][skip_points:])
legend_names.append(data["densities"][0])
plt.xlabel("Time")
plt.ylabel("D")
"""
"""
### 1 graph: D(t) / Dinf ###
show_legend = True
skip_points = 0
#plt.fill_between(data["times"][skip_points:],
# (data[plotted_parameter] - data["std_" + plotted_parameter])
# / data[plotted_parameter][-1] - 1,
# (data[plotted_parameter] + data["std_" + plotted_parameter])
# / data[plotted_parameter][-1] - 1, color="grey", alpha=0.4)
plt.plot(data["times"][skip_points:],
data[plotted_parameter] / data[plotted_parameter][-1] - 1, lw=1)
legend_names.append(data["densities"][0])
#plt.xscale("log")
plt.xlabel("Time")
plt.ylabel("D / D(t --> inf)")
"""
"""
### 5 graphs: D(1/CPS) ###
tight_layout = True
skip_points = 40
ax = plt.subplot(3, 2, file_number+1)
plt.fill_between((n_atoms / equilibrated_collisions)[skip_points:],
data[plotted_parameter][skip_points:]
- data["std_" + plotted_parameter][skip_points:],
data[plotted_parameter][skip_points:]
+ data["std_" + plotted_parameter][skip_points:], alpha=0.3)
plt.plot((n_atoms / equilibrated_collisions)[skip_points:],
data[plotted_parameter][skip_points:], lw=2)
plt.title("Density {}:".format(data["densities"][0]))
ax.yaxis.set_major_formatter(plt.FormatStrFormatter('%.7f'))
plt.xlim(xmin=0)
plt.xlabel("1 / Collisions per sphere")
plt.ylabel("D")
"""
"""
### 1 graph: D(CPS) / Dinf ###
show_legend = True
plt.fill_between(equilibrated_collisions / n_atoms,
(data[plotted_parameter] - data["std_" + plotted_parameter])
/ data[plotted_parameter][-1] - 1,
(data[plotted_parameter] + data["std_" + plotted_parameter])
/ data[plotted_parameter][-1] - 1, color="grey", alpha=0.4)
plt.plot(equilibrated_collisions / n_atoms,
data[plotted_parameter] / data[plotted_parameter][-1] - 1, lw=2)
legend_names.append(data["densities"][0])
plt.xlabel("Collisions per sphere")
plt.ylabel("D / D(t --> inf)")
"""
"""
### 1 graph: D(1/CPS) / Dinf ###
show_legend = True
plt.fill_between(n_atoms / equilibrated_collisions,
(data[plotted_parameter] - data["std_" + plotted_parameter])
/ data[plotted_parameter][-1] - 1,
(data[plotted_parameter] + data["std_" + plotted_parameter])
/ data[plotted_parameter][-1] - 1, color="grey", alpha=0.4)
plt.plot( n_atoms / equilibrated_collisions,
data[plotted_parameter] / data[plotted_parameter][-1] - 1)
legend_names.append(data["densities"][0])
plt.xlabel(" 1 / Collisions per sphere")
plt.ylabel(plotted_parameter)
"""
#if tight_layout:
# plt.tight_layout(pad=0.0, w_pad=0.0, h_pad=0.0)
if show_legend:
plt.legend(legend_names, title="Density:", loc="lower right")
plt.show()
|
gpl-3.0
| 1,205,206,185,801,680,600 | 39.941935 | 101 | 0.601954 | false | 3.083576 | false | false | false |
amdouglas/OpenPNM
|
OpenPNM/Geometry/models/throat_misc.py
|
1
|
1124
|
r"""
===============================================================================
throat_misc -- Miscellaneous and generic functions to apply to throats
===============================================================================
"""
import scipy as _sp
def random(geometry, seed=None, num_range=[0, 1], **kwargs):
r"""
    Assign random numbers to throats
note: should this be called 'poisson'?
"""
range_size = num_range[1] - num_range[0]
range_min = num_range[0]
_sp.random.seed(seed=seed)
value = _sp.random.rand(geometry.num_throats(),)
value = value*range_size + range_min
return value
def neighbor(geometry, network, pore_prop='pore.seed', mode='min', **kwargs):
r"""
Adopt a value based on the neighboring pores
"""
throats = network.throats(geometry.name)
P12 = network.find_connected_pores(throats)
pvalues = network[pore_prop][P12]
if mode == 'min':
value = _sp.amin(pvalues, axis=1)
if mode == 'max':
value = _sp.amax(pvalues, axis=1)
if mode == 'mean':
value = _sp.mean(pvalues, axis=1)
return value
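# --- Usage sketch (illustrative only) ---
# How such a model is typically attached in OpenPNM 1.x; the property names
# and the add_model signature are assumptions based on that API generation:
#
#   geom.add_model(propname='throat.seed',
#                  model=OpenPNM.Geometry.models.throat_misc.neighbor,
#                  pore_prop='pore.seed',
#                  mode='min')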
|
mit
| 7,511,632,487,340,780,000 | 30.222222 | 79 | 0.536477 | false | 3.523511 | false | false | false |
Digmaster/TicTacToe
|
Agent.py
|
1
|
2030
|
from random import randint
from random import getrandbits
from copy import deepcopy
# Agent that will either be the human player or a secondary agent for the dual agent play
class DumbAgent:
#initialize the board for the first player
def __init__(self, board):
self.board = board
def __str__(self):
return "Hi, Im dumb agent. I play randomly as player {0}".format(self.player)
    # read in the next move for the human or secondary agent
def getNextMove(self, player):
board = deepcopy(self.board)
if(player!='X' and player!='O'):
raise ValueError('The only valid players are X and O')
while(True):
try:
square = randint(1, 9)
board.setSquare(square, player)
return square
            except ValueError:
                # square already taken; try another random square
                pass
# Define the smart agent - uses the minimax algorithm
class SmartAgent:
def __init__(self, board):
self.board = board
self.signal = False
self.bestVal = None
def __str__(self):
return "Hi, Im smart agent. I whatever move will net me the most points, or avail my enemy of points. I'm {0}".format(self.player)
    # to get the next move, call the decideMove function
def getNextMove(self, player):
self.decideMove(deepcopy(self.board), player)
return self.bestVal
def decideMove(self, board, player):
if(self.signal):
return 0
winner = board.testWin() # test for a winning solution to the current state
if(winner!='.'):
if(winner=='X'):
return 1.0
elif(winner=='T'):
return 0.0
else:
return -1.0
values = []
moves = {}
for i in range(1,10):
if(self.signal):
return 0
if(board.getSquare(i)=='.'):
nBoard = deepcopy(board)
nBoard.setSquare(i, player)
value = self.decideMove(nBoard, 'X' if player=='O' else 'O')
values.append(value)
moves[value] = i
if(player=='X'and value==1):
break
elif(player=='O' and value==-1):
break
# calculate the highest probability / best move
        if(player=='X'):
            best = max(values)
        else:
            best = min(values)
        self.bestVal = moves[best]
        return best
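# --- Usage sketch (illustrative only) ---
# Both agents expect a board object exposing setSquare/getSquare/testWin and
# safe deep copies; `Board` below is a hypothetical class from the game module:
#
#   board = Board()
#   agent = SmartAgent(board)
#   move = agent.getNextMove('X')  # square number 1..9 chosen by minimax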
|
apache-2.0
| 287,197,937,694,822,820 | 25.363636 | 132 | 0.666995 | false | 3.142415 | false | false | false |
mfit/PdfTableAnnotator
|
script/csv-compare.py
|
1
|
8051
|
"""
Copyright 2014 Matthias Frey
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""
CSV-compare
-----------
Compare table data stored in CSV (comma seperated values) format.
"""
import re
import csv
import sys
import os
def _pr_list(l1, l2, replace_chars = '[\n ]'):
""" Calculate precision and recall regarding elements of a list.
When a 1:1 match cannot be achieved, the list pointers will be
moved forward until a match occurs (first of list A, then of list B).
The closest match will count, and matching will continue from those
list positions onwards.
The replace_chars parameter is used to remove characters from the
strings before comparing. The default will remove newlines and spaces.
"""
def _fnext(l, item):
item = re.sub(replace_chars, '', item).strip()
for i, txt in enumerate(l):
txt = re.sub(replace_chars, '', txt).strip()
if txt == item:
return i
return -1
if len(l2)==0 or len(l1)==0:
return 0, 0
i = 0
j = 0
match = 0
while len(l1)>i and len(l2)>j:
t1 = re.sub(replace_chars, '', l1[i]).strip()
t2 = re.sub(replace_chars, '', l2[j]).strip()
if t1 == t2:
match += 1
i += 1
j += 1
else:
ii = _fnext(l1[i:], l2[j])
jj = _fnext(l2[j:], l1[i])
if ii>=0 and (ii<jj or jj<0): i+=ii
elif jj>=0: j+=jj
else:
i+=1
j+=1
return float(match)/len(l2), float(match)/len(l1)
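# Illustrative behaviour of _pr_list (not part of the original module):
# _pr_list(['a', 'b', 'c'], ['a', 'c']) skips the unmatched 'b', matches 'a'
# and 'c', and returns (1.0, 2/3): matches over len(second list), then
# matches over len(first list).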
def clean_table(tab):
""" Remove trailing empty cells resulting from the way some
spreadsheet application output csv for multi table documents.
"""
if len(tab) == 0:
return []
n_empty=[]
for row in tab:
for n, val in enumerate(reversed(row)):
if val!='':
break
n_empty.append(n)
strip_cols = min(n_empty)
cleaned = []
for row in tab:
cleaned.append(row[0:len(row)-strip_cols])
return cleaned
def compare_tables(tab1, tab2):
""" Compare two tables (2dim lists).
"""
info = {'rows_a':len(tab1),
'rows_b':len(tab2),
'rows_match': 1 if len(tab1) == len(tab2) else 0,
}
sizesA = [len(l) for l in tab1]
sizesB = [len(l) for l in tab2]
info['dim_match'] = 1 if sizesA == sizesB else 0
    info['size_a'] = sum(sizesA)
    info['size_b'] = sum(sizesB)
if len(sizesA)>0 and len(sizesB)>0:
info['cols_match'] = 1 if min(sizesA) == max(sizesA) and \
min(sizesB) == max(sizesB) and min(sizesA) == min(sizesB) else 0
# 'flatten' tables
cellsA = []
cellsB = []
for r in tab1: cellsA += [c for c in r]
for r in tab2: cellsB += [c for c in r]
info['p'], info['r'] = _pr_list(cellsA, cellsB)
info['F1'] = F1(info['p'], info['r'])
return info
def compare_files_pr(file1, file2):
""" Calculate simple P/R .
Compare lists of cells, left to right , top to bottom.
"""
cells = [[], []]
for i, fname in enumerate([file1, file2]):
with file(fname) as csvfile:
rd = csv.reader(csvfile, delimiter=',', quotechar='"')
for r in rd:
cells[i] += [c for c in r]
return _pr_list(*cells)
def compare_files(file1, file2):
""" Compare two csv files.
"""
groundtruth = read_tables_from_file(file1)
try:
compare = read_tables_from_file(file2)
except:
compare = []
tbs = [groundtruth, compare]
finfo = {'tabcount_a': len(tbs[0]),
'tabcount_b': len(tbs[1]),
'tabcount_match': len(tbs[0]) == len(tbs[1]),
}
finfo['tables']=[]
for n in range(0, len(tbs[0])):
if finfo['tabcount_match']:
comp_info = compare_tables(tbs[0][n], tbs[1][n])
else:
if n < len(tbs[1]):
comp_info = compare_tables(tbs[0][n], tbs[1][n])
else:
comp_info = compare_tables(tbs[0][n], [[]])
comp_info['n']=n
finfo['tables'].append(comp_info)
return finfo
def output_compareinfo_csv(file, info, fields=['p', 'r', 'F1']):
""" Pre-format a row that holds measures about similarity of a table
to the ground truth.
"""
lines = []
tabmatch = 1 if info['tabcount_match'] else 0
for tinfo in info['tables']:
lines.append([file, str(tabmatch)] + [str(tinfo[k]) for k in fields])
return lines
def F1(p, r):
""" Calculate F1 score from precision and recall.
Returns zero if one of p, r is zero.
"""
return (2*p*r/(p+r)) if p != 0 and r != 0 else 0
def read_tables_from_file(csvfile):
""" Opens csvfile, returns all tables found.
Guesses csv format (delimiter, etc.)
Splits data into different tables at newline (or empty row).
Returns list of tables.
"""
tables=[]
table_id = 0
with file(csvfile) as f:
sniffer = csv.Sniffer()
dialect = sniffer.sniff(f.next())
rd = csv.reader(f, delimiter=dialect.delimiter,
quotechar=dialect.quotechar)
for r in rd:
if len(tables) <= table_id:
tables.append([])
# Begin next table if there is an empty line
if r == [] or sum([len(v) for v in r]) == 0:
if len(tables[table_id])>0:
table_id+=1
else:
tables[table_id].append(r)
return [clean_table(t) for t in tables if t!=[]]
if __name__ == '__main__':
""" Script usage.
"""
fields = [
#'rows_a', 'rows_b',
#'size_a', 'size_b',
'n',
'rows_match', 'cols_match', 'dim_match',
'p', 'r', 'F1',]
limitchar = ' & '
if len(sys.argv) < 3:
print "Specify two (csv-)files or directories"
quit(-1)
# Params 1 + 2 are files or directories
file1 = sys.argv[1]
file2 = sys.argv[2]
srcinfo = [os.path.basename(file1), os.path.basename(file2)]
# 3rd parameter becomes 'tooldef' (text cols to name rows),
# and 4th parameter tells whether to print headers
tooldef = sys.argv[3].split('-') if len(sys.argv) > 3 else ['na', 'na']
print_headers = len(sys.argv) > 4 and sys.argv[4] in ["1", "y", "yes"]
if print_headers:
print ','.join(['name', 'tool', 'src1', 'src2',
'filename', 'tabsmatch',] + fields)
if os.path.isfile(file1) and os.path.isfile(file2):
inf = compare_files(file1, file2)
lines = output_compareinfo_csv(file1, inf, fields)
for l in lines:
print ','.join(tooldef + srcinfo + l)
elif os.path.isdir(file1) and os.path.isdir(file2):
for f in [path for path in os.listdir(file1) if path[-4:]=='.csv']:
if os.path.isfile(file2 + '/' + f):
inf = compare_files(file1 + '/' + f, file2 + '/' + f)
lines = output_compareinfo_csv(f, inf, fields)
for l in lines:
print ','.join(tooldef + srcinfo + l)
else:
print ','.join(['','',] + srcinfo + ['', "Missing {} for {} {}".format(f, *tooldef)])
|
apache-2.0
| -7,229,538,163,487,513,000 | 29.044776 | 101 | 0.527264 | false | 3.549824 | false | false | false |
3dfxmadscientist/odoo-infrastructure
|
addons/infrastructure/hostname.py
|
1
|
1468
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Infrastructure
# Copyright (C) 2014 Ingenieria ADHOC
# No email
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import re
from openerp import netsvc
from openerp.osv import osv, fields
class hostname(osv.osv):
""""""
_name = 'infrastructure.hostname'
_description = 'hostname'
_columns = {
'name': fields.char(string='name', required=True),
'server_id': fields.many2one('infrastructure.server', string='Server', ondelete='cascade', required=True),
}
_defaults = {
}
_constraints = [
]
hostname()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
| 2,010,866,711,856,777,500 | 27.784314 | 115 | 0.608311 | false | 4.242775 | false | false | false |
locke105/mclib
|
examples/wsgi.py
|
1
|
1781
|
import cgi
import json
from wsgiref import simple_server
import falcon
from mclib import mc_info
class MCInfo(object):
def on_get(self, req, resp):
host = req.get_param('host', required=True)
port = req.get_param_as_int('port', min=1024,
                                    max=65535)
try:
if port is not None:
info = mc_info.get_info(host=host,
port=port)
else:
info = mc_info.get_info(host=host)
except Exception:
raise Exception('Couldn\'t retrieve info.')
if '.json' in req.uri:
resp.body = self.get_json(info)
return
preferred = req.client_prefers(['application/json', 'text/html'])
if 'html' in preferred:
resp.content_type = 'text/html'
resp.body = self.get_html(info)
else:
resp.body = self.get_json(info)
def get_html(self, info):
html = """<body>
<style>
table,th,td
{
border:1px solid black;
border-collapse:collapse
}
th,td
{
padding: 5px
}
</style>
<table>
"""
for k,v in info.iteritems():
items = {'key': cgi.escape(k)}
if isinstance(v, basestring):
items['val'] = cgi.escape(v)
else:
items['val'] = v
html = html + '<tr><td>%(key)s</td><td>%(val)s</td></tr>' % items
html = html + '</table></body>'
return html
def get_json(self, info):
return json.dumps(info)
app = falcon.API()
mcinfo = MCInfo()
app.add_route('/mcinfo', mcinfo)
app.add_route('/mcinfo.json', mcinfo)
if __name__ == '__main__':
httpd = simple_server.make_server('0.0.0.0', 3000, app)
httpd.serve_forever()
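# --- Example request (illustrative only) ---
# With the server running locally, the endpoint can be queried like this;
# the Minecraft host below is a placeholder:
#
#   curl 'http://localhost:3000/mcinfo.json?host=mc.example.com&port=25565'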
|
apache-2.0
| 4,936,456,139,620,774,000 | 21.2625 | 77 | 0.521617 | false | 3.485323 | false | false | false |
Meertecha/LearnPythonTheGame
|
pyGameEngine.py
|
1
|
3565
|
### Imports
import pickle, os, platform, random
### Functions
def main():
curPlayer = loadPlayer( 'Tory' )
curGame = loadGame( 'Python_Tutorial' )
startGame(curPlayer, curGame)
def banner():
'''
if platform.system() == "Windows":
clearCmd = "cls"
elif platform.system() == "Linux":
clearCmd = "clear"
else:
print ("Unknown operating system detected. Some operations may not perform correctly!\n")
os.system(clearCmd)
'''
version = 0.1
banner = (" **Welcome to the Python Learning Environment\n\
**Written by Tory Clasen - Version: " + str(version) + " \n\
**For help at any time please type '?' or 'help' \n\
**To exit the program type 'exit' or 'quit' \n\n")
print banner
def startGame(curPlayer, curGame):
try:
curScore = curPlayer['score'][curGame['gameName']]
except:
curScore = 0
while True:
#banner()
print '----------------------------------------\n' + curGame['gameName'] + ' has been loaded'
print curGame['banner'] + '\n----------------------------------------'
try:
pickle.dump( curPlayer, open( ( str(curPlayer['Name']) + ".plep"), "wb" ) )
except:
print "Error! Unable to save player profile at current location!"
print 'Your current score is: ' + str(curScore) + ' out of a total possible score of: ' + str(len(curGame['gameData']))
print "Question " + str(curScore) + ": \n" + str(curGame['gameData'][curScore]["Q"]) + "\n"
temp = curGame['gameData'][curScore]["D"]
data = eval(str(curGame['gameData'][curScore]["D"]))
print "Data " + str(curScore) + ": \n" + data
print '----------------------------------------\n'
try:
myAnswer = eval(str(getInput('What command do you want to submit? ')))
if myAnswer == (eval(str(curGame['gameData'][curScore]["A"]))):
print "Correct!"
curScore = curScore + 1
else:
print "Incorrect!"
except:
print 'The answer you submitted crashed the program, so it was probably wrong'
#break
def getInput(prompt):
theInput = raw_input( str(prompt) + "\n" )
if theInput == '?' or theInput.lower() == 'help':
print "HELP! HELP!"
elif theInput.lower() == 'exit' or theInput.lower() == 'quit':
raise SystemExit
else:
return theInput
def loadPlayer(playerName = ''):
#banner()
curPlayer = {}
if playerName == '':
playerName = getInput("I would like to load your profile. \nWhat is your name? ")
try:
# Attempt to load the player file.
curPlayer = pickle.load( open( ( str(playerName) + ".plep"), "rb" ) )
print "Player profile found... loading player data..."
except:
# Ask the player if they want to try to create a new profile file.
createNew = getInput( "Player profile not found for '" + str(playerName) + "'\nWould you like to create a new one? [Y/N]").lower()
curPlayer = {'Name':playerName}
if createNew == "y":
try:
pickle.dump( curPlayer, open( ( str(playerName) + ".plep"), "wb" ) )
print "Player profile successfully created!"
except:
print "Error! Unable to create player profile at current location!"
else:
print "Progress will not be saved for you..."
return curPlayer
def loadGame(gameName = ''):
banner()
curGame = {}
while True:
if gameName == '':
gameName = getInput("What game would you like to load? ")
try:
# Attempt to load the player file.
curGame = pickle.load( open( ( str(gameName) + ".pleg"), "rb" ) )
print "Game module found... loading game data..."
gameName = ''
break
except:
gameName = ''
print "Game module not found... please try again..."
return curGame
main()
|
mit
| 4,891,151,655,040,956,000 | 31.409091 | 133 | 0.615708 | false | 3.229167 | false | false | false |
openbig/odoo-contract
|
partner_billing/wizard/sale_make_invoice_advance.py
|
1
|
1615
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# partner_billing
# (C) 2015 Mikołaj Dziurzyński, Grzegorz Grzelak, Thorsten Vocks (big-consulting GmbH)
# All Rights reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
from openerp import fields, models
import logging
_logger = logging.getLogger(__name__)
class sale_advance_payment_inv(osv.osv_memory):
_inherit = "sale.advance.payment.inv"
def _prepare_advance_invoice_vals(self, cr, uid, ids, context=None):
res = super(sale_advance_payment_inv,self)._prepare_advance_invoice_vals(cr, uid, ids, context=context)
sale_order_obj = self.pool.get('sale.order')
for pair in res:
for sale in sale_order_obj.browse(cr, uid, [pair[0]]):
pair[1]['associated_partner'] = sale.associated_partner and sale.associated_partner.id or False
return res
|
agpl-3.0
| -6,439,668,196,751,878,000 | 39.325 | 105 | 0.651581 | false | 3.751163 | false | false | false |
lipixun/pytest
|
rabbitmq/deadchannel/going2dead.py
|
1
|
2112
|
#!/usr/bin/env python
# encoding=utf8
# The dead channel application
import sys
reload(sys)
sys.setdefaultencoding('utf8')
from uuid import uuid4
from time import time, sleep
from haigha.connections.rabbit_connection import RabbitConnection
from haigha.message import Message
class Client(object):
"""The RPC Client
"""
def __init__(self, host, port, vhost, user, password):
"""Create a new Server
"""
self._conn = RabbitConnection(host = host, port = port, vhost = vhost, user = user, password = password)
self._channel = self._conn.channel()
result = self._channel.queue.declare(arguments = { 'x-dead-letter-exchange': 'amq.topic', 'x-dead-letter-routing-key': 'test.dead_channel' })
self._deadQueue = result[0]
# Send a message
self._channel.basic.publish(Message('OMG! I\'m dead!'), '', self._deadQueue)
def dead(self):
"""Normal dead
"""
self._channel.close()
if __name__ == '__main__':
from argparse import ArgumentParser
def getArguments():
"""Get arguments
"""
parser = ArgumentParser(description = 'RabbitMQ dead channel client')
parser.add_argument('--host', dest = 'host', required = True, help = 'The host')
parser.add_argument('--port', dest = 'port', default = 5672, type = int, help = 'The port')
parser.add_argument('--vhost', dest = 'vhost', default = '/test', help = 'The virtual host')
parser.add_argument('--user', dest = 'user', default = 'test', help = 'The user name')
parser.add_argument('--password', dest = 'password', default = 'test', help = 'The password')
# Done
return parser.parse_args()
def main():
"""The main entry
"""
args = getArguments()
# Create the server
client = Client(args.host, args.port, args.vhost, args.user, args.password)
# Go to dead
        print 'Will die in 10s, or you can use ctrl + c to cause an unexpected death'
sleep(10)
client.dead()
print 'Normal dead'
main()
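# Companion sketch for observing the dead-lettered message (assumptions: the
# same broker credentials as above, and haigha's channel.queue.bind /
# channel.basic.consume(queue, consumer) signatures). Not part of the
# original test script.
def watch_dead_channel(host, port, vhost, user, password):
    """Bind a fresh queue to amq.topic on test.dead_channel and print
    whatever the dying channel dead-letters there.
    """
    conn = RabbitConnection(host = host, port = port, vhost = vhost, user = user, password = password)
    channel = conn.channel()
    # Anonymous queue; declare returns (queue_name, message_count, consumer_count)
    queue = channel.queue.declare()[0]
    channel.queue.bind(queue, 'amq.topic', 'test.dead_channel')
    channel.basic.consume(queue, lambda msg: sys.stdout.write(str(msg) + '\n'))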
|
gpl-2.0
| -8,202,055,047,594,408,000 | 33.064516 | 149 | 0.606061 | false | 3.84 | false | false | false |
gf712/AbPyTools
|
abpytools/core/fab_collection.py
|
1
|
14123
|
from .chain_collection import ChainCollection
import numpy as np
import pandas as pd
from .chain import calculate_charge
from abpytools.utils import DataLoader
from operator import itemgetter
from .fab import Fab
from .helper_functions import germline_identity_pd, to_numbering_table
from .base import CollectionBase
import os
import json
from .utils import (json_FabCollection_formatter, pb2_FabCollection_formatter, pb2_FabCollection_parser,
json_FabCollection_parser)
from .flags import *
if BACKEND_FLAGS.HAS_PROTO:
from abpytools.core.formats import FabCollectionProto
class FabCollection(CollectionBase):
def __init__(self, fab=None, heavy_chains=None, light_chains=None, names=None):
"""
Fab object container that handles combinations of light/heavy Chain pairs.
Args:
fab (list):
heavy_chains (ChainCollection):
light_chains (ChainCollection):
names (list):
"""
# check if it's a Chain object
if heavy_chains is None and light_chains is None and fab is None:
            raise ValueError('Provide a list of Chain objects or a ChainCollection object')
# check if fab object is a list and if all object are abpytools.Fab objects
if isinstance(fab, list) and all(isinstance(fab_i, Fab) for fab_i in fab):
self._fab = fab
self._light_chains = ChainCollection([x[0] for x in self._fab])
self._heavy_chains = ChainCollection([x[1] for x in self._fab])
if fab is None and (heavy_chains is not None and light_chains is not None):
if isinstance(heavy_chains, list):
self._heavy_chains = ChainCollection(antibody_objects=heavy_chains)
elif isinstance(heavy_chains, ChainCollection):
self._heavy_chains = heavy_chains
else:
                raise ValueError('Provide a list of Chain objects or a ChainCollection object')
if isinstance(light_chains, list):
self._light_chains = ChainCollection(antibody_objects=light_chains)
elif isinstance(light_chains, ChainCollection):
self._light_chains = light_chains
else:
                raise ValueError('Provide a list of Chain objects or a ChainCollection object')
if len(self._light_chains.loading_status()) == 0:
self._light_chains.load()
if len(self._heavy_chains.loading_status()) == 0:
self._heavy_chains.load()
if self._light_chains.n_ab != self._heavy_chains.n_ab:
            raise ValueError('Number of heavy chains must be the same as the number of light chains')
if isinstance(names, list) and all(isinstance(name, str) for name in names):
if len(names) == self._heavy_chains.n_ab:
self._names = names
else:
raise ValueError(
'Length of name list must be the same as length of heavy_chains/light chains lists')
elif names is None:
self._names = ['{} - {}'.format(heavy, light) for heavy, light in zip(self._heavy_chains.names,
self._light_chains.names)]
else:
raise ValueError("Names expected a list of strings, instead got {}".format(type(names)))
self._n_ab = self._light_chains.n_ab
        self._pair_sequences = [heavy + light for heavy, light in zip(self._heavy_chains.sequences,
                                                                      self._light_chains.sequences)]
# keep the name of the heavy and light chains internally to keep everything in the right order
self._internal_heavy_name = self._heavy_chains.names
self._internal_light_name = self._light_chains.names
# even though it makes more sense to draw all these values from the base Fab objects this is much slower
# whenever self._n_ab > 1 it makes more sense to use the self._heavy_chain and self._light_chain containers
# in all the methods
# in essence the abpytools.Fab object is just a representative building block that could in future just
# cache data and would then represent a speed up in the calculations
def molecular_weights(self, monoisotopic=False):
return [heavy + light for heavy, light in zip(self._heavy_chains.molecular_weights(monoisotopic=monoisotopic),
self._light_chains.molecular_weights(monoisotopic=monoisotopic))]
def extinction_coefficients(self, extinction_coefficient_database='Standard', reduced=False, normalise=False,
**kwargs):
heavy_ec = self._heavy_chains.extinction_coefficients(
extinction_coefficient_database=extinction_coefficient_database,
reduced=reduced)
light_ec = self._light_chains.extinction_coefficients(
extinction_coefficient_database=extinction_coefficient_database,
reduced=reduced)
if normalise:
return [(heavy + light) / mw for heavy, light, mw in
zip(heavy_ec, light_ec, self.molecular_weights(**kwargs))]
else:
return [heavy + light for heavy, light in zip(heavy_ec, light_ec)]
def hydrophobicity_matrix(self):
return np.column_stack((self._heavy_chains.hydrophobicity_matrix(), self._light_chains.hydrophobicity_matrix()))
def charge(self):
return np.column_stack((self._heavy_chains.charge, self._light_chains.charge))
def total_charge(self, ph=7.4, pka_database='Wikipedia'):
available_pi_databases = ["EMBOSS", "DTASetect", "Solomon", "Sillero", "Rodwell", "Wikipedia", "Lehninger",
"Grimsley"]
assert pka_database in available_pi_databases, \
"Selected pI database {} not available. Available databases: {}".format(pka_database,
                                                                                    ', '.join(available_pi_databases))
data_loader = DataLoader(data_type='AminoAcidProperties', data=['pI', pka_database])
pka_data = data_loader.get_data()
return [calculate_charge(sequence=seq, ph=ph, pka_values=pka_data) for seq in self.sequences]
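    # Illustrative note (hypothetical numbers): with the default Wikipedia pKa
    # set, total_charge(ph=7.4) returns one net charge per heavy/light pair,
    # e.g. [1.7, -0.5, 3.2] for a three-antibody collection.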
def igblast_local_query(self, file_path, chain):
if chain.lower() == 'light':
self._light_chains.igblast_local_query(file_path=file_path)
elif chain.lower() == 'heavy':
self._heavy_chains.igblast_local_query(file_path=file_path)
else:
raise ValueError('Specify if the data being loaded is for the heavy or light chain')
def igblast_server_query(self, **kwargs):
self._light_chains.igblast_server_query(**kwargs)
self._heavy_chains.igblast_server_query(**kwargs)
def numbering_table(self, as_array=False, region='all', chain='both', **kwargs):
return to_numbering_table(as_array=as_array, region=region, chain=chain,
heavy_chains_numbering_table=self._heavy_chains.numbering_table,
light_chains_numbering_table=self._light_chains.numbering_table,
names=self.names, **kwargs)
def _germline_pd(self):
# empty dictionaries return false, so this condition checks if any of the values are False
if all([x for x in self._light_chains.germline_identity.values()]) is False:
# this means there is no information about the germline,
# by default it will run a web query
self._light_chains.igblast_server_query()
if all([x for x in self._heavy_chains.germline_identity.values()]) is False:
self._heavy_chains.igblast_server_query()
heavy_chain_germlines = self._heavy_chains.germline
light_chain_germlines = self._light_chains.germline
data = np.array([[heavy_chain_germlines[x][0] for x in self._internal_heavy_name],
[heavy_chain_germlines[x][1] for x in self._internal_heavy_name],
[light_chain_germlines[x][0] for x in self._internal_light_name],
[light_chain_germlines[x][1] for x in self._internal_light_name]]).T
df = pd.DataFrame(data=data,
columns=pd.MultiIndex.from_tuples([('Heavy', 'Assignment'),
('Heavy', 'Score'),
('Light', 'Assignment'),
('Light', 'Score')]),
index=self.names)
df.loc[:, (slice(None), 'Score')] = df.loc[:, (slice(None), 'Score')].apply(pd.to_numeric)
return df
def save_to_json(self, path, update=True):
with open(os.path.join(path + '.json'), 'w') as f:
fab_data = json_FabCollection_formatter(self)
json.dump(fab_data, f, indent=2)
def save_to_pb2(self, path, update=True):
proto_parser = FabCollectionProto()
try:
with open(os.path.join(path + '.pb2'), 'rb') as f:
proto_parser.ParseFromString(f.read())
except IOError:
# Creating new file
pass
pb2_FabCollection_formatter(self, proto_parser)
with open(os.path.join(path + '.pb2'), 'wb') as f:
f.write(proto_parser.SerializeToString())
def save_to_fasta(self, path, update=True):
raise NotImplementedError
@classmethod
def load_from_json(cls, path, n_threads=20, verbose=True, show_progressbar=True):
with open(path, 'r') as f:
data = json.load(f)
fab_objects = json_FabCollection_parser(data)
fab_collection = cls(fab=fab_objects)
return fab_collection
@classmethod
def load_from_pb2(cls, path, n_threads=20, verbose=True, show_progressbar=True):
with open(path, 'rb') as f:
proto_parser = FabCollectionProto()
proto_parser.ParseFromString(f.read())
fab_objects = pb2_FabCollection_parser(proto_parser)
fab_collection = cls(fab=fab_objects)
return fab_collection
@classmethod
def load_from_fasta(cls, path, numbering_scheme=NUMBERING_FLAGS.CHOTHIA, n_threads=20,
verbose=True, show_progressbar=True):
raise NotImplementedError
def _get_names_iter(self, chain='both'):
if chain == 'both':
for light_chain, heavy_chain in zip(self._light_chains, self._heavy_chains):
yield f"{light_chain.name}-{heavy_chain.name}"
elif chain == 'light':
for light_chain in self._light_chains:
yield light_chain.name
elif chain == 'heavy':
for heavy_chain in self._heavy_chains:
yield heavy_chain.name
else:
raise ValueError(f"Unknown chain type ({chain}), available options are:"
f"both, light or heavy.")
@property
def regions(self):
heavy_regions = self._heavy_chains.ab_region_index()
light_regions = self._light_chains.ab_region_index()
return {name: {CHAIN_FLAGS.HEAVY_CHAIN: heavy_regions[heavy],
CHAIN_FLAGS.LIGHT_CHAIN: light_regions[light]} for name, heavy, light in
zip(self.names, self._internal_heavy_name, self._internal_light_name)}
@property
def names(self):
return self._names
@property
def sequences(self):
return self._pair_sequences
@property
def aligned_sequences(self):
        return [heavy + light for heavy, light in
                zip(self._heavy_chains.aligned_sequences,
                    self._light_chains.aligned_sequences)]
@property
def n_ab(self):
return self._n_ab
@property
def germline_identity(self):
return self._germline_identity()
@property
def germline(self):
return self._germline_pd()
def _string_summary_basic(self):
return "abpytools.FabCollection Number of sequences: {}".format(self._n_ab)
def __len__(self):
return self._n_ab
def __repr__(self):
return "<%s at 0x%02x>" % (self._string_summary_basic(), id(self))
def __getitem__(self, indices):
if isinstance(indices, int):
return Fab(heavy_chain=self._heavy_chains[indices],
light_chain=self._light_chains[indices],
name=self.names[indices], load=False)
else:
return FabCollection(heavy_chains=list(itemgetter(*indices)(self._heavy_chains)),
light_chains=list(itemgetter(*indices)(self._light_chains)),
names=list(itemgetter(*indices)(self._names)))
def _germline_identity(self):
# empty dictionaries return false, so this condition checks if any of the values are False
if all([x for x in self._light_chains.germline_identity.values()]) is False:
# this means there is no information about the germline,
# by default it will run a web query
self._light_chains.igblast_server_query()
if all([x for x in self._heavy_chains.germline_identity.values()]) is False:
self._heavy_chains.igblast_server_query()
return germline_identity_pd(self._heavy_chains.germline_identity,
self._light_chains.germline_identity,
self._internal_heavy_name,
self._internal_light_name,
self._names)
def get_object(self, name):
"""
:param name: str
:return:
"""
if name in self.names:
index = self.names.index(name)
return self[index]
else:
raise ValueError('Could not find sequence with specified name')
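# Minimal usage sketch (the file names are hypothetical, and load_from_json on
# ChainCollection is assumed to mirror the loader on this class):
#
#   heavy = ChainCollection.load_from_json('heavy_chains.json')
#   light = ChainCollection.load_from_json('light_chains.json')
#   fabs = FabCollection(heavy_chains=heavy, light_chains=light)
#   fabs.molecular_weights()  # one molecular weight per heavy/light pair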
|
mit
| -4,991,626,911,150,680,000 | 41.158209 | 120 | 0.593075 | false | 4.025941 | false | false | false |
jwill89/clifford-discord-bot
|
source/retired/main.py
|
1
|
31345
|
import discord
from discord.ext import commands
import random
import MySQLdb
# ********************************************** #
# DEFINITIONS ********************************** #
# ********************************************** #
# Bot Description
description = '''Official Zealot Gaming Discord bot!'''
# Define Bot
bot = commands.Bot(command_prefix='!', description='Official Zealot Gaming Discord Bot')
# Define MySQL DB and Cursor Object
db = MySQLdb.connect(host="localhost",
user="discord_secure",
passwd="password-here",
db="discord")
# ********************************************** #
# FUNCTIONS ************************************ #
# ********************************************** #
# Check for Game Abbreviations
def is_game_abv(game_abv: str):
try:
sql = "SELECT 1 FROM games WHERE `abv` = %s LIMIT 1"
cur = db.cursor()
result = cur.execute(sql, (game_abv,))
cur.close()
except Exception as e:
print('Exception: ' + str(e))
result = 0
# If we got a result, true, else false
return result == 1
# Check for Game Names
def is_game_name(game_name: str):
try:
sql = "SELECT 1 FROM games WHERE `name` = %s LIMIT 1"
cur = db.cursor()
result = cur.execute(sql, (game_name,))
cur.close()
except Exception as e:
print('Exception: ' + str(e))
result = 0
# If we got a result, true, else false
return result == 1
# Check for Staff Member Status
def is_staff(member: discord.Member):
# Return True or False if User is a Staff Member
return 'Staff' in [r.name for r in member.roles]
# ********************************************** #
# BOT EVENTS *********************************** #
# ********************************************** #
# Bot Start Event
@bot.event
async def on_ready():
print('Logged in as')
print(bot.user.name)
print(bot.user.id)
print('------')
await bot.change_presence(game=discord.Game(name='Zealot Gaming'))
# Welcome Message
@bot.event
async def on_member_join(member):
channel = bot.get_channel('108369515502411776')
fmt = "Everyone welcome {0.mention} to Zealot Gaming! Have a great time here! :wink: " \
"http://puu.sh/nG6Qe.wav".format(member)
await bot.send_message(channel, fmt)
# Goodbye Message
@bot.event
async def on_member_remove(member):
channel = bot.get_channel('108369515502411776')
fmt = ":wave: Goodbye {0}, we're sad to see you go!".format(member.name)
await bot.send_message(channel, fmt)
# ********************************************** #
# UN-GROUPED BOT COMMANDS ********************** #
# ********************************************** #
# COMMAND: !hello
@bot.command(pass_context=True)
async def hello(ctx):
# we do not want the bot to reply to itself
if ctx.message.author == bot.user:
return
else:
msg = 'Hello {0.message.author.mention}'.format(ctx)
await bot.send_message(ctx.message.channel, msg)
# COMMAND: !carlito
@bot.command()
async def carlito():
"""The legendary message of Carlito, maz00's personal cabana boy."""
await bot.say("wew men :ok_hand::skin-tone-1: that's some good shit:100: some good shit :100: that's some good shit"
" right there :100: :ok_hand::skin-tone-1: right there :ok_hand::skin-tone-1: :100: sign me the FUCK "
"up:100: :100: :ok_hand::skin-tone-1: :eggplant:")
# COMMAND: !eightball
@bot.command(pass_context=True)
async def eightball(ctx, question: str):
"""Rolls a magic 8-ball to answer any question you have."""
if question is None:
await bot.say('{0.message.author.mention}, you did not ask a question.'.format(ctx))
return
# Answers List (Classic 8-Ball, 20 Answers)
answers = ['It is certain.',
'It is decidedly so',
'Without a doubt.',
'Yes, definitely.',
'You may rely on it.',
'As I see it, yes.',
'Most likely.',
'Outlook good.',
'Yes.',
'Signs point to yes.',
'Reply hazy; try again.',
'Ask again later.',
'Better not tell you now.',
'Cannot predict now.',
'Concentrate, then ask again.',
'Do not count on it.',
'My reply is no.',
'My sources say no.',
'Outlook not so good.',
'Very doubtful.']
# Send the Answer
await bot.say('{0.message.author.mention}, '.format(ctx) + random.choice(answers))
# COMMAND: !roll
@bot.command()
async def roll(dice: str):
"""Rolls a dice in NdN format."""
try:
rolls, limit = map(int, dice.split('d'))
except Exception:
await bot.say('Format has to be in NdN!')
return
result = ', '.join(str(random.randint(1, limit)) for r in range(rolls))
await bot.say(result)
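# Standalone illustration of the NdN parsing used by !roll (hypothetical
# helper, runnable without a Discord connection):
def _example_roll(dice='3d6'):
    rolls, limit = map(int, dice.split('d'))
    return [random.randint(1, limit) for _ in range(rolls)]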
# COMMAND: !choose
@bot.command()
async def choose(*choices: str):
"""Chooses between multiple choices."""
await bot.say(random.choice(choices))
# COMMAND: !joined
@bot.command()
async def joined(member: discord.Member):
"""Says when a member joined."""
await bot.say('{0.name} joined in {0.joined_at}'.format(member))
# COMMAND: !get_roles
@bot.command()
async def get_roles(member: discord.Member):
"""Lists a User's Roles"""
total = 0
role_list = ''
for role in member.roles:
if total > 0:
role_list += ', '
role_list += str(role)
total += 1
await bot.say('{0.name} is a member of these roles: '.format(member) + role_list)
# COMMAND: !get_channel_id
@bot.command(pass_context=True)
async def get_channel_id(ctx):
"""Lists the ID of the channel the message is sent in."""
# Is the user allowed? (Must be staff)
if not is_staff(ctx.message.author):
await bot.say('{0.mention}, you must be a staff member to use this command.'.format(ctx.message.author))
return
await bot.say('Channel ID is {0.id}'.format(ctx.message.channel))
# COMMAND: !join
@bot.command(pass_context=True)
async def join(ctx, *, role_name: str):
"""Allows a user to join a public group."""
# List of Allowed Public Roles
allowed_roles = ['Europe',
'North America',
'Oceania',
'Overwatch',
'League of Legends',
'Co-op',
'Minna-chan']
if role_name not in allowed_roles:
await bot.say('{0.mention}, you may only join allowed public groups.'.format(ctx.message.author))
return
# Define role, then add role to member.
try:
role = discord.utils.get(ctx.message.server.roles, name=role_name)
await bot.add_roles(ctx.message.author, role)
except Exception as e:
await bot.send_message(ctx.message.channel, "{0.mention}, there was an error getting the roster for you. "
"I'm sorry! : ".format(ctx.message.author) + str(e))
return
# Success Message
await bot.say('{0.mention}, you have successfully been added to the group **{1}**.'
.format(ctx.message.author, role_name))
# ********************************************** #
# GROUPED COMMANDS : EVENTS ******************** #
# ********************************************** #
# COMMAND: !events
@bot.group(pass_context=True)
async def events(ctx):
"""Manage events and attendance!"""
if ctx.invoked_subcommand is None:
        await bot.say('Invalid command passed. Must be *add* or *description*.')
# COMMAND: !events add
@events.command(name='add', pass_context=True)
async def events_add(ctx, date: str, time: str, *, title: str):
"""Add an event to the Events List!
Date **must** be in YYYY/MM/DD format. Time **must** be in UTC."""
# Set #events Channel
event_channel = bot.get_channel('296694692135829504')
# Is the user allowed? (Must be staff)
if not is_staff(ctx.message.author):
await bot.say('{0.mention}, you must be a staff member to use this command.'.format(ctx.message.author))
return
# Make sure we have a date.
if date is None:
await bot.say('Error: You must enter a date in YYYY/MM/DD format.')
return
# Make sure we have a time.
if time is None:
await bot.say('Error: You must enter a time in HH:MM format in UTC timezone.')
return
# Make sure we have a title.
    if title is None:
await bot.say('Error: You must enter a title for the event.')
return
# Add Event to Database
try:
sql = "INSERT INTO events (`date`,`time`,`title`) VALUES (%s, %s, %s)"
cur = db.cursor()
cur.execute(sql, (date, time, title))
event_id = cur.lastrowid
msg_text = "**Title**: {0} \n**Event ID**: {1} \n**Date & Time**: {2} at {3} (UTC)"
# Add Message to Events Channel and Save Message ID
message = await bot.send_message(event_channel, msg_text.format(title, event_id, date, time))
cur.execute('UPDATE events SET `message_id` = %s WHERE `event_id` = %s', (message.id, event_id))
db.commit()
cur.close()
except Exception as e:
await bot.say('{0.mention}, there was an error adding the event to the list. '.format(ctx.message.author)
+ str(e))
return
# Success Message
await bot.say('{0.mention}, your event was successfully added. The event ID is: {1}.'
.format(ctx.message.author, event_id))
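# Assumed schema for the events table (reconstructed from the queries above;
# the real DDL is not part of this file):
#
#   CREATE TABLE events (
#     event_id    INT AUTO_INCREMENT PRIMARY KEY,
#     date        VARCHAR(10),
#     time        VARCHAR(5),
#     title       TEXT,
#     description TEXT,
#     message_id  VARCHAR(20)
#   );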
# COMMAND: !events description
@events.command(name='description', pass_context=True)
async def events_description(ctx, event_id: int, *, desc: str):
"""Adds a Description to an Event Given an Event ID."""
# EVENT CHANNEL ID: 296694692135829504
event_channel = bot.get_channel('296694692135829504')
# Is the user allowed? (Must be staff)
if not is_staff(ctx.message.author):
await bot.say('{0.mention}, you must be a staff member to use this command.'.format(ctx.message.author))
return
# Make sure we have a date.
if event_id is None:
await bot.say('Error: You must enter an event ID. Check the #events channel.')
return
# Make sure we have a date.
if desc is None:
await bot.say('Error: You must enter a description.')
return
try:
sql = "UPDATE events SET `description` = %s WHERE `event_id` = %s"
cur = db.cursor()
cur.execute(sql, (desc, event_id))
cur.execute("SELECT `message_id` FROM events WHERE `event_id` = %s", (event_id,))
msg_id = cur.fetchone()
message = await bot.get_message(event_channel, msg_id[0])
msg_text = message.content + " \n**Description**: {0}".format(desc)
# Update Message in Events Channel with Description
await bot.edit_message(message, msg_text)
db.commit()
cur.close()
except Exception as e:
await bot.say('{0.mention}, there was an error adding a description to the event. '.format(ctx.message.author)
+ str(e))
return
# Success Message
await bot.say('{0.mention}, the event was successfully updated with a description.'.format(ctx.message.author))
# ********************************************** #
# GROUPED COMMANDS : GAMES ********************* #
# ********************************************** #
# COMMAND: !games
@bot.group(pass_context=True)
async def games(ctx):
"""Manages games for the roster."""
if ctx.invoked_subcommand is None:
await bot.say('Invalid command passed. Must be *add*, *edit*, *list*, or *remove*.')
# COMMAND: !games add
@games.command(name='add', pass_context=True)
async def games_add(ctx, game_abv: str, *, game_name: str):
"""Adds a game to the list of games available in the roster."""
# Is the user allowed? (Must be staff)
if not is_staff(ctx.message.author):
await bot.say('{0.mention}, you must be a staff member to use this command.'.format(ctx.message.author))
return
# Does Game Abbreviation Exist?
if is_game_abv(game_abv):
await bot.say('{0.mention}, this abbreviation is already in use.'.format(ctx.message.author))
return
# Does Game Name Exist?
if is_game_name(game_name):
await bot.say('{0.mention}, this game is already in the list.'.format(ctx.message.author))
return
# Handle Database
try:
sql = "INSERT INTO games (`abv`,`name`) VALUES (%s, %s)"
cur = db.cursor()
cur.execute(sql, (game_abv, game_name))
db.commit()
cur.close()
except Exception as e:
await bot.say('{0.mention}, there was an error adding the game to the games list. '.format(ctx.message.author)
+ str(e))
return
# Display Success Message
await bot.say('{0.mention}, the game was successfully added to the games list!'.format(ctx.message.author))
# COMMAND: !games edit
@games.command(name='edit', pass_context=True)
async def games_edit(ctx, game_abv: str, *, game_name: str):
"""Updates a game in the list of games available in the roster."""
# Is the user allowed? (Must be staff)
if not is_staff(ctx.message.author):
await bot.say('{0.mention}, you must be a staff member to use this command.'.format(ctx.message.author))
return
# Is there anything to update?
if not (is_game_abv(game_abv) or is_game_name(game_name)):
        await bot.say('{0.mention}, either the abbreviation or the game name must exist to update.'.format(ctx.message.author))
return
# Handle Database
try:
sql = "UPDATE games SET `abv` = %s, `name = %s WHERE `abv` = %s OR `name` = %s"
cur = db.cursor()
cur.execute(sql, (game_abv, game_name, game_abv, game_name))
db.commit()
cur.close()
except Exception as e:
await bot.say('{0.mention}, there was an error updating the game in the games list. '.format(ctx.message.author)
+ str(e))
return
# Display Success Message
await bot.say('{0.mention}, the game was successfully updated in the games list!'.format(ctx.message.author))
# COMMAND: !games remove
@games.command(name='remove', pass_context=True)
async def games_remove(ctx, *, game_or_abv: str):
"""Removes a game from the list of games available in the roster."""
# Is the user allowed? (Must be staff)
if not is_staff(ctx.message.author):
await bot.say('{0.mention}, you must be a staff member to use this command.'.format(ctx.message.author))
return
# Is there anything to update?
if not (is_game_abv(game_or_abv) or is_game_name(game_or_abv)):
        await bot.say('{0.mention}, either the abbreviation or the game name must exist to delete.'.format(ctx.message.author))
return
# Handle Database
try:
sql = "DELETE FROM games WHERE `abv` = %s OR `name` = %s"
cur = db.cursor()
cur.execute(sql, (game_or_abv, game_or_abv))
db.commit()
cur.close()
except Exception as e:
await bot.say("{0.mention}, there was an error deleting the game from the games list."
" ".format(ctx.message.author) + str(e))
return
# Display Success Message
await bot.say('{0.mention}, the game was successfully deleted from the games list!'.format(ctx.message.author))
# COMMAND: !games list
@games.command(name='list', pass_context=True)
async def games_list(ctx):
"""Sends a message to the user with the current games and abbreviations for use in the roster."""
# Handle Database
try:
sql = "SELECT `abv`, `name` FROM games ORDER BY `name`"
cur = db.cursor()
cur.execute(sql)
result = cur.fetchall()
cur.close()
except Exception:
await bot.send_message(ctx.message.channel, "{0.mention}, there was an error getting the list of games for you."
" I'm sorry!".format(ctx.message.author))
return
# Create Variables for Embed Table
abvs = ''
names = ''
for row in result:
abvs += (row[0] + '\n')
names += (row[1] + '\n')
# Create Embed Table
embed = discord.Embed()
embed.add_field(name="Abbreviation", value=abvs, inline=True)
embed.add_field(name="Game Name", value=names, inline=True)
# Send Table to User Privately
await bot.send_message(ctx.message.channel, embed=embed)
# ********************************************** #
# GROUPED COMMANDS : ROSTER ******************** #
# ********************************************** #
# COMMAND: !roster
@bot.group(pass_context=True)
async def roster(ctx):
"""Handles Roster Management."""
if ctx.invoked_subcommand is None:
await bot.say('Invalid roster command passed. Must be *add*, *edit*, *list*, or *remove*.')
# COMMAND: !roster add
@roster.command(name='add', pass_context=True)
async def roster_add(ctx, game_abv: str, *, ign: str):
"""Adds username to roster.
    Use a game abbreviation from the games list. Only one entry per game. Include all in-game names if necessary."""
username = str(ctx.message.author)
# Does Game Abbreviation Exist?
if not is_game_abv(game_abv):
        await bot.say('{0.mention}, this abbreviation does not exist. Use !games list for a list of acceptable game '
'abbreviations.'.format(ctx.message.author))
return
# Handle Database
try:
sql = "INSERT INTO roster (`discord_account`,`game_abv`,`game_account`) VALUES (%s, %s, %s)"
cur = db.cursor()
cur.execute(sql, (username, game_abv, ign))
db.commit()
cur.close()
except Exception:
await bot.say('{0.message.author.mention}, there was an error adding your information to the roster.'.format(ctx))
return
# Display Success Message
await bot.say('{0.message.author.mention}, your information was successfully added to the roster!'.format(ctx))
# COMMAND: !roster edit
@roster.command(name='edit', pass_context=True)
async def roster_edit(ctx, game_abv: str, *, ign: str):
"""Updates a roster entry for a specific game.
    If either the Game Name or your In-Game Name has spaces, put them in quotes."""
username = str(ctx.message.author)
# Does Game Abbreviation Exist?
if not is_game_abv(game_abv):
        await bot.say('{0.mention}, this abbreviation does not exist. Use !games list for a list of acceptable game'
' abbreviations.'.format(ctx.message.author))
return
# Handle Database
try:
sql = "UPDATE roster SET `game_account` = %s WHERE `discord_account` = %s AND `game_abv` = %s"
cur = db.cursor()
cur.execute(sql, (ign, username, game_abv))
db.commit()
cur.close()
except Exception:
await bot.say('{0.message.author.mention}, there was an error updating your roster information.'.format(ctx))
return
# Display Success Message
await bot.say('{0.message.author.mention}, your roster information was successfully updated!'.format(ctx))
# COMMAND: !roster remove
@roster.command(name='remove', pass_context=True)
async def roster_remove(ctx, game_abv: str, *, ign: str):
"""Removes a user's entries in the roster for the specified game."""
username = str(ctx.message.author)
# Does Game Abbreviation Exist?
if not is_game_abv(game_abv):
        await bot.say('{0.mention}, this abbreviation does not exist. Use !games list for a list of acceptable '
'game abbreviations.'.format(ctx.message.author))
return
# Handle Database
try:
sql = "DELETE FROM roster WHERE `discord_account` = %s AND `game_abv` = %s AND `game_account` = %s"
cur = db.cursor()
cur.execute(sql, (username, game_abv, ign))
db.commit()
cur.close()
except Exception:
await bot.say('{0.message.author.mention}, there was an error deleting your roster information.'.format(ctx))
return
# Display Success Message
await bot.say('{0.message.author.mention}, your roster information was successfully deleted!'.format(ctx))
# COMMAND: !roster list
@roster.command(name='list', pass_context=True)
async def roster_list(ctx, game_abv: str):
"""Sends a message to the user with the current roster for the specified game."""
# Does Game Abbreviation Exist?
if not is_game_abv(game_abv):
        await bot.say('{0.mention}, this abbreviation does not exist. Use !games list for a list of acceptable game '
'abbreviations.'.format(ctx.message.author))
return
# Handle Database
try:
sql = "SELECT `discord_account`, `game_account` FROM roster WHERE `game_abv` = %s ORDER BY `discord_account`"
cur = db.cursor()
cur.execute(sql, (game_abv,))
result = cur.fetchall()
cur.close()
except Exception:
await bot.send_message(ctx.message.channel, "{0.mention}, there was an error getting the roster for you. "
"I'm sorry!".format(ctx.message.author))
return
# Create Variables for Embed Table
accounts = ''
names = ''
for row in result:
accounts += (row[0] + '\n')
names += (row[1] + '\n')
# Create Embed Table
embed = discord.Embed()
embed.add_field(name="Discord Account", value=accounts, inline=True)
embed.add_field(name="In-Game Name", value=names, inline=True)
# Send Table to Channel
await bot.send_message(ctx.message.channel, embed=embed)
# ********************************************** #
# GROUPED COMMANDS : RECRUIT ******************* #
# ********************************************** #
# COMMAND: !recruit
@bot.group(pass_context=True)
async def recruit(ctx):
"""Handles Recruitment Post and Invites Management."""
if ctx.invoked_subcommand is None:
await bot.say('Invalid recruitment command passed. Must be *add*, *edit*, *invite*, *list*, or *remove*.')
# COMMAND: !recruit add
@recruit.command(name='add', pass_context=True)
async def recruit_add(ctx, game_abv: str, *, link: str):
"""Adds recruitment post link to the recruitment list. Use a game abbreviation from the games list."""
# Is the user allowed? (Must be staff)
if not is_staff(ctx.message.author):
await bot.say('{0.mention}, you must be a staff member to use this command.'.format(ctx.message.author))
return
# Does Game Abbreviation Exist?
if not is_game_abv(game_abv):
await bot.say(
            '{0.mention}, this abbreviation does not exist. Use !games list for a list of acceptable game '
'abbreviations.'.format(ctx.message.author))
return
# Handle Database
try:
sql = "INSERT INTO recruitment (`game`,`link`) VALUES (%s, %s)"
cur = db.cursor()
cur.execute(sql, (game_abv, link))
db.commit()
cur.close()
except Exception:
await bot.say(
'{0.message.author.mention}, there was an error adding your recruitment link to the list.'.format(ctx))
return
# Display Success Message
await bot.say('{0.message.author.mention}, your information was successfully added to the recruitment '
'posts list!'.format(ctx))
# COMMAND: !recruit edit
@recruit.command(name='edit', pass_context=True)
async def recruit_edit(ctx, entry_id: int, *, link: str):
"""Updates a recruitment post entry with the specified entry ID."""
# Is the user allowed? (Must be staff)
if not is_staff(ctx.message.author):
await bot.say('{0.mention}, you must be a staff member to use this command.'.format(ctx.message.author))
return
# Handle Database
try:
sql = "UPDATE recruitment SET `link` = %s WHERE `entry_id` = %s"
cur = db.cursor()
cur.execute(sql, (link, entry_id))
db.commit()
cur.close()
except Exception:
await bot.say('{0.message.author.mention}, there was an error updating the specified '
'recruitment entry.'.format(ctx))
return
# Display Success Message
await bot.say('{0.message.author.mention}, the recruitment entry was successfully updated!'.format(ctx))
# COMMAND: !recruit remove
@recruit.command(name='remove', pass_context=True)
async def recruit_remove(ctx, entry_id: int):
"""Removes an entry for the recruitment posts list with the specified entry ID."""
# Is the user allowed? (Must be staff)
if not is_staff(ctx.message.author):
await bot.say('{0.mention}, you must be a staff member to use this command.'.format(ctx.message.author))
return
# Handle Database
try:
sql = "DELETE FROM recruitment WHERE `entry_id` = %s"
cur = db.cursor()
cur.execute(sql, (entry_id,))
db.commit()
cur.close()
except Exception:
await bot.say('{0.message.author.mention}, there was an error deleting the specified '
'recruitment entry.'.format(ctx))
return
# Display Success Message
await bot.say('{0.message.author.mention}, the recruitment entry was successfully deleted!'.format(ctx))
# COMMAND: !recruit list
@recruit.command(name='list', pass_context=True)
async def recruit_list(ctx):
"""Lists all recruitment post entries in the system."""
# Handle Database
try:
sql = "SELECT * FROM recruitment ORDER BY `game`"
cur = db.cursor()
cur.execute(sql)
result = cur.fetchall()
cur.close()
except Exception:
await bot.send_message(ctx.message.channel, "{0.mention}, there was an error getting the recruitment list "
"for you. I'm sorry!".format(ctx.message.author))
return
# Create Variables for Embed Table
entries = ''
game_abvs = ''
links = ''
for row in result:
        entries += (str(row[0]) + '\n')
game_abvs += (row[1] + '\n')
links += (row[2] + '\n')
# Create Embed Table
embed = discord.Embed()
embed.add_field(name="ID", value=entries, inline=True)
embed.add_field(name="Game", value=game_abvs, inline=True)
embed.add_field(name="Link", value=links, inline=True)
# Send Table to Channel
await bot.send_message(ctx.message.channel, embed=embed)
# COMMAND: !recruit invite
@recruit.command(name='invite')
async def recruit_invite(duration: int = 30):
    """Provides an invite link to the Discord server. Set duration to 0 for permanent invite."""
    # Default duration is 30 minutes; convert the given minutes to seconds
    # (0 stays 0, which Discord treats as a permanent invite).
    duration *= 60
# WELCOME CHANNEL ID: 141622052133142529
welcome_channel = bot.get_channel('141622052133142529')
# Create the Invite
new_invite = await bot.create_invite(welcome_channel, max_age=duration)
# Send Message with Invite Link
await bot.say('Your newly generated invite link is: {0.url}'.format(new_invite))
# ********************************************** #
# MODERATOR COMMANDS *************************** #
# ********************************************** #
# COMMAND: !give_role
@bot.command(pass_context=True)
async def give_role(ctx, username: str, *, role_name: str):
"""Assigns a role to a user."""
# List of Roles Staff Can Add To.
allowed_roles = ['Europe',
'North America',
'Oceania',
'Overwatch',
'League of Legends',
'Co-op',
'Minna-chan',
'Squire',
'Knight',
'Zealot']
# Is the user allowed? (Must be Staff)
if not is_staff(ctx.message.author):
await bot.say('{0.mention}, you must be a staff member to use this command.'.format(ctx.message.author))
return
if role_name not in allowed_roles:
await bot.say('{0.mention}, you may only assign users to public roles, Guest, or Registered Member'
.format(ctx.message.author))
return
# Define role, then add role to member.
try:
role = discord.utils.get(ctx.message.server.roles, name=role_name)
user = discord.utils.get(ctx.message.server.members, name=username)
await bot.add_roles(user, role)
except Exception as e:
await bot.send_message(ctx.message.channel, "{0.mention}, there was an granting the role to the user."
" ".format(ctx.message.author) + str(e))
return
# Success Message
await bot.say('{0.mention}, you have successfully added **{1}** to the group **{2}**'
'.'.format(ctx.message.author, username, role_name))
# COMMAND: !kick
@bot.command(name='kick', pass_context=True)
async def mod_kick(ctx, username: str, *, reason: str):
"""Kicks a user from the server."""
# User must be a staff member
if not is_staff(ctx.message.author):
await bot.say('{0.mention}, you must be a staff member to use this command.'.format(ctx.message.author))
return
# Add to DB and Post Message
try:
# Variables Needed
member = discord.utils.get(ctx.message.server.members, name=username)
staffer = ctx.message.author
# Handle Database
sql = "INSERT INTO mod_log (`action`,`user`, `user_id`, `staff`, `staff_id`, reason) " \
"VALUES ('kick', %s, %s, %s, %s, %s)"
cur = db.cursor()
cur.execute(sql, (str(member), member.id, str(staffer), staffer.id, reason))
# Save Last Row ID
case_id = cur.lastrowid
# Insert Message
log_channel = bot.get_channel('303262467205890051')
msg_text = "**Case #{0}** | Kick :boot: \n**User**: {1} ({2}) " \
"\n**Moderator**: {3} ({4}) \n**Reason**: {5}"
# Add Message to Events Channel and Save Message ID
case_message = await bot.send_message(log_channel, msg_text.format(case_id, str(member), member.id, str(staffer), staffer.id, reason))
cur.execute("UPDATE mod_log SET `message_id` = %s WHERE `case_id` = %s", (case_message.id, case_id))
# Finish Database Stuff and Commit
db.commit()
cur.close()
# Kick the Member
await bot.kick(member)
except Exception as e:
await bot.send_message(ctx.message.channel, "{0.mention}, there was an error when kicking the user."
" ".format(ctx.message.author) + str(e))
await bot.say("{0.mention}, the user was successfully kicked. A log entry has been added.".format(ctx.message.author))
# ********************************************** #
# START THE BOT ******************************** #
# ********************************************** #
# Run the Bot
bot.run('token-here')
|
gpl-3.0
| 2,852,675,339,850,913,300 | 33.866518 | 142 | 0.58759 | false | 3.716945 | false | false | false |
nicain/dipde_dev
|
dipde/interfaces/zmq/__init__.py
|
1
|
4371
|
import time
import zmq
import threading
context = zmq.Context()
class PublishCallback(object):
def __init__(self, port, topic, message_callback):
self.port = port
self.topic = topic
self.message_callback = message_callback
self.socket = context.socket(zmq.PUB)
def __call__(self, obj):
message_to_send = list(self.message_callback(obj))
message_to_send.insert(0,"%s" % self.topic)
self.socket.send_multipart(map(str, message_to_send))
class PublishCallbackConnect(PublishCallback):
def __init__(self, port, topic, message_callback):
super(self.__class__, self).__init__(port, topic, message_callback)
self.socket.connect("tcp://localhost:%s" % self.port)
class CallbackSubscriber(object):
def __init__(self, port=None, receive_callback=None):
self.socket = context.socket(zmq.SUB)
if port is None:
self.port = self.socket.bind_to_random_port('tcp://*', min_port=6001, max_port=6004, max_tries=100)
else:
self.socket.bind("tcp://*:%s" % port)
self.port = port
        self.socket.setsockopt(zmq.SUBSCRIBE, 'test')  # NB: the subscription topic is hard-coded to 'test'
if receive_callback is None:
def receive_callback(received_message):
print received_message
self.receive_callback = receive_callback
def run(self):
while True:
received_message_multipart = self.socket.recv_multipart()
topic = received_message_multipart[0]
received_message = received_message_multipart[1:]
self.receive_callback(received_message)
class CallbackSubscriberThread(threading.Thread):
def __init__(self, port=None):
super(self.__class__, self).__init__()
self.subscriber = CallbackSubscriber(port)
self.daemon = True
def run(self, port=None):
self.subscriber.run()
@property
def port(self):
return self.subscriber.port
class RequestConnection(object):
def __init__(self, port):
self.port = port
self.socket = context.socket(zmq.REQ)
self.socket.connect("tcp://localhost:%s" % port)
def __call__(self, *args):
if len(args) == 0:
self.socket.send(b'')
else:
self.socket.send_multipart(map(str,args))
message = self.socket.recv_multipart()
return float(message[0])
def shutdown(self):
self.socket.close()
assert self.socket.closed
class ReplyServerBind(object):
def __init__(self, reply_function, port=None):
self.socket = context.socket(zmq.REP)
if port is None:
self.port = self.socket.bind_to_random_port('tcp://*', min_port=6001, max_port=6004, max_tries=100)
else:
self.socket.bind("tcp://*:%s" % port)
self.port = port
self.reply_function = reply_function
def run(self):
while True:
message = self.socket.recv()
# print 'message:', message, type(message)
if message == 'SHUTDOWN':
break
# print 'message'
if message == '':
requested_args = tuple()
else:
requested_args = tuple([float(message)])
self.socket.send_multipart([b"%s" % self.reply_function(*requested_args)])
self.socket.send('DOWN')
self.socket.close()
class ReplyServerThread(threading.Thread):
def __init__(self, reply_function, port=None):
super(ReplyServerThread, self).__init__()
self._stop = threading.Event()
self.daemon = True
self.reply_function = reply_function
self.server = ReplyServerBind(self.reply_function, port=port)
def run(self, port=None):
self.server.run()
def shutdown(self):
shutdown_socket = context.socket(zmq.REQ)
shutdown_socket.connect("tcp://localhost:%s" % self.port)
shutdown_socket.send('SHUTDOWN')
message = shutdown_socket.recv()
assert message == 'DOWN'
self.stop()
def stop(self):
self._stop.set()
def stopped(self):
return self._stop.isSet()
@property
def port(self):
return self.server.port
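# Minimal request/reply round trip (hypothetical usage, not part of the
# module): the server doubles each request, and replies come back as floats.
def _example_round_trip():
    server = ReplyServerThread(lambda x: 2 * x)
    server.start()
    client = RequestConnection(server.port)
    print client(21) # prints 42.0
    client.shutdown()
    server.shutdown()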
|
gpl-3.0
| -8,901,712,531,665,347,000 | 27.94702 | 111 | 0.577442 | false | 4.028571 | false | false | false |
yantrabuddhi/nativeclient
|
buildbot/buildbot_lib.py
|
1
|
21952
|
#!/usr/bin/python
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import optparse
import os.path
import shutil
import subprocess
import stat
import sys
import time
import traceback
ARCH_MAP = {
'32': {
'gyp_arch': 'ia32',
'scons_platform': 'x86-32',
},
'64': {
'gyp_arch': 'x64',
'scons_platform': 'x86-64',
},
'arm': {
'gyp_arch': 'arm',
'scons_platform': 'arm',
},
'mips32': {
'gyp_arch': 'mips32',
'scons_platform': 'mips32',
},
}
def RunningOnBuildbot():
return os.environ.get('BUILDBOT_SLAVE_TYPE') is not None
def GetHostPlatform():
sys_platform = sys.platform.lower()
if sys_platform.startswith('linux'):
return 'linux'
elif sys_platform in ('win', 'win32', 'windows', 'cygwin'):
return 'win'
elif sys_platform in ('darwin', 'mac'):
return 'mac'
else:
raise Exception('Can not determine the platform!')
def SetDefaultContextAttributes(context):
"""
Set default values for the attributes needed by the SCons function, so that
SCons can be run without needing ParseStandardCommandLine
"""
platform = GetHostPlatform()
context['platform'] = platform
context['mode'] = 'opt'
context['default_scons_mode'] = ['opt-host', 'nacl']
context['default_scons_platform'] = ('x86-64' if platform == 'win'
else 'x86-32')
context['android'] = False
context['clang'] = False
context['asan'] = False
context['pnacl'] = False
context['use_glibc'] = False
context['use_breakpad_tools'] = False
context['max_jobs'] = 8
context['scons_args'] = []
# Windows-specific environment manipulation
def SetupWindowsEnvironment(context):
# Poke around looking for MSVC. We should do something more principled in
# the future.
# The name of Program Files can differ, depending on the bittage of Windows.
program_files = r'c:\Program Files (x86)'
if not os.path.exists(program_files):
program_files = r'c:\Program Files'
if not os.path.exists(program_files):
raise Exception('Cannot find the Program Files directory!')
# The location of MSVC can differ depending on the version.
msvc_locs = [
('Microsoft Visual Studio 12.0', 'VS120COMNTOOLS', '2013'),
('Microsoft Visual Studio 10.0', 'VS100COMNTOOLS', '2010'),
('Microsoft Visual Studio 9.0', 'VS90COMNTOOLS', '2008'),
('Microsoft Visual Studio 8.0', 'VS80COMNTOOLS', '2005'),
]
for dirname, comntools_var, gyp_msvs_version in msvc_locs:
msvc = os.path.join(program_files, dirname)
context.SetEnv('GYP_MSVS_VERSION', gyp_msvs_version)
if os.path.exists(msvc):
break
else:
# The break statement did not execute.
raise Exception('Cannot find MSVC!')
# Put MSVC in the path.
vc = os.path.join(msvc, 'VC')
comntools = os.path.join(msvc, 'Common7', 'Tools')
perf = os.path.join(msvc, 'Team Tools', 'Performance Tools')
context.SetEnv('PATH', os.pathsep.join([
context.GetEnv('PATH'),
vc,
comntools,
perf]))
# SCons needs this variable to find vsvars.bat.
# The end slash is needed because the batch files expect it.
context.SetEnv(comntools_var, comntools + '\\')
# This environment variable will SCons to print debug info while it searches
# for MSVC.
context.SetEnv('SCONS_MSCOMMON_DEBUG', '-')
# Needed for finding devenv.
context['msvc'] = msvc
SetupGyp(context, [])
def SetupGyp(context, extra_vars=[]):
if RunningOnBuildbot():
goma_opts = [
'use_goma=1',
'gomadir=/b/build/goma',
]
else:
goma_opts = []
context.SetEnv('GYP_DEFINES', ' '.join(
context['gyp_vars'] + goma_opts + extra_vars))
def SetupLinuxEnvironment(context):
if context['arch'] == 'mips32':
# Ensure the trusted mips toolchain is installed.
cmd = ['build/package_version/package_version.py', '--packages',
'linux_x86/mips_trusted', 'sync', '-x']
Command(context, cmd)
SetupGyp(context, ['target_arch='+context['gyp_arch']])
def SetupMacEnvironment(context):
SetupGyp(context, ['target_arch='+context['gyp_arch']])
def SetupAndroidEnvironment(context):
SetupGyp(context, ['OS=android', 'target_arch='+context['gyp_arch']])
context.SetEnv('GYP_CROSSCOMPILE', '1')
def ParseStandardCommandLine(context):
"""
The standard buildbot scripts require 3 arguments to run. The first
argument (dbg/opt) controls if the build is a debug or a release build. The
second argument (32/64) controls the machine architecture being targeted.
The third argument (newlib/glibc) controls which c library we're using for
the nexes. Different buildbots may have different sets of arguments.
"""
parser = optparse.OptionParser()
parser.add_option('-n', '--dry-run', dest='dry_run', default=False,
action='store_true', help='Do not execute any commands.')
parser.add_option('--inside-toolchain', dest='inside_toolchain',
default=bool(os.environ.get('INSIDE_TOOLCHAIN')),
action='store_true', help='Inside toolchain build.')
parser.add_option('--android', dest='android', default=False,
action='store_true', help='Build for Android.')
parser.add_option('--clang', dest='clang', default=False,
action='store_true', help='Build trusted code with Clang.')
parser.add_option('--coverage', dest='coverage', default=False,
action='store_true',
help='Build and test for code coverage.')
parser.add_option('--validator', dest='validator', default=False,
action='store_true',
help='Only run validator regression test')
parser.add_option('--asan', dest='asan', default=False,
action='store_true', help='Build trusted code with ASan.')
  parser.add_option('--scons-args', dest='scons_args', default=[],
action='append', help='Extra scons arguments.')
parser.add_option('--step-suffix', metavar='SUFFIX', default='',
help='Append SUFFIX to buildbot step names.')
parser.add_option('--no-gyp', dest='no_gyp', default=False,
action='store_true', help='Do not run the gyp build')
parser.add_option('--no-goma', dest='no_goma', default=False,
action='store_true', help='Do not run with goma')
parser.add_option('--use-breakpad-tools', dest='use_breakpad_tools',
default=False, action='store_true',
help='Use breakpad tools for testing')
parser.add_option('--skip-build', dest='skip_build', default=False,
action='store_true',
help='Skip building steps in buildbot_pnacl')
parser.add_option('--skip-run', dest='skip_run', default=False,
action='store_true',
help='Skip test-running steps in buildbot_pnacl')
options, args = parser.parse_args()
if len(args) != 3:
parser.error('Expected 3 arguments: mode arch toolchain')
# script + 3 args == 4
mode, arch, toolchain = args
if mode not in ('dbg', 'opt', 'coverage'):
parser.error('Invalid mode %r' % mode)
if arch not in ARCH_MAP:
parser.error('Invalid arch %r' % arch)
if toolchain not in ('newlib', 'glibc', 'pnacl', 'nacl_clang'):
parser.error('Invalid toolchain %r' % toolchain)
# TODO(ncbray) allow a command-line override
platform = GetHostPlatform()
context['platform'] = platform
context['mode'] = mode
context['arch'] = arch
context['android'] = options.android
# ASan is Clang, so set the flag to simplify other checks.
context['clang'] = options.clang or options.asan
context['validator'] = options.validator
context['asan'] = options.asan
# TODO(ncbray) turn derived values into methods.
context['gyp_mode'] = {
'opt': 'Release',
'dbg': 'Debug',
'coverage': 'Debug'}[mode]
context['gn_is_debug'] = {
'opt': 'false',
'dbg': 'true',
'coverage': 'true'}[mode]
context['gyp_arch'] = ARCH_MAP[arch]['gyp_arch']
context['gyp_vars'] = []
if context['clang']:
context['gyp_vars'].append('clang=1')
if context['asan']:
context['gyp_vars'].append('asan=1')
context['default_scons_platform'] = ARCH_MAP[arch]['scons_platform']
context['default_scons_mode'] = ['nacl']
# Only Linux can build trusted code on ARM.
# TODO(mcgrathr): clean this up somehow
if arch != 'arm' or platform == 'linux':
context['default_scons_mode'] += [mode + '-host']
context['use_glibc'] = toolchain == 'glibc'
context['pnacl'] = toolchain == 'pnacl'
context['nacl_clang'] = toolchain == 'nacl_clang'
context['max_jobs'] = 8
context['dry_run'] = options.dry_run
context['inside_toolchain'] = options.inside_toolchain
context['step_suffix'] = options.step_suffix
context['no_gyp'] = options.no_gyp
context['no_goma'] = options.no_goma
context['coverage'] = options.coverage
context['use_breakpad_tools'] = options.use_breakpad_tools
context['scons_args'] = options.scons_args
context['skip_build'] = options.skip_build
context['skip_run'] = options.skip_run
# Don't run gyp on coverage builds.
if context['coverage']:
context['no_gyp'] = True
for key, value in sorted(context.config.items()):
print '%s=%s' % (key, value)
def EnsureDirectoryExists(path):
"""
Create a directory if it does not already exist.
Does not mask failures, but there really shouldn't be any.
"""
if not os.path.exists(path):
os.makedirs(path)
def TryToCleanContents(path, file_name_filter=lambda fn: True):
"""
Remove the contents of a directory without touching the directory itself.
Ignores all failures.
"""
if os.path.exists(path):
for fn in os.listdir(path):
TryToCleanPath(os.path.join(path, fn), file_name_filter)
def TryToCleanPath(path, file_name_filter=lambda fn: True):
"""
Removes a file or directory.
Ignores all failures.
"""
if os.path.exists(path):
if file_name_filter(path):
print 'Trying to remove %s' % path
try:
RemovePath(path)
except Exception:
print 'Failed to remove %s' % path
else:
print 'Skipping %s' % path
def Retry(op, *args):
# Windows seems to be prone to having commands that delete files or
# directories fail. We currently do not have a complete understanding why,
# and as a workaround we simply retry the command a few times.
# It appears that file locks are hanging around longer than they should. This
# may be a secondary effect of processes hanging around longer than they
# should. This may be because when we kill a browser sel_ldr does not exit
# immediately, etc.
# Virus checkers can also accidently prevent files from being deleted, but
# that shouldn't be a problem on the bots.
if GetHostPlatform() == 'win':
count = 0
while True:
try:
op(*args)
break
except Exception:
print "FAILED: %s %s" % (op.__name__, repr(args))
count += 1
if count < 5:
print "RETRY: %s %s" % (op.__name__, repr(args))
time.sleep(pow(2, count))
else:
# Don't mask the exception.
raise
else:
op(*args)
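# Hypothetical example: retry a delete that intermittently fails on Windows
# because of lingering file locks.
#
#   Retry(os.remove, 'scons-out/.sconsign.dblite')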
def PermissionsFixOnError(func, path, exc_info):
if not os.access(path, os.W_OK):
os.chmod(path, stat.S_IWUSR)
func(path)
else:
raise
def _RemoveDirectory(path):
print 'Removing %s' % path
if os.path.exists(path):
shutil.rmtree(path, onerror=PermissionsFixOnError)
print ' Succeeded.'
else:
print ' Path does not exist, nothing to do.'
def RemoveDirectory(path):
"""
Remove a directory if it exists.
Does not mask failures, although it does retry a few times on Windows.
"""
Retry(_RemoveDirectory, path)
def RemovePath(path):
"""Remove a path, file or directory."""
if os.path.isdir(path):
RemoveDirectory(path)
else:
if os.path.isfile(path) and not os.access(path, os.W_OK):
os.chmod(path, stat.S_IWUSR)
os.remove(path)
# This is a sanity check so Command can print out better error information.
def FileCanBeFound(name, paths):
# CWD
if os.path.exists(name):
return True
# Paths with directories are not resolved using the PATH variable.
if os.path.dirname(name):
return False
# In path
for path in paths.split(os.pathsep):
full = os.path.join(path, name)
if os.path.exists(full):
return True
return False
def RemoveGypBuildDirectories():
# Remove all directories on all platforms. Overkill, but it allows for
# straight-line code.
# Windows
RemoveDirectory('build/Debug')
RemoveDirectory('build/Release')
RemoveDirectory('build/Debug-Win32')
RemoveDirectory('build/Release-Win32')
RemoveDirectory('build/Debug-x64')
RemoveDirectory('build/Release-x64')
# Linux and Mac
RemoveDirectory('../xcodebuild')
RemoveDirectory('../out')
RemoveDirectory('src/third_party/nacl_sdk/arm-newlib')
def RemoveSconsBuildDirectories():
RemoveDirectory('scons-out')
RemoveDirectory('breakpad-out')
# Execute a command using Python's subprocess module.
def Command(context, cmd, cwd=None):
print 'Running command: %s' % ' '.join(cmd)
# Python's subprocess has a quirk. A subprocess can execute with an
# arbitrary, user-defined environment. The first argument of the command,
# however, is located using the PATH variable of the Python script that is
# launching the subprocess. Modifying the PATH in the environment passed to
# the subprocess does not affect Python's search for the first argument of
# the command (the executable file.) This is a little counter intuitive,
# so we're forcing the search to use the same PATH variable as is seen by
# the subprocess.
env = context.MakeCommandEnv()
script_path = os.environ['PATH']
os.environ['PATH'] = env['PATH']
try:
if FileCanBeFound(cmd[0], env['PATH']) or context['dry_run']:
# Make sure that print statements before the subprocess call have been
# flushed, otherwise the output of the subprocess call may appear before
# the print statements.
sys.stdout.flush()
if context['dry_run']:
retcode = 0
else:
retcode = subprocess.call(cmd, cwd=cwd, env=env)
else:
# Provide a nicer failure message.
# If subprocess cannot find the executable, it will throw a cryptic
# exception.
print 'Executable %r cannot be found.' % cmd[0]
retcode = 1
finally:
os.environ['PATH'] = script_path
print 'Command return code: %d' % retcode
if retcode != 0:
raise StepFailed()
return retcode
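# Hypothetical usage sketch: run a command through the wrapper above; a
# nonzero exit code raises StepFailed.
def _example_command(context):
  return Command(context, ['echo', 'hello'])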
# A specialized wrapper around Command for SCons invocations.
def SCons(context, mode=None, platform=None, parallel=False, browser_test=False,
args=(), cwd=None):
python = sys.executable
if mode is None: mode = context['default_scons_mode']
if platform is None: platform = context['default_scons_platform']
if parallel:
jobs = context['max_jobs']
else:
jobs = 1
cmd = []
if browser_test and context.Linux():
# Although we could use the "browser_headless=1" Scons option, it runs
# xvfb-run once per Chromium invocation. This is good for isolating
# the tests, but xvfb-run has a stupid fixed-period sleep, which would
# slow down the tests unnecessarily.
cmd.extend(['xvfb-run', '--auto-servernum'])
cmd.extend([
python, 'scons.py',
'--verbose',
'-k',
'-j%d' % jobs,
'--mode='+','.join(mode),
'platform='+platform,
])
cmd.extend(context['scons_args'])
if context['clang']: cmd.append('--clang')
if context['asan']: cmd.append('--asan')
if context['use_glibc']: cmd.append('--nacl_glibc')
if context['pnacl']: cmd.append('bitcode=1')
if context['nacl_clang']: cmd.append('nacl_clang=1')
if context['use_breakpad_tools']:
cmd.append('breakpad_tools_dir=breakpad-out')
if context['android']:
cmd.append('android=1')
  # Append user-specified arguments.
cmd.extend(args)
Command(context, cmd, cwd)
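# Hypothetical usage sketch: run the small_tests suite for the context's
# default mode and platform with parallel jobs enabled.
def _example_scons(context):
  SCons(context, parallel=True, args=['small_tests'])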
class StepFailed(Exception):
"""
Thrown when the step has failed.
"""
class StopBuild(Exception):
"""
Thrown when the entire build should stop. This does not indicate a failure,
in of itself.
"""
class Step(object):
"""
This class is used in conjunction with a Python "with" statement to ensure
that the preamble and postamble of each build step gets printed and failures
get logged. This class also ensures that exceptions thrown inside a "with"
statement don't take down the entire build.
"""
def __init__(self, name, status, halt_on_fail=True):
self.status = status
if 'step_suffix' in status.context:
suffix = status.context['step_suffix']
else:
suffix = ''
self.name = name + suffix
self.halt_on_fail = halt_on_fail
self.step_failed = False
# Called on entry to a 'with' block.
def __enter__(self):
sys.stdout.flush()
print
print '@@@BUILD_STEP %s@@@' % self.name
self.status.ReportBegin(self.name)
# The method is called on exit from a 'with' block - even for non-local
# control flow, i.e. exceptions, breaks, continues, returns, etc.
# If an exception is thrown inside a block wrapped with a 'with' statement,
# the __exit__ handler can suppress the exception by returning True. This is
# used to isolate each step in the build - if an exception occurs in a given
# step, the step is treated as a failure. This allows the postamble for each
  # step to be printed and also allows the build to continue if the failure of
# a given step doesn't halt the build.
def __exit__(self, type, exception, trace):
sys.stdout.flush()
if exception is None:
# If exception is None, no exception occurred.
step_failed = False
elif isinstance(exception, StepFailed):
step_failed = True
print
print 'Halting build step because of failure.'
print
else:
step_failed = True
print
print 'The build step threw an exception...'
print
traceback.print_exception(type, exception, trace, file=sys.stdout)
print
if step_failed:
self.status.ReportFail(self.name)
print '@@@STEP_FAILURE@@@'
if self.halt_on_fail:
print
print 'Entire build halted because %s failed.' % self.name
sys.stdout.flush()
raise StopBuild()
else:
self.status.ReportPass(self.name)
sys.stdout.flush()
# Suppress any exception that occurred.
return True
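# --- Illustrative sketch, not part of the original script: Step is meant to
# wrap each build stage in a 'with' block. 'status' is assumed to be the
# BuildStatus defined below; 'compile' and 'test' are made-up stage names.
def _example_step_usage(status):
  with Step('compile', status, halt_on_fail=False):
    raise StepFailed()  # recorded as a failure, then suppressed
  with Step('test', status):
    pass  # still runs because the previous step had halt_on_fail=False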
# Adds an arbitrary link inside the build stage on the waterfall.
def StepLink(text, link):
print '@@@STEP_LINK@%s@%s@@@' % (text, link)
# Adds arbitrary text inside the build stage on the waterfall.
def StepText(text):
print '@@@STEP_TEXT@%s@@@' % (text)
class BuildStatus(object):
"""
Keeps track of the overall status of the build.
"""
def __init__(self, context):
self.context = context
self.ever_failed = False
self.steps = []
def ReportBegin(self, name):
pass
def ReportPass(self, name):
self.steps.append((name, 'passed'))
def ReportFail(self, name):
self.steps.append((name, 'failed'))
self.ever_failed = True
# Handy info when this script is run outside of the buildbot.
def DisplayBuildStatus(self):
print
for step, status in self.steps:
print '%-40s[%s]' % (step, status)
print
if self.ever_failed:
print 'Build failed.'
else:
print 'Build succeeded.'
def ReturnValue(self):
return int(self.ever_failed)
class BuildContext(object):
"""
Encapsulates the information needed for running a build command. This
includes environment variables and default arguments for SCons invocations.
"""
# Only allow these attributes on objects of this type.
__slots__ = ['status', 'global_env', 'config']
def __init__(self):
# The contents of global_env override os.environ for any commands run via
# self.Command(...)
self.global_env = {}
# PATH is a special case. See: Command.
self.global_env['PATH'] = os.environ.get('PATH', '')
self.config = {}
self['dry_run'] = False
# Emulate dictionary subscripting.
def __getitem__(self, key):
return self.config[key]
# Emulate dictionary subscripting.
def __setitem__(self, key, value):
self.config[key] = value
# Emulate dictionary membership test
def __contains__(self, key):
return key in self.config
def Windows(self):
return self.config['platform'] == 'win'
def Linux(self):
return self.config['platform'] == 'linux'
def Mac(self):
return self.config['platform'] == 'mac'
def GetEnv(self, name, default=None):
return self.global_env.get(name, default)
def SetEnv(self, name, value):
self.global_env[name] = str(value)
def MakeCommandEnv(self):
# The external environment is not sanitized.
e = dict(os.environ)
# Arbitrary variables can be overridden.
e.update(self.global_env)
return e
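# --- Illustrative sketch, not part of the original script: BuildContext acts
# as a small configuration dictionary while keeping the subprocess
# environment (global_env) separate. 'GYP_DEFINES' is an arbitrary example.
def _example_buildcontext_usage():
  context = BuildContext()
  context['platform'] = 'linux'         # stored in context.config
  context.SetEnv('GYP_DEFINES', 'foo')  # stored in context.global_env
  return context.Linux() and context.GetEnv('GYP_DEFINES') == 'foo'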
def RunBuild(script, status):
try:
script(status, status.context)
except StopBuild:
pass
# Emit a summary step for three reasons:
# - The annotator will attribute non-zero exit status to the last build step.
# This can misattribute failures to the last build step.
# - runtest.py wraps the builds to scrape perf data. It emits an annotator
# tag on exit which misattributes perf results to the last build step.
# - Provide a label step in which to show summary result.
# Otherwise these go back to the preamble.
with Step('summary', status):
if status.ever_failed:
print 'There were failed stages.'
else:
print 'Success.'
# Display a summary of the build.
status.DisplayBuildStatus()
sys.exit(status.ReturnValue())
|
bsd-3-clause
| -6,223,911,041,280,375,000 | 30.722543 | 80 | 0.654246 | false | 3.708108 | false | false | false |
tjcsl/director
|
web3/apps/sites/migrations/0001_initial.py
|
1
|
1297
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-11-05 23:20
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('users', '0002_auto_20161105_2046'),
]
operations = [
migrations.CreateModel(
name='Website',
fields=[
('id', models.PositiveIntegerField(primary_key=True, serialize=False, validators=[django.core.validators.MinValueValidator(1000)])),
('name', models.CharField(max_length=32, unique=True)),
('category', models.CharField(choices=[('legacy', 'legacy'), ('static', 'static'), ('php', 'php'), ('dynamic', 'dynamic')], max_length=16)),
('purpose', models.CharField(choices=[('user', 'user'), ('activity', 'activity')], max_length=16)),
('domain', models.TextField()),
('description', models.TextField()),
('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='users.Group')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='users.User')),
],
),
]
|
mit
| -8,739,404,138,227,232,000 | 39.53125 | 156 | 0.596762 | false | 4.183871 | false | false | false |
TAMU-CPT/galaxy-tools
|
tools/gff3/gff3_filter.py
|
1
|
1553
|
#!/usr/bin/env python
import sys
import logging
import argparse
from cpt_gffParser import gffParse, gffWrite
from gff3 import feature_lambda, feature_test_qual_value
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)
def gff_filter(gff3, id_list=None, id="", attribute_field="ID", subfeatures=True):
attribute_field = attribute_field.split("__cn__")
if id_list:
filter_strings = [line.strip() for line in id_list]
else:
filter_strings = [x.strip() for x in id.split("__cn__")]
for rec in gffParse(gff3):
rec.features = feature_lambda(
rec.features,
feature_test_qual_value,
{"qualifier": attribute_field, "attribute_list": filter_strings},
subfeatures=subfeatures,
)
rec.annotations = {}
gffWrite([rec], sys.stdout)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="extract features from a GFF3 file based on ID/qualifiers"
)
parser.add_argument("gff3", type=argparse.FileType("r"), help="GFF3 annotations")
parser.add_argument("--id_list", type=argparse.FileType("r"))
parser.add_argument("--id", type=str)
parser.add_argument(
"--attribute_field",
type=str,
help="Column 9 Field to search against",
default="ID",
)
parser.add_argument(
"--subfeatures",
action="store_true",
help="Retain subfeature tree of matched features",
)
args = parser.parse_args()
gff_filter(**vars(args))
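# --- Illustrative sketch, not part of the original tool: calling gff_filter
# directly from Python. 'annotations.gff3' and 'gene42' are made-up example
# values; the filtered records are written to stdout.
def _example_gff_filter():
    with open("annotations.gff3") as handle:
        gff_filter(handle, id="gene42", attribute_field="ID", subfeatures=True)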
|
gpl-3.0
| 2,550,448,760,510,067,700 | 31.354167 | 85 | 0.627817 | false | 3.688836 | false | false | false |
greggian/TapdIn
|
django/contrib/localflavor/us/models.py
|
1
|
1132
|
from django.conf import settings
from django.db.models.fields import Field
class USStateField(Field):
def get_internal_type(self):
return "USStateField"
def db_type(self):
if settings.DATABASE_ENGINE == 'oracle':
return 'CHAR(2)'
else:
return 'varchar(2)'
def formfield(self, **kwargs):
from django.contrib.localflavor.us.forms import USStateSelect
defaults = {'widget': USStateSelect}
defaults.update(kwargs)
return super(USStateField, self).formfield(**defaults)
class PhoneNumberField(Field):
def get_internal_type(self):
return "PhoneNumberField"
def db_type(self):
if settings.DATABASE_ENGINE == 'oracle':
return 'VARCHAR2(20)'
else:
return 'varchar(20)'
def formfield(self, **kwargs):
from django.contrib.localflavor.us.forms import USPhoneNumberField
defaults = {'form_class': USPhoneNumberField}
defaults.update(kwargs)
return super(PhoneNumberField, self).formfield(**defaults)
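# --- Illustrative sketch, not part of the original module: using the two
# fields above in a model. Kept as a comment because a model class can only
# be defined inside an installed Django app.
#
# from django.db import models
#
# class Office(models.Model):
#     state = USStateField()
#     phone = PhoneNumberField()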
|
apache-2.0
| 2,579,539,055,631,886,000 | 30.342857 | 74 | 0.614841 | false | 4.337165 | false | false | false |
seraphlnWu/in_trip
|
in_trip/scripts/change_data_from_hbase_to_pg.py
|
1
|
1620
|
#coding=utf-8
import time
import cPickle
from in_trip.store_data.views import pg_db,conn
import logging
logger = logging.getLogger('parser')
def creat_table():
sql_str = '''
create table "tmp_hbase_to_pg"(
data text,
timestamp float(24)
)
'''
pg_db.execute(sql_str)
conn.commit()
def insert_data(o_dict, default_value):
data =cPickle.dumps({
'o_dict' : o_dict,
'default_value' : default_value
})
sql_str = '''
insert into tmp_hbase_to_pg
(data,timestamp)
values
(%s,%s);
'''
try:
pg_db.execute(sql_str,(data,time.time()))
conn.commit()
except Exception as e:
conn.rollback()
logger.error('insert to pg error: %s', e)
def get_data_all():
sql_str = '''
select * from tmp_hbase_to_pg;
'''
pg_db.execute(sql_str)
print pg_db.fetchall()
def get_data(offset,limit=1000):
sql_str = '''
select * from tmp_hbase_to_pg limit(%s) offset(%s);
'''
pg_db.execute(sql_str,(limit,offset))
return pg_db.fetchall()
def insert_into_hbase():
from in_trip.store_data.hbase.run import insert_data as hbase_insert
offset = 0
limit = 1000
while True:
res_list = get_data(offset,limit)
if not res_list:
break
offset = offset + limit
for item in res_list:
tmp_data = cPickle.loads(item[0])
hbase_insert(tmp_data['o_dict'],tmp_data['default_value'])
return True
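# --- Illustrative sketch, not part of the original script: the intended
# staging flow, with made-up example values.
#
# creat_table()                     # once, to create the staging table
# insert_data({'key': 'value'}, 0)  # repeatedly, from the old write path
# insert_into_hbase()               # drain the table in batches of 1000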
if __name__ == "__main__":
creat_table()
print "success!"
|
mit
| 5,948,230,377,055,756,000 | 22.478261 | 72 | 0.557407 | false | 3.347107 | false | false | false |
fallen/artiq
|
artiq/frontend/artiq_run.py
|
1
|
4103
|
#!/usr/bin/env python3
# Copyright (C) 2014, 2015 M-Labs Limited
# Copyright (C) 2014, 2015 Robert Jordens <jordens@gmail.com>
import argparse
import sys
import time
from operator import itemgetter
from itertools import chain
import logging
import h5py
from artiq.language.environment import EnvExperiment
from artiq.protocols.file_db import FlatFileDB
from artiq.master.worker_db import DeviceManager, ResultDB
from artiq.tools import *
logger = logging.getLogger(__name__)
class ELFRunner(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_argument("file")
def run(self):
with open(self.file, "rb") as f:
self.core.comm.load(f.read())
self.core.comm.run("run")
self.core.comm.serve(dict(), dict())
class SimpleParamLogger:
def set(self, timestamp, name, value):
logger.info("Parameter change: {} = {}".format(name, value))
class DummyScheduler:
def __init__(self):
self.next_rid = 0
self.pipeline_name = "main"
self.priority = 0
self.expid = None
def submit(self, pipeline_name, expid, priority, due_date, flush):
rid = self.next_rid
self.next_rid += 1
logger.info("Submitting: %s, RID=%s", expid, rid)
return rid
def delete(self, rid):
logger.info("Deleting RID %s", rid)
def pause(self):
pass
def get_argparser(with_file=True):
parser = argparse.ArgumentParser(
description="Local experiment running tool")
verbosity_args(parser)
parser.add_argument("-d", "--ddb", default="ddb.pyon",
help="device database file")
parser.add_argument("-p", "--pdb", default="pdb.pyon",
help="parameter database file")
parser.add_argument("-e", "--experiment", default=None,
help="experiment to run")
parser.add_argument("-o", "--hdf5", default=None,
help="write results to specified HDF5 file"
" (default: print them)")
if with_file:
parser.add_argument("file",
help="file containing the experiment to run")
parser.add_argument("arguments", nargs="*",
help="run arguments")
return parser
def _build_experiment(dmgr, pdb, rdb, args):
if hasattr(args, "file"):
if args.file.endswith(".elf"):
if args.arguments:
raise ValueError("arguments not supported for ELF kernels")
if args.experiment:
raise ValueError("experiment-by-name not supported "
"for ELF kernels")
return ELFRunner(dmgr, pdb, rdb, file=args.file)
else:
module = file_import(args.file)
file = args.file
else:
module = sys.modules["__main__"]
file = getattr(module, "__file__")
exp = get_experiment(module, args.experiment)
arguments = parse_arguments(args.arguments)
expid = {
"file": file,
"experiment": args.experiment,
"arguments": arguments
}
dmgr.virtual_devices["scheduler"].expid = expid
return exp(dmgr, pdb, rdb, **arguments)
def run(with_file=False):
args = get_argparser(with_file).parse_args()
init_logger(args)
dmgr = DeviceManager(FlatFileDB(args.ddb),
virtual_devices={"scheduler": DummyScheduler()})
pdb = FlatFileDB(args.pdb)
pdb.hooks.append(SimpleParamLogger())
rdb = ResultDB()
try:
exp_inst = _build_experiment(dmgr, pdb, rdb, args)
exp_inst.prepare()
exp_inst.run()
exp_inst.analyze()
finally:
dmgr.close_devices()
if args.hdf5 is not None:
with h5py.File(args.hdf5, "w") as f:
rdb.write_hdf5(f)
elif rdb.rt.read or rdb.nrt:
r = chain(rdb.rt.read.items(), rdb.nrt.items())
for k, v in sorted(r, key=itemgetter(0)):
print("{}: {}".format(k, v))
def main():
return run(with_file=True)
if __name__ == "__main__":
main()
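# --- Illustrative sketch, not part of the original tool: a typical command
# line built from the flags defined in get_argparser above; the file names
# are made-up example values.
#
#   python artiq_run.py -d ddb.pyon -p pdb.pyon -o results.h5 experiment.py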
|
gpl-3.0
| -3,275,687,307,934,452,700 | 27.894366 | 75 | 0.58835 | false | 3.76077 | false | false | false |
vntarasov/openpilot
|
selfdrive/debug/get_fingerprint.py
|
1
|
1030
|
#!/usr/bin/env python3
# simple script to get a vehicle fingerprint.
# Instructions:
# - connect to a Panda
# - run selfdrive/boardd/boardd
# - launch this script
# - turn on the car in STOCK MODE (set giraffe switches properly).
# Note: it's very important that the car is in stock mode, in order to collect a complete fingerprint
# - since some messages are published at low frequency, keep this script running for at least 30s,
# until all messages are received at least once
import cereal.messaging as messaging
logcan = messaging.sub_sock('can')
msgs = {}
while True:
lc = messaging.recv_sock(logcan, True)
if lc is None:
continue
for c in lc.can:
    # also read msgs sent by EON on CAN bus 0x80 and filter out the
    # addrs with more than 11 bits
if c.src in [0, 2] and c.address < 0x800:
msgs[c.address] = len(c.dat)
fingerprint = ', '.join("%d: %d" % v for v in sorted(msgs.items()))
print("number of messages {0}:".format(len(msgs)))
print("fingerprint {0}".format(fingerprint))
|
mit
| -3,785,566,846,449,061,400 | 31.1875 | 103 | 0.695146 | false | 3.39934 | false | false | false |
vcoin-project/v
|
qa/rpc-tests/test_framework/bignum.py
|
1
|
1991
|
#
#
# bignum.py
#
# This file is copied from python-vcoinlib.
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
"""Bignum routines"""
from __future__ import absolute_import, division, print_function, unicode_literals
import struct
# generic big endian MPI format
def bn_bytes(v, have_ext=False):
ext = 0
if have_ext:
ext = 1
return ((v.bit_length()+7)//8) + ext
def bn2bin(v):
s = bytearray()
i = bn_bytes(v)
while i > 0:
s.append((v >> ((i-1) * 8)) & 0xff)
i -= 1
return s
def bin2bn(s):
l = 0
for ch in s:
l = (l << 8) | ch
return l
def bn2mpi(v):
have_ext = False
if v.bit_length() > 0:
have_ext = (v.bit_length() & 0x07) == 0
neg = False
if v < 0:
neg = True
v = -v
s = struct.pack(b">I", bn_bytes(v, have_ext))
ext = bytearray()
if have_ext:
ext.append(0)
v_bin = bn2bin(v)
if neg:
if have_ext:
ext[0] |= 0x80
else:
v_bin[0] |= 0x80
return s + ext + v_bin
def mpi2bn(s):
if len(s) < 4:
return None
s_size = bytes(s[:4])
v_len = struct.unpack(b">I", s_size)[0]
if len(s) != (v_len + 4):
return None
if v_len == 0:
return 0
v_str = bytearray(s[4:])
neg = False
i = v_str[0]
if i & 0x80:
neg = True
i &= ~0x80
v_str[0] = i
v = bin2bn(v_str)
if neg:
return -v
return v
# vcoin-specific little endian format, with implicit size
def mpi2vch(s):
r = s[4:] # strip size
r = r[::-1] # reverse string, converting BE->LE
return r
def bn2vch(v):
return bytes(mpi2vch(bn2mpi(v)))
def vch2mpi(s):
r = struct.pack(b">I", len(s)) # size
r += s[::-1] # reverse string, converting LE->BE
return r
def vch2bn(s):
return mpi2bn(vch2mpi(s))
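# --- Illustrative self-check, not part of the original module: round trips
# through both serialization formats should be lossless for these values.
def _example_roundtrip():
    for v in (0, 1, 127, 128, -1, -255, 2**64):
        assert mpi2bn(bn2mpi(v)) == v  # big endian MPI round trip
        assert vch2bn(bn2vch(v)) == v  # little endian vch round trip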
|
mit
| -4,014,981,737,356,212,000 | 18.519608 | 82 | 0.52436 | false | 2.868876 | false | false | false |
ultimanet/nifty
|
rg/powerspectrum.py
|
1
|
26583
|
## NIFTY (Numerical Information Field Theory) has been developed at the
## Max-Planck-Institute for Astrophysics.
##
## Copyright (C) 2013 Max-Planck-Society
##
## Author: Marco Selig
## Project homepage: <http://www.mpa-garching.mpg.de/ift/nifty/>
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
## See the GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
## TODO: cythonize
from __future__ import division
import numpy as np
def draw_vector_nd(axes,dgrid,ps,symtype=0,fourier=False,zerocentered=False,kpack=None):
"""
    Draws an n-dimensional field on a regular grid from a given power
spectrum. The grid parameters need to be specified, together with a
couple of global options explained below. The dimensionality of the
field is determined automatically.
Parameters
----------
axes : ndarray
An array with the length of each axis.
dgrid : ndarray
An array with the pixel length of each axis.
ps : ndarray
The power spectrum as a function of Fourier modes.
symtype : int {0,1,2} : *optional*
Whether the output should be real valued (0), complex-hermitian (1)
or complex without symmetry (2). (default=0)
fourier : bool : *optional*
Whether the output should be in Fourier space or not
(default=False).
zerocentered : bool : *optional*
Whether the output array should be zerocentered, i.e. starting with
negative Fourier modes going over the zero mode to positive modes,
or not zerocentered, where zero, positive and negative modes are
        simply ordered consecutively.
Returns
-------
field : ndarray
The drawn random field.
"""
if(kpack is None):
kdict = np.fft.fftshift(nkdict_fast(axes,dgrid,fourier))
klength = nklength(kdict)
else:
kdict = kpack[1][np.fft.ifftshift(kpack[0],axes=shiftaxes(zerocentered,st_to_zero_mode=False))]
klength = kpack[1]
#output is in position space
if(not fourier):
#output is real-valued
if(symtype==0):
vector = drawherm(klength,kdict,ps)
if(np.any(zerocentered==True)):
return np.real(np.fft.fftshift(np.fft.ifftn(vector),axes=shiftaxes(zerocentered)))
else:
return np.real(np.fft.ifftn(vector))
#output is complex with hermitian symmetry
elif(symtype==1):
vector = drawwild(klength,kdict,ps,real_corr=2)
if(np.any(zerocentered==True)):
return np.fft.fftshift(np.fft.ifftn(np.real(vector)),axes=shiftaxes(zerocentered))
else:
return np.fft.ifftn(np.real(vector))
#output is complex without symmetry
else:
vector = drawwild(klength,kdict,ps)
if(np.any(zerocentered==True)):
return np.fft.fftshift(np.fft.ifftn(vector),axes=shiftaxes(zerocentered))
else:
return np.fft.ifftn(vector)
#output is in fourier space
else:
#output is real-valued
if(symtype==0):
vector = drawwild(klength,kdict,ps,real_corr=2)
if np.any(zerocentered == True):
return np.real(np.fft.fftshift(vector,axes=shiftaxes(zerocentered)))
else:
return np.real(vector)
#output is complex with hermitian symmetry
elif(symtype==1):
vector = drawherm(klength,kdict,ps)
if(np.any(zerocentered==True)):
return np.fft.fftshift(vector,axes=shiftaxes(zerocentered))
else:
return vector
#output is complex without symmetry
else:
vector = drawwild(klength,kdict,ps)
if(np.any(zerocentered==True)):
return np.fft.fftshift(vector,axes=shiftaxes(zerocentered))
else:
return vector
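## --- Illustrative sketch, not part of the original module: drawing a
## real-valued 64x64 random field from a made-up power-law spectrum.
def _example_draw_field():
    axes = np.array([64, 64])
    dgrid = np.array([1., 1.])
    klength = nklength(np.fft.fftshift(nkdict_fast(axes, dgrid, fourier=False)))
    ps = 1/(1+klength**2) ## arbitrary example spectrum
    return draw_vector_nd(axes, dgrid, ps, symtype=0, fourier=False)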
#def calc_ps(field,axes,dgrid,zerocentered=False,fourier=False):
#
# """
# Calculates the power spectrum of a given field assuming that the field
#    is statistically homogeneous and isotropic.
#
# Parameters
# ----------
# field : ndarray
# The input field from which the power spectrum should be determined.
#
# axes : ndarray
# An array with the length of each axis.
#
# dgrid : ndarray
# An array with the pixel length of each axis.
#
# zerocentered : bool : *optional*
# Whether the output array should be zerocentered, i.e. starting with
# negative Fourier modes going over the zero mode to positive modes,
# or not zerocentered, where zero, positive and negative modes are
#        simply ordered consecutively.
#
# fourier : bool : *optional*
# Whether the output should be in Fourier space or not
# (default=False).
#
# """
#
# ## field absolutes
# if(not fourier):
# foufield = np.fft.fftshift(np.fft.fftn(field))
# elif(np.any(zerocentered==False)):
# foufield = np.fft.fftshift(field, axes=shiftaxes(zerocentered,st_to_zero_mode=True))
# else:
# foufield = field
# fieldabs = np.abs(foufield)**2
#
# kdict = nkdict_fast(axes,dgrid,fourier)
# klength = nklength(kdict)
#
# ## power spectrum
# ps = np.zeros(klength.size)
# rho = np.zeros(klength.size)
# for ii in np.ndindex(kdict.shape):
# position = np.searchsorted(klength,kdict[ii])
# rho[position] += 1
# ps[position] += fieldabs[ii]
# ps = np.divide(ps,rho)
# return ps
def calc_ps_fast(field,axes,dgrid,zerocentered=False,fourier=False,pindex=None,kindex=None,rho=None):
"""
    Calculates the power spectrum of a given field (faster variant), assuming
    that the field is statistically homogeneous and isotropic.
Parameters
----------
field : ndarray
The input field from which the power spectrum should be determined.
axes : ndarray
An array with the length of each axis.
dgrid : ndarray
An array with the pixel length of each axis.
zerocentered : bool : *optional*
Whether the output array should be zerocentered, i.e. starting with
negative Fourier modes going over the zero mode to positive modes,
or not zerocentered, where zero, positive and negative modes are
        simply ordered consecutively.
fourier : bool : *optional*
Whether the output should be in Fourier space or not
(default=False).
pindex : ndarray
Index of the Fourier grid points in a numpy.ndarray ordered
following the zerocentered flag (default=None).
kindex : ndarray
Array of all k-vector lengths (default=None).
rho : ndarray
Degeneracy of the Fourier grid, indicating how many k-vectors in
Fourier space have the same length (default=None).
"""
## field absolutes
if(not fourier):
foufield = np.fft.fftshift(np.fft.fftn(field))
elif(np.any(zerocentered==False)):
foufield = np.fft.fftshift(field, axes=shiftaxes(zerocentered,st_to_zero_mode=True))
else:
foufield = field
fieldabs = np.abs(foufield)**2
if(rho is None):
if(pindex is None):
## kdict
kdict = nkdict_fast(axes,dgrid,fourier)
## klength
if(kindex is None):
klength = nklength(kdict)
else:
klength = kindex
## power spectrum
ps = np.zeros(klength.size)
rho = np.zeros(klength.size)
for ii in np.ndindex(kdict.shape):
position = np.searchsorted(klength,kdict[ii])
ps[position] += fieldabs[ii]
rho[position] += 1
else:
## zerocenter pindex
if(np.any(zerocentered==False)):
pindex = np.fft.fftshift(pindex, axes=shiftaxes(zerocentered,st_to_zero_mode=True))
## power spectrum
ps = np.zeros(np.max(pindex)+1)
rho = np.zeros(ps.size)
for ii in np.ndindex(pindex.shape):
ps[pindex[ii]] += fieldabs[ii]
rho[pindex[ii]] += 1
elif(pindex is None):
## kdict
kdict = nkdict_fast(axes,dgrid,fourier)
## klength
if(kindex is None):
klength = nklength(kdict)
else:
klength = kindex
## power spectrum
ps = np.zeros(klength.size)
for ii in np.ndindex(kdict.shape):
position = np.searchsorted(klength,kdict[ii])
ps[position] += fieldabs[ii]
else:
## zerocenter pindex
if(np.any(zerocentered==False)):
pindex = np.fft.fftshift(pindex, axes=shiftaxes(zerocentered,st_to_zero_mode=True))
## power spectrum
ps = np.zeros(rho.size)
for ii in np.ndindex(pindex.shape):
ps[pindex[ii]] += fieldabs[ii]
ps = np.divide(ps,rho)
return ps
def get_power_index(axes,dgrid,zerocentered,irred=False,fourier=True):
"""
Returns the index of the Fourier grid points in a numpy
array, ordered following the zerocentered flag.
Parameters
----------
axes : ndarray
An array with the length of each axis.
dgrid : ndarray
An array with the pixel length of each axis.
zerocentered : bool
Whether the output array should be zerocentered, i.e. starting with
negative Fourier modes going over the zero mode to positive modes,
or not zerocentered, where zero, positive and negative modes are
        simply ordered consecutively.
irred : bool : *optional*
If True, the function returns an array of all k-vector lengths and
their degeneracy factors. If False, just the power index array is
returned.
fourier : bool : *optional*
Whether the output should be in Fourier space or not
        (default=True).
Returns
-------
    index or (klength, rho) : ndarray or tuple of ndarrays
Returns either an array of all k-vector lengths and
their degeneracy factors or just the power index array
depending on the flag irred.
"""
## kdict, klength
if(np.any(zerocentered==False)):
kdict = np.fft.fftshift(nkdict_fast(axes,dgrid,fourier),axes=shiftaxes(zerocentered,st_to_zero_mode=True))
else:
kdict = nkdict_fast(axes,dgrid,fourier)
klength = nklength(kdict)
## output
if(irred):
rho = np.zeros(klength.shape,dtype=np.int)
for ii in np.ndindex(kdict.shape):
rho[np.searchsorted(klength,kdict[ii])] += 1
return klength,rho
else:
ind = np.empty(axes,dtype=np.int)
for ii in np.ndindex(kdict.shape):
ind[ii] = np.searchsorted(klength,kdict[ii])
return ind
def get_power_indices(axes,dgrid,zerocentered,fourier=True):
"""
Returns the index of the Fourier grid points in a numpy
array, ordered following the zerocentered flag.
Parameters
----------
axes : ndarray
An array with the length of each axis.
dgrid : ndarray
An array with the pixel length of each axis.
zerocentered : bool
Whether the output array should be zerocentered, i.e. starting with
negative Fourier modes going over the zero mode to positive modes,
or not zerocentered, where zero, positive and negative modes are
        simply ordered consecutively.
fourier : bool : *optional*
Whether the output should be in Fourier space or not
        (default=True).
Returns
-------
index, klength, rho : ndarrays
Returns the power index array, an array of all k-vector lengths and
their degeneracy factors.
"""
## kdict, klength
if(np.any(zerocentered==False)):
kdict = np.fft.fftshift(nkdict_fast(axes,dgrid,fourier),axes=shiftaxes(zerocentered,st_to_zero_mode=True))
else:
kdict = nkdict_fast(axes,dgrid,fourier)
klength = nklength(kdict)
## output
ind = np.empty(axes,dtype=np.int)
rho = np.zeros(klength.shape,dtype=np.int)
for ii in np.ndindex(kdict.shape):
ind[ii] = np.searchsorted(klength,kdict[ii])
rho[ind[ii]] += 1
return ind,klength,rho
def get_power_indices2(axes,dgrid,zerocentered,fourier=True):
"""
Returns the index of the Fourier grid points in a numpy
array, ordered following the zerocentered flag.
Parameters
----------
axes : ndarray
An array with the length of each axis.
dgrid : ndarray
An array with the pixel length of each axis.
zerocentered : bool
Whether the output array should be zerocentered, i.e. starting with
negative Fourier modes going over the zero mode to positive modes,
or not zerocentered, where zero, positive and negative modes are
        simply ordered consecutively.
fourier : bool : *optional*
Whether the output should be in Fourier space or not
        (default=True).
Returns
-------
index, klength, rho : ndarrays
Returns the power index array, an array of all k-vector lengths and
their degeneracy factors.
"""
## kdict, klength
if(np.any(zerocentered==False)):
kdict = np.fft.fftshift(nkdict_fast2(axes,dgrid,fourier),axes=shiftaxes(zerocentered,st_to_zero_mode=True))
else:
kdict = nkdict_fast2(axes,dgrid,fourier)
klength,rho,ind = nkdict_to_indices(kdict)
return ind,klength,rho
def nkdict_to_indices(kdict):
kindex,pindex = np.unique(kdict,return_inverse=True)
pindex = pindex.reshape(kdict.shape)
rho = pindex.flatten()
rho.sort()
rho = np.unique(rho,return_index=True,return_inverse=False)[1]
rho = np.append(rho[1:]-rho[:-1],[np.prod(pindex.shape)-rho[-1]])
return kindex,rho,pindex
def bin_power_indices(pindex,kindex,rho,log=False,nbin=None,binbounds=None):
"""
Returns the (re)binned power indices associated with the Fourier grid.
Parameters
----------
pindex : ndarray
Index of the Fourier grid points in a numpy.ndarray ordered
following the zerocentered flag (default=None).
kindex : ndarray
Array of all k-vector lengths (default=None).
rho : ndarray
Degeneracy of the Fourier grid, indicating how many k-vectors in
Fourier space have the same length (default=None).
log : bool
Flag specifying if the binning is performed on logarithmic scale
(default: False).
nbin : integer
Number of used bins (default: None).
binbounds : {list, array}
Array-like inner boundaries of the used bins (default: None).
Returns
-------
pindex, kindex, rho : ndarrays
The (re)binned power indices.
"""
## boundaries
if(binbounds is not None):
binbounds = np.sort(binbounds)
## equal binning
else:
if(log is None):
log = False
if(log):
k = np.r_[0,np.log(kindex[1:])]
else:
k = kindex
dk = np.max(k[2:]-k[1:-1]) ## minimal dk
if(nbin is None):
nbin = int((k[-1]-0.5*(k[2]+k[1]))/dk-0.5) ## maximal nbin
else:
nbin = min(int(nbin),int((k[-1]-0.5*(k[2]+k[1]))/dk+2.5))
dk = (k[-1]-0.5*(k[2]+k[1]))/(nbin-2.5)
binbounds = np.r_[0.5*(3*k[1]-k[2]),0.5*(k[1]+k[2])+dk*np.arange(nbin-2)]
if(log):
binbounds = np.exp(binbounds)
## reordering
reorder = np.searchsorted(binbounds,kindex)
rho_ = np.zeros(len(binbounds)+1,dtype=rho.dtype)
kindex_ = np.empty(len(binbounds)+1,dtype=kindex.dtype)
for ii in range(len(reorder)):
if(rho_[reorder[ii]]==0):
kindex_[reorder[ii]] = kindex[ii]
rho_[reorder[ii]] += rho[ii]
else:
kindex_[reorder[ii]] = (kindex_[reorder[ii]]*rho_[reorder[ii]]+kindex[ii]*rho[ii])/(rho_[reorder[ii]]+rho[ii])
rho_[reorder[ii]] += rho[ii]
return reorder[pindex],kindex_,rho_
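## --- Illustrative sketch, not part of the original module: rebinning the
## power indices of a 32x32 grid onto 10 logarithmic bins.
def _example_bin_power_indices():
    axes, dgrid = np.array([32, 32]), np.array([1., 1.])
    pindex, kindex, rho = get_power_indices(axes, dgrid,
                                            zerocentered=np.array([False, False]))
    return bin_power_indices(pindex, kindex, rho, log=True, nbin=10)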
def nhermitianize(field,zerocentered):
"""
Hermitianizes an arbitrary n-dimensional field. Becomes relatively slow
for large n.
Parameters
----------
field : ndarray
The input field that should be hermitianized.
zerocentered : bool
Whether the output array should be zerocentered, i.e. starting with
negative Fourier modes going over the zero mode to positive modes,
or not zerocentered, where zero, positive and negative modes are
        simply ordered consecutively.
Returns
-------
hermfield : ndarray
The hermitianized field.
"""
## shift zerocentered axes
if(np.any(zerocentered==True)):
field = np.fft.fftshift(field, axes=shiftaxes(zerocentered))
# for index in np.ndenumerate(field):
# negind = tuple(-np.array(index[0]))
# field[negind] = np.conjugate(index[1])
# if(field[negind]==field[index[0]]):
# field[index[0]] = np.abs(index[1])*(np.sign(index[1].real)+(np.sign(index[1].real)==0)*np.sign(index[1].imag)).astype(np.int)
subshape = np.array(field.shape,dtype=np.int) ## == axes
maxindex = subshape//2
    subshape[np.argmax(subshape)] = subshape[np.argmax(subshape)]//2+1 ## ~half largest axis
for ii in np.ndindex(tuple(subshape)):
negii = tuple(-np.array(ii))
field[negii] = np.conjugate(field[ii])
for ii in np.ndindex((2,)*maxindex.size):
index = tuple(ii*maxindex)
field[index] = np.abs(field[index])*(np.sign(field[index].real)+(np.sign(field[index].real)==0)*-np.sign(field[index].imag)).astype(np.int) ## minus since overwritten before
## reshift zerocentered axes
if(np.any(zerocentered==True)):
field = np.fft.fftshift(field,axes=shiftaxes(zerocentered))
return field
def nhermitianize_fast(field,zerocentered,special=False):
"""
Hermitianizes an arbitrary n-dimensional field faster.
Still becomes comparably slow for large n.
Parameters
----------
field : ndarray
The input field that should be hermitianized.
zerocentered : bool
Whether the output array should be zerocentered, i.e. starting with
negative Fourier modes going over the zero mode to positive modes,
or not zerocentered, where zero, positive and negative modes are
        simply ordered consecutively.
special : bool, *optional*
Must be True for random fields drawn from Gaussian or pm1
distributions.
Returns
-------
hermfield : ndarray
The hermitianized field.
"""
## shift zerocentered axes
if(np.any(zerocentered==True)):
field = np.fft.fftshift(field, axes=shiftaxes(zerocentered))
dummy = np.conjugate(field)
## mirror conjugate field
for ii in range(field.ndim):
dummy = np.swapaxes(dummy,0,ii)
dummy = np.flipud(dummy)
dummy = np.roll(dummy,1,axis=0)
dummy = np.swapaxes(dummy,0,ii)
if(special): ## special normalisation for certain random fields
field = np.sqrt(0.5)*(field+dummy)
maxindex = np.array(field.shape,dtype=np.int)//2
for ii in np.ndindex((2,)*maxindex.size):
index = tuple(ii*maxindex)
field[index] *= np.sqrt(0.5)
else: ## regular case
field = 0.5*(field+dummy)
## reshift zerocentered axes
if(np.any(zerocentered==True)):
field = np.fft.fftshift(field,axes=shiftaxes(zerocentered))
return field
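## --- Illustrative self-check, not part of the original module: a field
## hermitianized by nhermitianize_fast should have a real inverse FFT.
def _example_hermitian_check():
    field = np.random.randn(8, 8)+1j*np.random.randn(8, 8)
    herm = nhermitianize_fast(field, np.array([False, False]))
    return np.allclose(np.fft.ifftn(herm).imag, 0)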
def random_hermitian_pm1(datatype,zerocentered,shape):
"""
Draws a set of hermitianized random, complex pm1 numbers.
"""
field = np.random.randint(4,high=None,size=np.prod(shape,axis=0,dtype=np.int,out=None)).reshape(shape,order='C')
dummy = np.copy(field)
## mirror field
for ii in range(field.ndim):
dummy = np.swapaxes(dummy,0,ii)
dummy = np.flipud(dummy)
dummy = np.roll(dummy,1,axis=0)
dummy = np.swapaxes(dummy,0,ii)
field = (field+dummy+2*(field>dummy)*((field+dummy)%2))%4 ## wicked magic
x = np.array([1+0j,0+1j,-1+0j,0-1j],dtype=datatype)[field]
## (re)shift zerocentered axes
if(np.any(zerocentered==True)):
field = np.fft.fftshift(field,axes=shiftaxes(zerocentered))
return x
#-----------------------------------------------------------------------------
# Auxiliary functions
#-----------------------------------------------------------------------------
def shiftaxes(zerocentered,st_to_zero_mode=False):
"""
Shifts the axes in a special way needed for some functions
"""
axes = []
for ii in range(len(zerocentered)):
if(st_to_zero_mode==False)and(zerocentered[ii]):
axes += [ii]
if(st_to_zero_mode==True)and(not zerocentered[ii]):
axes += [ii]
return axes
def nkdict(axes,dgrid,fourier=True):
"""
Calculates an n-dimensional array with its entries being the lengths of
the k-vectors from the zero point of the Fourier grid.
"""
if(fourier):
dk = dgrid
else:
dk = np.array([1/axes[i]/dgrid[i] for i in range(len(axes))])
kdict = np.empty(axes)
for ii in np.ndindex(kdict.shape):
kdict[ii] = np.sqrt(np.sum(((ii-axes//2)*dk)**2))
return kdict
def nkdict_fast(axes,dgrid,fourier=True):
"""
Calculates an n-dimensional array with its entries being the lengths of
the k-vectors from the zero point of the Fourier grid.
"""
if(fourier):
dk = dgrid
else:
dk = np.array([1/dgrid[i]/axes[i] for i in range(len(axes))])
temp_vecs = np.array(np.where(np.ones(axes)),dtype='float').reshape(np.append(len(axes),axes))
temp_vecs = np.rollaxis(temp_vecs,0,len(temp_vecs.shape))
temp_vecs -= axes//2
temp_vecs *= dk
temp_vecs *= temp_vecs
return np.sqrt(np.sum((temp_vecs),axis=-1))
def nkdict_fast2(axes,dgrid,fourier=True):
"""
Calculates an n-dimensional array with its entries being the lengths of
the k-vectors from the zero point of the grid.
"""
if(fourier):
dk = dgrid
else:
dk = np.array([1/dgrid[i]/axes[i] for i in range(len(axes))])
inds = []
for a in axes:
inds += [slice(0,a)]
cords = np.ogrid[inds]
dists = ((cords[0]-axes[0]//2)*dk[0])**2
for ii in range(1,len(axes)):
dists = dists + ((cords[ii]-axes[ii]//2)*dk[ii])**2
dists = np.sqrt(dists)
return dists
def nklength(kdict):
return np.sort(list(set(kdict.flatten())))
#def drawherm(vector,klength,kdict,ps): ## vector = np.zeros(kdict.shape,dtype=np.complex)
# for ii in np.ndindex(vector.shape):
# if(vector[ii]==np.complex(0.,0.)):
# vector[ii] = np.sqrt(0.5*ps[np.searchsorted(klength,kdict[ii])])*np.complex(np.random.normal(0.,1.),np.random.normal(0.,1.))
# negii = tuple(-np.array(ii))
# vector[negii] = np.conjugate(vector[ii])
# if(vector[negii]==vector[ii]):
# vector[ii] = np.float(np.sqrt(ps[klength==kdict[ii]]))*np.random.normal(0.,1.)
# return vector
def drawherm(klength,kdict,ps):
"""
Draws a hermitian random field from a Gaussian distribution.
"""
# vector = np.zeros(kdict.shape,dtype='complex')
# for ii in np.ndindex(vector.shape):
# if(vector[ii]==np.complex(0.,0.)):
# vector[ii] = np.sqrt(0.5*ps[np.searchsorted(klength,kdict[ii])])*np.complex(np.random.normal(0.,1.),np.random.normal(0.,1.))
# negii = tuple(-np.array(ii))
# vector[negii] = np.conjugate(vector[ii])
# if(vector[negii]==vector[ii]):
# vector[ii] = np.float(np.sqrt(ps[np.searchsorted(klength,kdict[ii])]))*np.random.normal(0.,1.)
# return vector
vec = np.random.normal(loc=0,scale=1,size=kdict.size).reshape(kdict.shape)
vec = np.fft.fftn(vec)/np.sqrt(np.prod(kdict.shape))
for ii in np.ndindex(kdict.shape):
vec[ii] *= np.sqrt(ps[np.searchsorted(klength,kdict[ii])])
return vec
#def drawwild(vector,klength,kdict,ps,real_corr=1): ## vector = np.zeros(kdict.shape,dtype=np.complex)
# for ii in np.ndindex(vector.shape):
# vector[ii] = np.sqrt(real_corr*0.5*ps[klength==kdict[ii]])*np.complex(np.random.normal(0.,1.),np.random.normal(0.,1.))
# return vector
def drawwild(klength,kdict,ps,real_corr=1):
"""
Draws a field of arbitrary symmetry from a Gaussian distribution.
"""
vec = np.empty(kdict.size,dtype=np.complex)
vec.real = np.random.normal(loc=0,scale=np.sqrt(real_corr*0.5),size=kdict.size)
vec.imag = np.random.normal(loc=0,scale=np.sqrt(real_corr*0.5),size=kdict.size)
vec = vec.reshape(kdict.shape)
for ii in np.ndindex(kdict.shape):
vec[ii] *= np.sqrt(ps[np.searchsorted(klength,kdict[ii])])
return vec
|
gpl-3.0
| 8,155,718,674,426,123,000 | 33.703655 | 181 | 0.600271 | false | 3.52186 | false | false | false |
fnordahl/nova
|
nova/exception.py
|
1
|
56858
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Nova base exception handling.
Includes decorator for re-raising Nova-type exceptions.
SHOULD include dedicated exception logging.
"""
import functools
import sys
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
import six
import webob.exc
from webob import util as woutil
from nova.i18n import _, _LE
from nova import safe_utils
LOG = logging.getLogger(__name__)
exc_log_opts = [
cfg.BoolOpt('fatal_exception_format_errors',
default=False,
help='Make exception message format errors fatal'),
]
CONF = cfg.CONF
CONF.register_opts(exc_log_opts)
class ConvertedException(webob.exc.WSGIHTTPException):
def __init__(self, code, title="", explanation=""):
self.code = code
# There is a strict rule about constructing status line for HTTP:
# '...Status-Line, consisting of the protocol version followed by a
# numeric status code and its associated textual phrase, with each
# element separated by SP characters'
# (http://www.faqs.org/rfcs/rfc2616.html)
# 'code' and 'title' can not be empty because they correspond
# to numeric status code and its associated text
if title:
self.title = title
else:
try:
self.title = woutil.status_reasons[self.code]
except KeyError:
msg = _LE("Improper or unknown HTTP status code used: %d")
LOG.error(msg, code)
self.title = woutil.status_generic_reasons[self.code // 100]
self.explanation = explanation
super(ConvertedException, self).__init__()
def _cleanse_dict(original):
"""Strip all admin_password, new_pass, rescue_pass keys from a dict."""
return {k: v for k, v in six.iteritems(original) if "_pass" not in k}
def wrap_exception(notifier=None, get_notifier=None):
"""This decorator wraps a method to catch any exceptions that may
get thrown. It also optionally sends the exception to the notification
system.
"""
def inner(f):
def wrapped(self, context, *args, **kw):
# Don't store self or context in the payload, it now seems to
# contain confidential information.
try:
return f(self, context, *args, **kw)
except Exception as e:
with excutils.save_and_reraise_exception():
if notifier or get_notifier:
payload = dict(exception=e)
call_dict = safe_utils.getcallargs(f, context,
*args, **kw)
cleansed = _cleanse_dict(call_dict)
payload.update({'args': cleansed})
# If f has multiple decorators, they must use
# functools.wraps to ensure the name is
# propagated.
event_type = f.__name__
(notifier or get_notifier()).error(context,
event_type,
payload)
return functools.wraps(f)(wrapped)
return inner
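# --- Illustrative sketch, not part of the original module: with no notifier
# configured, wrap_exception simply re-raises the original exception.
def _example_wrap_exception():
    class _Manager(object):
        @wrap_exception()
        def do_work(self, context):
            raise ValueError("boom")
    try:
        _Manager().do_work(None)
    except ValueError:
        return True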
class NovaException(Exception):
"""Base Nova Exception
To correctly use this class, inherit from it and define
a 'msg_fmt' property. That msg_fmt will get printf'd
with the keyword arguments provided to the constructor.
"""
msg_fmt = _("An unknown exception occurred.")
code = 500
headers = {}
safe = False
def __init__(self, message=None, **kwargs):
self.kwargs = kwargs
if 'code' not in self.kwargs:
try:
self.kwargs['code'] = self.code
except AttributeError:
pass
if not message:
try:
message = self.msg_fmt % kwargs
except Exception:
exc_info = sys.exc_info()
# kwargs doesn't match a variable in the message
# log the issue and the kwargs
LOG.exception(_LE('Exception in string format operation'))
for name, value in six.iteritems(kwargs):
LOG.error("%s: %s" % (name, value)) # noqa
if CONF.fatal_exception_format_errors:
six.reraise(*exc_info)
else:
# at least get the core message out if something happened
message = self.msg_fmt
self.message = message
super(NovaException, self).__init__(message)
def format_message(self):
# NOTE(mrodden): use the first argument to the python Exception object
# which should be our full NovaException message, (see __init__)
return self.args[0]
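# --- Illustrative sketch, not part of the original module: subclasses only
# define msg_fmt; constructor kwargs fill the printf-style placeholders.
def _example_exception_usage():
    # VolumeNotFound (defined below) uses:
    #     msg_fmt = _("Volume %(volume_id)s could not be found.")
    exc = VolumeNotFound(volume_id='vol-1234')
    return exc.format_message()  # "Volume vol-1234 could not be found."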
class EncryptionFailure(NovaException):
msg_fmt = _("Failed to encrypt text: %(reason)s")
class DecryptionFailure(NovaException):
msg_fmt = _("Failed to decrypt text: %(reason)s")
class RevokeCertFailure(NovaException):
msg_fmt = _("Failed to revoke certificate for %(project_id)s")
class VirtualInterfaceCreateException(NovaException):
msg_fmt = _("Virtual Interface creation failed")
class VirtualInterfaceMacAddressException(NovaException):
msg_fmt = _("Creation of virtual interface with "
"unique mac address failed")
class VirtualInterfacePlugException(NovaException):
msg_fmt = _("Virtual interface plugin failed")
class GlanceConnectionFailed(NovaException):
msg_fmt = _("Connection to glance host %(host)s:%(port)s failed: "
"%(reason)s")
class CinderConnectionFailed(NovaException):
msg_fmt = _("Connection to cinder host failed: %(reason)s")
class Forbidden(NovaException):
ec2_code = 'AuthFailure'
msg_fmt = _("Not authorized.")
code = 403
class AdminRequired(Forbidden):
msg_fmt = _("User does not have admin privileges")
class PolicyNotAuthorized(Forbidden):
msg_fmt = _("Policy doesn't allow %(action)s to be performed.")
class VolumeLimitExceeded(Forbidden):
msg_fmt = _("Volume resource quota exceeded")
class ImageNotActive(NovaException):
# NOTE(jruzicka): IncorrectState is used for volumes only in EC2,
# but it still seems like the most appropriate option.
ec2_code = 'IncorrectState'
msg_fmt = _("Image %(image_id)s is not active.")
class ImageNotAuthorized(NovaException):
msg_fmt = _("Not authorized for image %(image_id)s.")
class Invalid(NovaException):
msg_fmt = _("Unacceptable parameters.")
code = 400
class InvalidBDM(Invalid):
msg_fmt = _("Block Device Mapping is Invalid.")
class InvalidBDMSnapshot(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"failed to get snapshot %(id)s.")
class InvalidBDMVolume(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"failed to get volume %(id)s.")
class InvalidBDMImage(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"failed to get image %(id)s.")
class InvalidBDMBootSequence(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"Boot sequence for the instance "
"and image/block device mapping "
"combination is not valid.")
class InvalidBDMLocalsLimit(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"You specified more local devices than the "
"limit allows")
class InvalidBDMEphemeralSize(InvalidBDM):
msg_fmt = _("Ephemeral disks requested are larger than "
"the instance type allows.")
class InvalidBDMSwapSize(InvalidBDM):
msg_fmt = _("Swap drive requested is larger than instance type allows.")
class InvalidBDMFormat(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"%(details)s")
class InvalidBDMForLegacy(InvalidBDM):
msg_fmt = _("Block Device Mapping cannot "
"be converted to legacy format. ")
class InvalidBDMVolumeNotBootable(InvalidBDM):
msg_fmt = _("Block Device %(id)s is not bootable.")
class InvalidAttribute(Invalid):
msg_fmt = _("Attribute not supported: %(attr)s")
class ValidationError(Invalid):
msg_fmt = "%(detail)s"
class VolumeUnattached(Invalid):
ec2_code = 'IncorrectState'
msg_fmt = _("Volume %(volume_id)s is not attached to anything")
class VolumeNotCreated(NovaException):
msg_fmt = _("Volume %(volume_id)s did not finish being created"
" even after we waited %(seconds)s seconds or %(attempts)s"
" attempts. And its status is %(volume_status)s.")
class VolumeEncryptionNotSupported(Invalid):
msg_fmt = _("Volume encryption is not supported for %(volume_type)s "
"volume %(volume_id)s")
class InvalidKeypair(Invalid):
ec2_code = 'InvalidKeyPair.Format'
msg_fmt = _("Keypair data is invalid: %(reason)s")
class InvalidRequest(Invalid):
msg_fmt = _("The request is invalid.")
class InvalidInput(Invalid):
msg_fmt = _("Invalid input received: %(reason)s")
class InvalidVolume(Invalid):
ec2_code = 'UnsupportedOperation'
msg_fmt = _("Invalid volume: %(reason)s")
class InvalidVolumeAccessMode(Invalid):
msg_fmt = _("Invalid volume access mode: %(access_mode)s")
class InvalidMetadata(Invalid):
msg_fmt = _("Invalid metadata: %(reason)s")
class InvalidMetadataSize(Invalid):
msg_fmt = _("Invalid metadata size: %(reason)s")
class InvalidPortRange(Invalid):
ec2_code = 'InvalidParameterValue'
msg_fmt = _("Invalid port range %(from_port)s:%(to_port)s. %(msg)s")
class InvalidIpProtocol(Invalid):
msg_fmt = _("Invalid IP protocol %(protocol)s.")
class InvalidContentType(Invalid):
msg_fmt = _("Invalid content type %(content_type)s.")
class InvalidAPIVersionString(Invalid):
msg_fmt = _("API Version String %(version)s is of invalid format. Must "
"be of format MajorNum.MinorNum.")
class VersionNotFoundForAPIMethod(Invalid):
msg_fmt = _("API version %(version)s is not supported on this method.")
class InvalidGlobalAPIVersion(Invalid):
msg_fmt = _("Version %(req_ver)s is not supported by the API. Minimum "
"is %(min_ver)s and maximum is %(max_ver)s.")
# Cannot be templated as the error syntax varies.
# msg needs to be constructed when raised.
class InvalidParameterValue(Invalid):
ec2_code = 'InvalidParameterValue'
msg_fmt = _("%(err)s")
class InvalidAggregateAction(Invalid):
msg_fmt = _("Unacceptable parameters.")
code = 400
class InvalidAggregateActionAdd(InvalidAggregateAction):
msg_fmt = _("Cannot add host to aggregate "
"%(aggregate_id)s. Reason: %(reason)s.")
class InvalidAggregateActionDelete(InvalidAggregateAction):
msg_fmt = _("Cannot remove host from aggregate "
"%(aggregate_id)s. Reason: %(reason)s.")
class InvalidAggregateActionUpdate(InvalidAggregateAction):
msg_fmt = _("Cannot update aggregate "
"%(aggregate_id)s. Reason: %(reason)s.")
class InvalidAggregateActionUpdateMeta(InvalidAggregateAction):
msg_fmt = _("Cannot update metadata of aggregate "
"%(aggregate_id)s. Reason: %(reason)s.")
class InvalidGroup(Invalid):
msg_fmt = _("Group not valid. Reason: %(reason)s")
class InvalidSortKey(Invalid):
msg_fmt = _("Sort key supplied was not valid.")
class InvalidStrTime(Invalid):
msg_fmt = _("Invalid datetime string: %(reason)s")
class InstanceInvalidState(Invalid):
msg_fmt = _("Instance %(instance_uuid)s in %(attr)s %(state)s. Cannot "
"%(method)s while the instance is in this state.")
class InstanceNotRunning(Invalid):
msg_fmt = _("Instance %(instance_id)s is not running.")
class InstanceNotInRescueMode(Invalid):
msg_fmt = _("Instance %(instance_id)s is not in rescue mode")
class InstanceNotRescuable(Invalid):
msg_fmt = _("Instance %(instance_id)s cannot be rescued: %(reason)s")
class InstanceNotReady(Invalid):
msg_fmt = _("Instance %(instance_id)s is not ready")
class InstanceSuspendFailure(Invalid):
msg_fmt = _("Failed to suspend instance: %(reason)s")
class InstanceResumeFailure(Invalid):
msg_fmt = _("Failed to resume instance: %(reason)s")
class InstancePowerOnFailure(Invalid):
msg_fmt = _("Failed to power on instance: %(reason)s")
class InstancePowerOffFailure(Invalid):
msg_fmt = _("Failed to power off instance: %(reason)s")
class InstanceRebootFailure(Invalid):
msg_fmt = _("Failed to reboot instance: %(reason)s")
class InstanceTerminationFailure(Invalid):
msg_fmt = _("Failed to terminate instance: %(reason)s")
class InstanceDeployFailure(Invalid):
msg_fmt = _("Failed to deploy instance: %(reason)s")
class MultiplePortsNotApplicable(Invalid):
msg_fmt = _("Failed to launch instances: %(reason)s")
class InvalidFixedIpAndMaxCountRequest(Invalid):
msg_fmt = _("Failed to launch instances: %(reason)s")
class ServiceUnavailable(Invalid):
msg_fmt = _("Service is unavailable at this time.")
class ComputeResourcesUnavailable(ServiceUnavailable):
msg_fmt = _("Insufficient compute resources: %(reason)s.")
class HypervisorUnavailable(NovaException):
msg_fmt = _("Connection to the hypervisor is broken on host: %(host)s")
class ComputeServiceUnavailable(ServiceUnavailable):
msg_fmt = _("Compute service of %(host)s is unavailable at this time.")
class ComputeServiceInUse(NovaException):
msg_fmt = _("Compute service of %(host)s is still in use.")
class UnableToMigrateToSelf(Invalid):
msg_fmt = _("Unable to migrate instance (%(instance_id)s) "
"to current host (%(host)s).")
class InvalidHypervisorType(Invalid):
msg_fmt = _("The supplied hypervisor type of is invalid.")
class DestinationHypervisorTooOld(Invalid):
msg_fmt = _("The instance requires a newer hypervisor version than "
"has been provided.")
class ServiceTooOld(Invalid):
msg_fmt = _("This service is older (v%(thisver)i) than the minimum "
"(v%(minver)i) version of the rest of the deployment. "
"Unable to continue.")
class DestinationDiskExists(Invalid):
msg_fmt = _("The supplied disk path (%(path)s) already exists, "
"it is expected not to exist.")
class InvalidDevicePath(Invalid):
msg_fmt = _("The supplied device path (%(path)s) is invalid.")
class DevicePathInUse(Invalid):
msg_fmt = _("The supplied device path (%(path)s) is in use.")
code = 409
class DeviceIsBusy(Invalid):
msg_fmt = _("The supplied device (%(device)s) is busy.")
class InvalidCPUInfo(Invalid):
msg_fmt = _("Unacceptable CPU info: %(reason)s")
class InvalidIpAddressError(Invalid):
msg_fmt = _("%(address)s is not a valid IP v4/6 address.")
class InvalidVLANTag(Invalid):
msg_fmt = _("VLAN tag is not appropriate for the port group "
"%(bridge)s. Expected VLAN tag is %(tag)s, "
"but the one associated with the port group is %(pgroup)s.")
class InvalidVLANPortGroup(Invalid):
msg_fmt = _("vSwitch which contains the port group %(bridge)s is "
"not associated with the desired physical adapter. "
"Expected vSwitch is %(expected)s, but the one associated "
"is %(actual)s.")
class InvalidDiskFormat(Invalid):
msg_fmt = _("Disk format %(disk_format)s is not acceptable")
class InvalidDiskInfo(Invalid):
msg_fmt = _("Disk info file is invalid: %(reason)s")
class DiskInfoReadWriteFail(Invalid):
msg_fmt = _("Failed to read or write disk info file: %(reason)s")
class ImageUnacceptable(Invalid):
msg_fmt = _("Image %(image_id)s is unacceptable: %(reason)s")
class InstanceUnacceptable(Invalid):
msg_fmt = _("Instance %(instance_id)s is unacceptable: %(reason)s")
class InvalidEc2Id(Invalid):
msg_fmt = _("Ec2 id %(ec2_id)s is unacceptable.")
class InvalidUUID(Invalid):
msg_fmt = _("Expected a uuid but received %(uuid)s.")
class InvalidID(Invalid):
msg_fmt = _("Invalid ID received %(id)s.")
class ConstraintNotMet(NovaException):
msg_fmt = _("Constraint not met.")
code = 412
class NotFound(NovaException):
msg_fmt = _("Resource could not be found.")
code = 404
class AgentBuildNotFound(NotFound):
msg_fmt = _("No agent-build associated with id %(id)s.")
class AgentBuildExists(NovaException):
msg_fmt = _("Agent-build with hypervisor %(hypervisor)s os %(os)s "
"architecture %(architecture)s exists.")
class VolumeNotFound(NotFound):
ec2_code = 'InvalidVolume.NotFound'
msg_fmt = _("Volume %(volume_id)s could not be found.")
class BDMNotFound(NotFound):
msg_fmt = _("No Block Device Mapping with id %(id)s.")
class VolumeBDMNotFound(NotFound):
msg_fmt = _("No volume Block Device Mapping with id %(volume_id)s.")
class VolumeBDMPathNotFound(VolumeBDMNotFound):
msg_fmt = _("No volume Block Device Mapping at path: %(path)s")
class SnapshotNotFound(NotFound):
ec2_code = 'InvalidSnapshot.NotFound'
msg_fmt = _("Snapshot %(snapshot_id)s could not be found.")
class DiskNotFound(NotFound):
msg_fmt = _("No disk at %(location)s")
class VolumeDriverNotFound(NotFound):
msg_fmt = _("Could not find a handler for %(driver_type)s volume.")
class InvalidImageRef(Invalid):
msg_fmt = _("Invalid image href %(image_href)s.")
class AutoDiskConfigDisabledByImage(Invalid):
msg_fmt = _("Requested image %(image)s "
"has automatic disk resize disabled.")
class ImageNotFound(NotFound):
msg_fmt = _("Image %(image_id)s could not be found.")
class PreserveEphemeralNotSupported(Invalid):
msg_fmt = _("The current driver does not support "
"preserving ephemeral partitions.")
# NOTE(jruzicka): ImageNotFound is not a valid EC2 error code.
class ImageNotFoundEC2(ImageNotFound):
msg_fmt = _("Image %(image_id)s could not be found. The nova EC2 API "
"assigns image ids dynamically when they are listed for the "
"first time. Have you listed image ids since adding this "
"image?")
class ProjectNotFound(NotFound):
msg_fmt = _("Project %(project_id)s could not be found.")
class StorageRepositoryNotFound(NotFound):
msg_fmt = _("Cannot find SR to read/write VDI.")
class InstanceMappingNotFound(NotFound):
msg_fmt = _("Instance %(uuid)s has no mapping to a cell.")
class NetworkDuplicated(Invalid):
msg_fmt = _("Network %(network_id)s is duplicated.")
class NetworkDhcpReleaseFailed(NovaException):
msg_fmt = _("Failed to release IP %(address)s with MAC %(mac_address)s")
class NetworkInUse(NovaException):
msg_fmt = _("Network %(network_id)s is still in use.")
class NetworkSetHostFailed(NovaException):
msg_fmt = _("Network set host failed for network %(network_id)s.")
class NetworkNotCreated(Invalid):
msg_fmt = _("%(req)s is required to create a network.")
class LabelTooLong(Invalid):
msg_fmt = _("Maximum allowed length for 'label' is 255.")
class InvalidIntValue(Invalid):
msg_fmt = _("%(key)s must be an integer.")
class InvalidCidr(Invalid):
msg_fmt = _("%(cidr)s is not a valid ip network.")
class InvalidAddress(Invalid):
msg_fmt = _("%(address)s is not a valid ip address.")
class AddressOutOfRange(Invalid):
msg_fmt = _("%(address)s is not within %(cidr)s.")
class DuplicateVlan(NovaException):
msg_fmt = _("Detected existing vlan with id %(vlan)d")
code = 409
class CidrConflict(NovaException):
msg_fmt = _('Requested cidr (%(cidr)s) conflicts '
'with existing cidr (%(other)s)')
code = 409
class NetworkHasProject(NetworkInUse):
msg_fmt = _('Network must be disassociated from project '
'%(project_id)s before it can be deleted.')
class NetworkNotFound(NotFound):
msg_fmt = _("Network %(network_id)s could not be found.")
class PortNotFound(NotFound):
msg_fmt = _("Port id %(port_id)s could not be found.")
class NetworkNotFoundForBridge(NetworkNotFound):
msg_fmt = _("Network could not be found for bridge %(bridge)s")
class NetworkNotFoundForUUID(NetworkNotFound):
msg_fmt = _("Network could not be found for uuid %(uuid)s")
class NetworkNotFoundForCidr(NetworkNotFound):
msg_fmt = _("Network could not be found with cidr %(cidr)s.")
class NetworkNotFoundForInstance(NetworkNotFound):
msg_fmt = _("Network could not be found for instance %(instance_id)s.")
class NoNetworksFound(NotFound):
msg_fmt = _("No networks defined.")
class NoMoreNetworks(NovaException):
msg_fmt = _("No more available networks.")
class NetworkNotFoundForProject(NetworkNotFound):
msg_fmt = _("Either network uuid %(network_uuid)s is not present or "
"is not assigned to the project %(project_id)s.")
class NetworkAmbiguous(Invalid):
msg_fmt = _("More than one possible network found. Specify "
"network ID(s) to select which one(s) to connect to.")
class NetworkRequiresSubnet(Invalid):
msg_fmt = _("Network %(network_uuid)s requires a subnet in order to boot"
" instances on.")
class ExternalNetworkAttachForbidden(Forbidden):
msg_fmt = _("It is not allowed to create an interface on "
"external network %(network_uuid)s")
class NetworkMissingPhysicalNetwork(NovaException):
msg_fmt = _("Physical network is missing for network %(network_uuid)s")
class VifDetailsMissingVhostuserSockPath(Invalid):
msg_fmt = _("vhostuser_sock_path not present in vif_details"
" for vif %(vif_id)s")
class VifDetailsMissingMacvtapParameters(Invalid):
msg_fmt = _("Parameters %(missing_params)s not present in"
" vif_details for vif %(vif_id)s. Check your Neutron"
" configuration to validate that the macvtap parameters are"
" correct.")
class DatastoreNotFound(NotFound):
msg_fmt = _("Could not find the datastore reference(s) which the VM uses.")
class PortInUse(Invalid):
msg_fmt = _("Port %(port_id)s is still in use.")
class PortRequiresFixedIP(Invalid):
msg_fmt = _("Port %(port_id)s requires a FixedIP in order to be used.")
class PortNotUsable(Invalid):
msg_fmt = _("Port %(port_id)s not usable for instance %(instance)s.")
class PortNotFree(Invalid):
msg_fmt = _("No free port available for instance %(instance)s.")
class PortBindingFailed(Invalid):
msg_fmt = _("Binding failed for port %(port_id)s, please check neutron "
"logs for more information.")
class FixedIpExists(NovaException):
msg_fmt = _("Fixed ip %(address)s already exists.")
class FixedIpNotFound(NotFound):
msg_fmt = _("No fixed IP associated with id %(id)s.")
class FixedIpNotFoundForAddress(FixedIpNotFound):
msg_fmt = _("Fixed ip not found for address %(address)s.")
class FixedIpNotFoundForInstance(FixedIpNotFound):
msg_fmt = _("Instance %(instance_uuid)s has zero fixed ips.")
class FixedIpNotFoundForNetworkHost(FixedIpNotFound):
msg_fmt = _("Network host %(host)s has zero fixed ips "
"in network %(network_id)s.")
class FixedIpNotFoundForSpecificInstance(FixedIpNotFound):
msg_fmt = _("Instance %(instance_uuid)s doesn't have fixed ip '%(ip)s'.")
class FixedIpNotFoundForNetwork(FixedIpNotFound):
msg_fmt = _("Fixed IP address (%(address)s) does not exist in "
"network (%(network_uuid)s).")
class FixedIpAssociateFailed(NovaException):
msg_fmt = _("Fixed IP associate failed for network: %(net)s.")
class FixedIpAlreadyInUse(NovaException):
msg_fmt = _("Fixed IP address %(address)s is already in use on instance "
"%(instance_uuid)s.")
class FixedIpAssociatedWithMultipleInstances(NovaException):
msg_fmt = _("More than one instance is associated with fixed ip address "
"'%(address)s'.")
class FixedIpInvalid(Invalid):
msg_fmt = _("Fixed IP address %(address)s is invalid.")
class NoMoreFixedIps(NovaException):
ec2_code = 'UnsupportedOperation'
msg_fmt = _("No fixed IP addresses available for network: %(net)s")
class NoFixedIpsDefined(NotFound):
msg_fmt = _("Zero fixed ips could be found.")
class FloatingIpExists(NovaException):
msg_fmt = _("Floating ip %(address)s already exists.")
class FloatingIpNotFound(NotFound):
ec2_code = "UnsupportedOperation"
msg_fmt = _("Floating ip not found for id %(id)s.")
class FloatingIpDNSExists(Invalid):
msg_fmt = _("The DNS entry %(name)s already exists in domain %(domain)s.")
class FloatingIpNotFoundForAddress(FloatingIpNotFound):
msg_fmt = _("Floating ip not found for address %(address)s.")
class FloatingIpNotFoundForHost(FloatingIpNotFound):
msg_fmt = _("Floating ip not found for host %(host)s.")
class FloatingIpMultipleFoundForAddress(NovaException):
msg_fmt = _("Multiple floating ips are found for address %(address)s.")
class FloatingIpPoolNotFound(NotFound):
msg_fmt = _("Floating ip pool not found.")
safe = True
class NoMoreFloatingIps(FloatingIpNotFound):
msg_fmt = _("Zero floating ips available.")
safe = True
class FloatingIpAssociated(NovaException):
ec2_code = "UnsupportedOperation"
msg_fmt = _("Floating ip %(address)s is associated.")
class FloatingIpNotAssociated(NovaException):
msg_fmt = _("Floating ip %(address)s is not associated.")
class NoFloatingIpsDefined(NotFound):
msg_fmt = _("Zero floating ips exist.")
class NoFloatingIpInterface(NotFound):
ec2_code = "UnsupportedOperation"
msg_fmt = _("Interface %(interface)s not found.")
class FloatingIpAllocateFailed(NovaException):
msg_fmt = _("Floating IP allocate failed.")
class FloatingIpAssociateFailed(NovaException):
msg_fmt = _("Floating IP %(address)s association has failed.")
class FloatingIpBadRequest(Invalid):
ec2_code = "UnsupportedOperation"
msg_fmt = _("The floating IP request failed with a BadRequest")
class CannotDisassociateAutoAssignedFloatingIP(NovaException):
ec2_code = "UnsupportedOperation"
msg_fmt = _("Cannot disassociate auto assigned floating ip")
class KeypairNotFound(NotFound):
ec2_code = 'InvalidKeyPair.NotFound'
msg_fmt = _("Keypair %(name)s not found for user %(user_id)s")
class ServiceNotFound(NotFound):
msg_fmt = _("Service %(service_id)s could not be found.")
class ServiceBinaryExists(NovaException):
msg_fmt = _("Service with host %(host)s binary %(binary)s exists.")
class ServiceTopicExists(NovaException):
msg_fmt = _("Service with host %(host)s topic %(topic)s exists.")
class HostNotFound(NotFound):
msg_fmt = _("Host %(host)s could not be found.")
class ComputeHostNotFound(HostNotFound):
msg_fmt = _("Compute host %(host)s could not be found.")
class ComputeHostNotCreated(HostNotFound):
msg_fmt = _("Compute host %(name)s needs to be created first"
" before updating.")
class HostBinaryNotFound(NotFound):
msg_fmt = _("Could not find binary %(binary)s on host %(host)s.")
class InvalidReservationExpiration(Invalid):
msg_fmt = _("Invalid reservation expiration %(expire)s.")
class InvalidQuotaValue(Invalid):
msg_fmt = _("Change would make usage less than 0 for the following "
"resources: %(unders)s")
class InvalidQuotaMethodUsage(Invalid):
msg_fmt = _("Wrong quota method %(method)s used on resource %(res)s")
class QuotaNotFound(NotFound):
msg_fmt = _("Quota could not be found")
class QuotaExists(NovaException):
msg_fmt = _("Quota exists for project %(project_id)s, "
"resource %(resource)s")
class QuotaResourceUnknown(QuotaNotFound):
msg_fmt = _("Unknown quota resources %(unknown)s.")
class ProjectUserQuotaNotFound(QuotaNotFound):
msg_fmt = _("Quota for user %(user_id)s in project %(project_id)s "
"could not be found.")
class ProjectQuotaNotFound(QuotaNotFound):
msg_fmt = _("Quota for project %(project_id)s could not be found.")
class QuotaClassNotFound(QuotaNotFound):
msg_fmt = _("Quota class %(class_name)s could not be found.")
class QuotaUsageNotFound(QuotaNotFound):
msg_fmt = _("Quota usage for project %(project_id)s could not be found.")
class ReservationNotFound(QuotaNotFound):
msg_fmt = _("Quota reservation %(uuid)s could not be found.")
class OverQuota(NovaException):
msg_fmt = _("Quota exceeded for resources: %(overs)s")
class SecurityGroupNotFound(NotFound):
msg_fmt = _("Security group %(security_group_id)s not found.")
class SecurityGroupNotFoundForProject(SecurityGroupNotFound):
msg_fmt = _("Security group %(security_group_id)s not found "
"for project %(project_id)s.")
class SecurityGroupNotFoundForRule(SecurityGroupNotFound):
msg_fmt = _("Security group with rule %(rule_id)s not found.")
class SecurityGroupExists(Invalid):
ec2_code = 'InvalidGroup.Duplicate'
msg_fmt = _("Security group %(security_group_name)s already exists "
"for project %(project_id)s.")
class SecurityGroupExistsForInstance(Invalid):
msg_fmt = _("Security group %(security_group_id)s is already associated"
" with the instance %(instance_id)s")
class SecurityGroupNotExistsForInstance(Invalid):
msg_fmt = _("Security group %(security_group_id)s is not associated with"
" the instance %(instance_id)s")
class SecurityGroupDefaultRuleNotFound(Invalid):
msg_fmt = _("Security group default rule (%rule_id)s not found.")
class SecurityGroupCannotBeApplied(Invalid):
msg_fmt = _("Network requires port_security_enabled and subnet associated"
" in order to apply security groups.")
class SecurityGroupRuleExists(Invalid):
ec2_code = 'InvalidPermission.Duplicate'
msg_fmt = _("Rule already exists in group: %(rule)s")
class NoUniqueMatch(NovaException):
msg_fmt = _("No Unique Match Found.")
code = 409
class MigrationNotFound(NotFound):
msg_fmt = _("Migration %(migration_id)s could not be found.")
class MigrationNotFoundByStatus(MigrationNotFound):
msg_fmt = _("Migration not found for instance %(instance_id)s "
"with status %(status)s.")
class ConsolePoolNotFound(NotFound):
msg_fmt = _("Console pool %(pool_id)s could not be found.")
class ConsolePoolExists(NovaException):
msg_fmt = _("Console pool with host %(host)s, console_type "
"%(console_type)s and compute_host %(compute_host)s "
"already exists.")
class ConsolePoolNotFoundForHostType(NotFound):
msg_fmt = _("Console pool of type %(console_type)s "
"for compute host %(compute_host)s "
"on proxy host %(host)s not found.")
class ConsoleNotFound(NotFound):
msg_fmt = _("Console %(console_id)s could not be found.")
class ConsoleNotFoundForInstance(ConsoleNotFound):
msg_fmt = _("Console for instance %(instance_uuid)s could not be found.")
class ConsoleNotFoundInPoolForInstance(ConsoleNotFound):
msg_fmt = _("Console for instance %(instance_uuid)s "
"in pool %(pool_id)s could not be found.")
class ConsoleTypeInvalid(Invalid):
msg_fmt = _("Invalid console type %(console_type)s")
class ConsoleTypeUnavailable(Invalid):
msg_fmt = _("Unavailable console type %(console_type)s.")
class ConsolePortRangeExhausted(NovaException):
msg_fmt = _("The console port range %(min_port)d-%(max_port)d is "
"exhausted.")
class FlavorNotFound(NotFound):
msg_fmt = _("Flavor %(flavor_id)s could not be found.")
class FlavorNotFoundByName(FlavorNotFound):
msg_fmt = _("Flavor with name %(flavor_name)s could not be found.")
class FlavorAccessNotFound(NotFound):
msg_fmt = _("Flavor access not found for %(flavor_id)s / "
"%(project_id)s combination.")
class FlavorExtraSpecUpdateCreateFailed(NovaException):
msg_fmt = _("Flavor %(id)d extra spec cannot be updated or created "
"after %(retries)d retries.")
class CellNotFound(NotFound):
msg_fmt = _("Cell %(cell_name)s doesn't exist.")
class CellExists(NovaException):
msg_fmt = _("Cell with name %(name)s already exists.")
class CellRoutingInconsistency(NovaException):
msg_fmt = _("Inconsistency in cell routing: %(reason)s")
class CellServiceAPIMethodNotFound(NotFound):
msg_fmt = _("Service API method not found: %(detail)s")
class CellTimeout(NotFound):
msg_fmt = _("Timeout waiting for response from cell")
class CellMaxHopCountReached(NovaException):
msg_fmt = _("Cell message has reached maximum hop count: %(hop_count)s")
class NoCellsAvailable(NovaException):
msg_fmt = _("No cells available matching scheduling criteria.")
class CellsUpdateUnsupported(NovaException):
msg_fmt = _("Cannot update cells configuration file.")
class InstanceUnknownCell(NotFound):
msg_fmt = _("Cell is not known for instance %(instance_uuid)s")
class SchedulerHostFilterNotFound(NotFound):
msg_fmt = _("Scheduler Host Filter %(filter_name)s could not be found.")
class FlavorExtraSpecsNotFound(NotFound):
msg_fmt = _("Flavor %(flavor_id)s has no extra specs with "
"key %(extra_specs_key)s.")
class ComputeHostMetricNotFound(NotFound):
msg_fmt = _("Metric %(name)s could not be found on the compute "
"host node %(host)s.%(node)s.")
class FileNotFound(NotFound):
msg_fmt = _("File %(file_path)s could not be found.")
class SwitchNotFoundForNetworkAdapter(NotFound):
msg_fmt = _("Virtual switch associated with the "
"network adapter %(adapter)s not found.")
class NetworkAdapterNotFound(NotFound):
msg_fmt = _("Network adapter %(adapter)s could not be found.")
class ClassNotFound(NotFound):
msg_fmt = _("Class %(class_name)s could not be found: %(exception)s")
class InstanceTagNotFound(NotFound):
msg_fmt = _("Instance %(instance_id)s has no tag '%(tag)s'")
class RotationRequiredForBackup(NovaException):
msg_fmt = _("Rotation param is required for backup image_type")
class KeyPairExists(NovaException):
ec2_code = 'InvalidKeyPair.Duplicate'
msg_fmt = _("Key pair '%(key_name)s' already exists.")
class InstanceExists(NovaException):
msg_fmt = _("Instance %(name)s already exists.")
class FlavorExists(NovaException):
msg_fmt = _("Flavor with name %(name)s already exists.")
class FlavorIdExists(NovaException):
msg_fmt = _("Flavor with ID %(flavor_id)s already exists.")
class FlavorAccessExists(NovaException):
msg_fmt = _("Flavor access already exists for flavor %(flavor_id)s "
"and project %(project_id)s combination.")
class InvalidSharedStorage(NovaException):
msg_fmt = _("%(path)s is not on shared storage: %(reason)s")
class InvalidLocalStorage(NovaException):
msg_fmt = _("%(path)s is not on local storage: %(reason)s")
class StorageError(NovaException):
msg_fmt = _("Storage error: %(reason)s")
class MigrationError(NovaException):
msg_fmt = _("Migration error: %(reason)s")
class MigrationPreCheckError(MigrationError):
msg_fmt = _("Migration pre-check error: %(reason)s")
class MalformedRequestBody(NovaException):
msg_fmt = _("Malformed message body: %(reason)s")
# NOTE(johannes): NotFound should only be used when a 404 error is
# appropriate to be returned
class ConfigNotFound(NovaException):
msg_fmt = _("Could not find config at %(path)s")
class PasteAppNotFound(NovaException):
msg_fmt = _("Could not load paste app '%(name)s' from %(path)s")
class CannotResizeToSameFlavor(NovaException):
msg_fmt = _("When resizing, instances must change flavor!")
class ResizeError(NovaException):
msg_fmt = _("Resize error: %(reason)s")
class CannotResizeDisk(NovaException):
msg_fmt = _("Server disk was unable to be resized because: %(reason)s")
class FlavorMemoryTooSmall(NovaException):
msg_fmt = _("Flavor's memory is too small for requested image.")
class FlavorDiskTooSmall(NovaException):
msg_fmt = _("The created instance's disk would be too small.")
class FlavorDiskSmallerThanImage(FlavorDiskTooSmall):
msg_fmt = _("Flavor's disk is too small for requested image. Flavor disk "
"is %(flavor_size)i bytes, image is %(image_size)i bytes.")
class FlavorDiskSmallerThanMinDisk(FlavorDiskTooSmall):
msg_fmt = _("Flavor's disk is smaller than the minimum size specified in "
"image metadata. Flavor disk is %(flavor_size)i bytes, "
"minimum size is %(image_min_disk)i bytes.")
class VolumeSmallerThanMinDisk(FlavorDiskTooSmall):
msg_fmt = _("Volume is smaller than the minimum size specified in image "
"metadata. Volume size is %(volume_size)i bytes, minimum "
"size is %(image_min_disk)i bytes.")
class InsufficientFreeMemory(NovaException):
msg_fmt = _("Insufficient free memory on compute node to start %(uuid)s.")
class NoValidHost(NovaException):
msg_fmt = _("No valid host was found. %(reason)s")
class MaxRetriesExceeded(NoValidHost):
msg_fmt = _("Exceeded maximum number of retries. %(reason)s")
class QuotaError(NovaException):
ec2_code = 'ResourceLimitExceeded'
msg_fmt = _("Quota exceeded: code=%(code)s")
# NOTE(cyeoh): 413 should only be used for the ec2 API
# The error status code for out of quota for the nova api should be
# 403 Forbidden.
code = 413
headers = {'Retry-After': 0}
safe = True
class TooManyInstances(QuotaError):
msg_fmt = _("Quota exceeded for %(overs)s: Requested %(req)s,"
" but already used %(used)s of %(allowed)s %(overs)s")
class FloatingIpLimitExceeded(QuotaError):
msg_fmt = _("Maximum number of floating ips exceeded")
class FixedIpLimitExceeded(QuotaError):
msg_fmt = _("Maximum number of fixed ips exceeded")
class MetadataLimitExceeded(QuotaError):
msg_fmt = _("Maximum number of metadata items exceeds %(allowed)d")
class OnsetFileLimitExceeded(QuotaError):
msg_fmt = _("Personality file limit exceeded")
class OnsetFilePathLimitExceeded(OnsetFileLimitExceeded):
msg_fmt = _("Personality file path too long")
class OnsetFileContentLimitExceeded(OnsetFileLimitExceeded):
msg_fmt = _("Personality file content too long")
class KeypairLimitExceeded(QuotaError):
msg_fmt = _("Maximum number of key pairs exceeded")
class SecurityGroupLimitExceeded(QuotaError):
ec2_code = 'SecurityGroupLimitExceeded'
msg_fmt = _("Maximum number of security groups or rules exceeded")
class PortLimitExceeded(QuotaError):
msg_fmt = _("Maximum number of ports exceeded")
class AggregateError(NovaException):
msg_fmt = _("Aggregate %(aggregate_id)s: action '%(action)s' "
"caused an error: %(reason)s.")
class AggregateNotFound(NotFound):
msg_fmt = _("Aggregate %(aggregate_id)s could not be found.")
class AggregateNameExists(NovaException):
msg_fmt = _("Aggregate %(aggregate_name)s already exists.")
class AggregateHostNotFound(NotFound):
msg_fmt = _("Aggregate %(aggregate_id)s has no host %(host)s.")
class AggregateMetadataNotFound(NotFound):
msg_fmt = _("Aggregate %(aggregate_id)s has no metadata with "
"key %(metadata_key)s.")
class AggregateHostExists(NovaException):
msg_fmt = _("Aggregate %(aggregate_id)s already has host %(host)s.")
class FlavorCreateFailed(NovaException):
msg_fmt = _("Unable to create flavor")
class InstancePasswordSetFailed(NovaException):
msg_fmt = _("Failed to set admin password on %(instance)s "
"because %(reason)s")
safe = True
class InstanceNotFound(NotFound):
ec2_code = 'InvalidInstanceID.NotFound'
msg_fmt = _("Instance %(instance_id)s could not be found.")
class InstanceInfoCacheNotFound(NotFound):
msg_fmt = _("Info cache for instance %(instance_uuid)s could not be "
"found.")
class InvalidAssociation(NotFound):
ec2_code = 'InvalidAssociationID.NotFound'
msg_fmt = _("Invalid association.")
class MarkerNotFound(NotFound):
msg_fmt = _("Marker %(marker)s could not be found.")
class InvalidInstanceIDMalformed(Invalid):
msg_fmt = _("Invalid id: %(instance_id)s (expecting \"i-...\")")
ec2_code = 'InvalidInstanceID.Malformed'
class InvalidVolumeIDMalformed(Invalid):
msg_fmt = _("Invalid id: %(volume_id)s (expecting \"i-...\")")
ec2_code = 'InvalidVolumeID.Malformed'
class CouldNotFetchImage(NovaException):
msg_fmt = _("Could not fetch image %(image_id)s")
class CouldNotUploadImage(NovaException):
msg_fmt = _("Could not upload image %(image_id)s")
class TaskAlreadyRunning(NovaException):
msg_fmt = _("Task %(task_name)s is already running on host %(host)s")
class TaskNotRunning(NovaException):
msg_fmt = _("Task %(task_name)s is not running on host %(host)s")
class InstanceIsLocked(InstanceInvalidState):
msg_fmt = _("Instance %(instance_uuid)s is locked")
class ConfigDriveInvalidValue(Invalid):
msg_fmt = _("Invalid value for Config Drive option: %(option)s")
class ConfigDriveMountFailed(NovaException):
msg_fmt = _("Could not mount vfat config drive. %(operation)s failed. "
"Error: %(error)s")
class ConfigDriveUnknownFormat(NovaException):
msg_fmt = _("Unknown config drive format %(format)s. Select one of "
"iso9660 or vfat.")
class InterfaceAttachFailed(Invalid):
msg_fmt = _("Failed to attach network adapter device to "
"%(instance_uuid)s")
class InterfaceDetachFailed(Invalid):
msg_fmt = _("Failed to detach network adapter device from "
"%(instance_uuid)s")
class InstanceUserDataTooLarge(NovaException):
msg_fmt = _("User data too large. User data must be no larger than "
"%(maxsize)s bytes once base64 encoded. Your data is "
"%(length)d bytes")
class InstanceUserDataMalformed(NovaException):
msg_fmt = _("User data needs to be valid base 64.")
class InstanceUpdateConflict(NovaException):
msg_fmt = _("Conflict updating instance %(instance_uuid)s. "
"Expected: %(expected)s. Actual: %(actual)s")
class UnknownInstanceUpdateConflict(InstanceUpdateConflict):
msg_fmt = _("Conflict updating instance %(instance_uuid)s, but we were "
"unable to determine the cause")
class UnexpectedTaskStateError(InstanceUpdateConflict):
pass
class UnexpectedDeletingTaskStateError(UnexpectedTaskStateError):
pass
class InstanceActionNotFound(NovaException):
msg_fmt = _("Action for request_id %(request_id)s on instance"
" %(instance_uuid)s not found")
class InstanceActionEventNotFound(NovaException):
msg_fmt = _("Event %(event)s not found for action id %(action_id)s")
class CryptoCAFileNotFound(FileNotFound):
msg_fmt = _("The CA file for %(project)s could not be found")
class CryptoCRLFileNotFound(FileNotFound):
msg_fmt = _("The CRL file for %(project)s could not be found")
class InstanceRecreateNotSupported(Invalid):
msg_fmt = _('Instance recreate is not supported.')
class ServiceGroupUnavailable(NovaException):
msg_fmt = _("The service from servicegroup driver %(driver)s is "
"temporarily unavailable.")
class DBNotAllowed(NovaException):
msg_fmt = _('%(binary)s attempted direct database access which is '
'not allowed by policy')
class UnsupportedVirtType(Invalid):
msg_fmt = _("Virtualization type '%(virt)s' is not supported by "
"this compute driver")
class UnsupportedHardware(Invalid):
msg_fmt = _("Requested hardware '%(model)s' is not supported by "
"the '%(virt)s' virt driver")
class Base64Exception(NovaException):
msg_fmt = _("Invalid Base 64 data for file %(path)s")
class BuildAbortException(NovaException):
msg_fmt = _("Build of instance %(instance_uuid)s aborted: %(reason)s")
class RescheduledException(NovaException):
msg_fmt = _("Build of instance %(instance_uuid)s was re-scheduled: "
"%(reason)s")
class ShadowTableExists(NovaException):
msg_fmt = _("Shadow table with name %(name)s already exists.")
class InstanceFaultRollback(NovaException):
def __init__(self, inner_exception=None):
message = _("Instance rollback performed due to: %s")
self.inner_exception = inner_exception
super(InstanceFaultRollback, self).__init__(message % inner_exception)
class OrphanedObjectError(NovaException):
msg_fmt = _('Cannot call %(method)s on orphaned %(objtype)s object')
class ObjectActionError(NovaException):
msg_fmt = _('Object action %(action)s failed because: %(reason)s')
class CoreAPIMissing(NovaException):
msg_fmt = _("Core API extensions are missing: %(missing_apis)s")
class AgentError(NovaException):
msg_fmt = _('Error during following call to agent: %(method)s')
class AgentTimeout(AgentError):
msg_fmt = _('Unable to contact guest agent. '
'The following call timed out: %(method)s')
class AgentNotImplemented(AgentError):
msg_fmt = _('Agent does not support the call: %(method)s')
class InstanceGroupNotFound(NotFound):
msg_fmt = _("Instance group %(group_uuid)s could not be found.")
class InstanceGroupIdExists(NovaException):
msg_fmt = _("Instance group %(group_uuid)s already exists.")
class InstanceGroupMemberNotFound(NotFound):
msg_fmt = _("Instance group %(group_uuid)s has no member with "
"id %(instance_id)s.")
class InstanceGroupPolicyNotFound(NotFound):
msg_fmt = _("Instance group %(group_uuid)s has no policy %(policy)s.")
class InstanceGroupSaveException(NovaException):
msg_fmt = _("%(field)s should not be part of the updates.")
class PluginRetriesExceeded(NovaException):
msg_fmt = _("Number of retries to plugin (%(num_retries)d) exceeded.")
class ImageDownloadModuleError(NovaException):
msg_fmt = _("There was an error with the download module %(module)s. "
"%(reason)s")
class ImageDownloadModuleMetaDataError(ImageDownloadModuleError):
msg_fmt = _("The metadata for this location will not work with this "
"module %(module)s. %(reason)s.")
class ImageDownloadModuleNotImplementedError(ImageDownloadModuleError):
msg_fmt = _("The method %(method_name)s is not implemented.")
class ImageDownloadModuleConfigurationError(ImageDownloadModuleError):
msg_fmt = _("The module %(module)s is misconfigured: %(reason)s.")
class ResourceMonitorError(NovaException):
msg_fmt = _("Error when creating resource monitor: %(monitor)s")
class PciDeviceWrongAddressFormat(NovaException):
msg_fmt = _("The PCI address %(address)s has an incorrect format.")
class PciDeviceInvalidAddressField(NovaException):
msg_fmt = _("Invalid PCI Whitelist: "
"The PCI address %(address)s has an invalid %(field)s.")
class PciDeviceInvalidDeviceName(NovaException):
msg_fmt = _("Invalid PCI Whitelist: "
"The PCI whitelist can specify devname or address,"
" but not both")
class PciDeviceNotFoundById(NotFound):
msg_fmt = _("PCI device %(id)s not found")
class PciDeviceNotFound(NotFound):
msg_fmt = _("PCI Device %(node_id)s:%(address)s not found.")
class PciDeviceInvalidStatus(Invalid):
msg_fmt = _(
"PCI device %(compute_node_id)s:%(address)s is %(status)s "
"instead of %(hopestatus)s")
class PciDeviceInvalidOwner(Invalid):
msg_fmt = _(
"PCI device %(compute_node_id)s:%(address)s is owned by %(owner)s "
"instead of %(hopeowner)s")
class PciDeviceRequestFailed(NovaException):
msg_fmt = _(
"PCI device request (%requests)s failed")
class PciDevicePoolEmpty(NovaException):
msg_fmt = _(
"Attempt to consume PCI device %(compute_node_id)s:%(address)s "
"from empty pool")
class PciInvalidAlias(Invalid):
msg_fmt = _("Invalid PCI alias definition: %(reason)s")
class PciRequestAliasNotDefined(NovaException):
msg_fmt = _("PCI alias %(alias)s is not defined")
class MissingParameter(NovaException):
ec2_code = 'MissingParameter'
msg_fmt = _("Not enough parameters: %(reason)s")
code = 400
class PciConfigInvalidWhitelist(Invalid):
msg_fmt = _("Invalid PCI devices Whitelist config %(reason)s")
# Cannot be templated, msg needs to be constructed when raised.
class InternalError(NovaException):
ec2_code = 'InternalError'
msg_fmt = "%(err)s"
class PciDevicePrepareFailed(NovaException):
msg_fmt = _("Failed to prepare PCI device %(id)s for instance "
"%(instance_uuid)s: %(reason)s")
class PciDeviceDetachFailed(NovaException):
msg_fmt = _("Failed to detach PCI device %(dev)s: %(reason)s")
class PciDeviceUnsupportedHypervisor(NovaException):
msg_fmt = _("%(type)s hypervisor does not support PCI devices")
class KeyManagerError(NovaException):
msg_fmt = _("Key manager error: %(reason)s")
class VolumesNotRemoved(Invalid):
msg_fmt = _("Failed to remove volume(s): (%(reason)s)")
class InvalidVideoMode(Invalid):
msg_fmt = _("Provided video model (%(model)s) is not supported.")
class RngDeviceNotExist(Invalid):
msg_fmt = _("The provided RNG device path: (%(path)s) is not "
"present on the host.")
class RequestedVRamTooHigh(NovaException):
msg_fmt = _("The requested amount of video memory %(req_vram)d is higher "
"than the maximum allowed by flavor %(max_vram)d.")
class InvalidWatchdogAction(Invalid):
msg_fmt = _("Provided watchdog action (%(action)s) is not supported.")
class NoLiveMigrationForConfigDriveInLibVirt(NovaException):
msg_fmt = _("Live migration of instances with config drives is not "
"supported in libvirt unless libvirt instance path and "
"drive data is shared across compute nodes.")
class LiveMigrationWithOldNovaNotSafe(NovaException):
msg_fmt = _("Host %(server)s is running an old version of Nova, "
"live migrations involving that version may cause data loss. "
"Upgrade Nova on %(server)s and try again.")
class UnshelveException(NovaException):
msg_fmt = _("Error during unshelve instance %(instance_id)s: %(reason)s")
class ImageVCPULimitsRangeExceeded(Invalid):
msg_fmt = _("Image vCPU limits %(sockets)d:%(cores)d:%(threads)d "
"exceeds permitted %(maxsockets)d:%(maxcores)d:%(maxthreads)d")
class ImageVCPUTopologyRangeExceeded(Invalid):
msg_fmt = _("Image vCPU topology %(sockets)d:%(cores)d:%(threads)d "
"exceeds permitted %(maxsockets)d:%(maxcores)d:%(maxthreads)d")
class ImageVCPULimitsRangeImpossible(Invalid):
msg_fmt = _("Requested vCPU limits %(sockets)d:%(cores)d:%(threads)d "
"are impossible to satisfy for vcpus count %(vcpus)d")
class InvalidArchitectureName(Invalid):
msg_fmt = _("Architecture name '%(arch)s' is not recognised")
class ImageNUMATopologyIncomplete(Invalid):
msg_fmt = _("CPU and memory allocation must be provided for all "
"NUMA nodes")
class ImageNUMATopologyForbidden(Forbidden):
msg_fmt = _("Image property '%(name)s' is not permitted to override "
"NUMA configuration set against the flavor")
class ImageNUMATopologyAsymmetric(Invalid):
msg_fmt = _("Asymmetric NUMA topologies require explicit assignment "
"of CPUs and memory to nodes in image or flavor")
class ImageNUMATopologyCPUOutOfRange(Invalid):
msg_fmt = _("CPU number %(cpunum)d is larger than max %(cpumax)d")
class ImageNUMATopologyCPUDuplicates(Invalid):
msg_fmt = _("CPU number %(cpunum)d is assigned to two nodes")
class ImageNUMATopologyCPUsUnassigned(Invalid):
msg_fmt = _("CPU number %(cpuset)s is not assigned to any node")
class ImageNUMATopologyMemoryOutOfRange(Invalid):
msg_fmt = _("%(memsize)d MB of memory assigned, but expected "
"%(memtotal)d MB")
class InvalidHostname(Invalid):
msg_fmt = _("Invalid characters in hostname '%(hostname)s'")
class NumaTopologyNotFound(NotFound):
msg_fmt = _("Instance %(instance_uuid)s does not specify a NUMA topology")
class MigrationContextNotFound(NotFound):
msg_fmt = _("Instance %(instance_uuid)s does not specify a migration "
"context.")
class SocketPortRangeExhaustedException(NovaException):
msg_fmt = _("Not able to acquire a free port for %(host)s")
class SocketPortInUseException(NovaException):
msg_fmt = _("Not able to bind %(host)s:%(port)d, %(error)s")
class ImageSerialPortNumberInvalid(Invalid):
msg_fmt = _("Number of serial ports '%(num_ports)s' specified in "
"'%(property)s' isn't valid.")
class ImageSerialPortNumberExceedFlavorValue(Invalid):
msg_fmt = _("Forbidden to exceed flavor value of number of serial "
"ports passed in image meta.")
class InvalidImageConfigDrive(Invalid):
msg_fmt = _("Image's config drive option '%(config_drive)s' is invalid")
class InvalidHypervisorVirtType(Invalid):
msg_fmt = _("Hypervisor virtualization type '%(hv_type)s' is not "
"recognised")
class InvalidVirtualMachineMode(Invalid):
msg_fmt = _("Virtual machine mode '%(vmmode)s' is not recognised")
class InvalidToken(Invalid):
msg_fmt = _("The token '%(token)s' is invalid or has expired")
class InvalidConnectionInfo(Invalid):
msg_fmt = _("Invalid Connection Info")
class InstanceQuiesceNotSupported(Invalid):
msg_fmt = _('Quiescing is not supported in instance %(instance_id)s')
class QemuGuestAgentNotEnabled(Invalid):
msg_fmt = _('QEMU guest agent is not enabled')
class SetAdminPasswdNotSupported(Invalid):
msg_fmt = _('Set admin password is not supported')
class MemoryPageSizeInvalid(Invalid):
msg_fmt = _("Invalid memory page size '%(pagesize)s'")
class MemoryPageSizeForbidden(Invalid):
msg_fmt = _("Page size %(pagesize)s forbidden against '%(against)s'")
class MemoryPageSizeNotSupported(Invalid):
msg_fmt = _("Page size %(pagesize)s is not supported by the host.")
class CPUPinningNotSupported(Invalid):
msg_fmt = _("CPU pinning is not supported by the host: "
"%(reason)s")
class CPUPinningInvalid(Invalid):
msg_fmt = _("Cannot pin/unpin cpus %(requested)s from the following "
"pinned set %(pinned)s")
class CPUPinningUnknown(Invalid):
msg_fmt = _("CPU set to pin/unpin %(requested)s must be a subset of "
"known CPU set %(cpuset)s")
class ImageCPUPinningForbidden(Forbidden):
msg_fmt = _("Image property 'hw_cpu_policy' is not permitted to override "
"CPU pinning policy set against the flavor")
class UnsupportedPolicyException(Invalid):
msg_fmt = _("ServerGroup policy is not supported: %(reason)s")
class CellMappingNotFound(NotFound):
msg_fmt = _("Cell %(uuid)s has no mapping.")
class NUMATopologyUnsupported(Invalid):
msg_fmt = _("Host does not support guests with NUMA topology set")
class MemoryPagesUnsupported(Invalid):
msg_fmt = _("Host does not support guests with custom memory page sizes")
class EnumFieldInvalid(Invalid):
msg_fmt = _('%(typename)s in %(fieldname)s is not an instance of Enum')
class EnumFieldUnset(Invalid):
msg_fmt = _('%(fieldname)s missing field type')
class InvalidImageFormat(Invalid):
msg_fmt = _("Invalid image format '%(format)s'")
class UnsupportedImageModel(Invalid):
msg_fmt = _("Image model '%(image)s' is not supported")
class HostMappingNotFound(Invalid):
msg_fmt = _("Host '%(name)s' is not mapped to any cell")
|
apache-2.0
| -6,969,698,482,296,145,000 | 28.09826 | 79 | 0.672852 | false | 3.944364 | false | false | false |
google/ion
|
ion/dev/doxygen_filter.py
|
1
|
8299
|
#!/usr/bin/python
#
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Doxygen pre-filter script for ion.
This filter processes code and adds Doxygen-compatible markup in various places
to enable Doxygen to read the docs more fully. Unlike some other Doxygen
filters, it is designed to work with Doxygen's newer markdown syntax.
In order to ensure proper syntax coloring of indented code blocks, make sure
there is a blank (commented) line both above and below the block. For example:
// Comment comment comment.
//
// int CodeBlock() {
// Goes here;
// }
//
// More comment.
"""
import re
import sys
class DoxygenFormatter(object):
"""Transforms lines of a source file to make them doxygen-friendly."""
ANYWHERE = 'anywhere'
COMMENT = 'comment'
def __init__(self, outfile):
# The file-like object to which we will write lines.
self.out = outfile
# A buffer for storing empty lines which we can use later if we need to
# retroactively insert markup without causing line number offset problems.
self.empty_line_buffer = []
# Whether we are currently inside an indented code block.
self.in_code_block = False
self.CompileExpressions()
def CompileExpressions(self):
"""Pre-compiles frequently used regexps for improved performance.
The regexps are arranged as a list of 3-tuples, where the second value is
the replacement string (which may include backreferences) and the third
value is one of the context constants ANYWHERE or COMMENT. This is a list
of tuples instead of a dictionary because order matters: earlier regexps
will be applied first, and the resulting text (not the original) will be
what is seen by subsequent regexps.
"""
self.comment_regex = re.compile(r'^\s*//')
self.substitutions = [
# Remove copyright lines.
(re.compile(r'^\s*//\s*[Cc]opyright.*Google.*'), r'', self.ANYWHERE),
# Remove any comment lines that consist of only punctuation (banners).
# We only allow a maximum of two spaces before the punctuation so we
# don't accidentally get rid of code examples with bare braces and
# whatnot.
(re.compile(r'(^\s*)//\s{0,2}[-=#/]+$'), r'\1//\n', self.ANYWHERE),
# If we find something that looks like a list item that is indented four
# or more spaces, pull it back to the left so doxygen's Markdown engine
# doesn't treat it like a code block.
(re.compile(r'(^\s*)//\s{4,}([-\d*].*)'), r'\1 \2', self.COMMENT),
(re.compile(r'TODO'), r'@todo ', self.COMMENT),
# Replace leading 'Note:' or 'Note that' in a comment with @note
(re.compile(r'(\/\/\s+)Note(?:\:| that)', re.I), r'\1@note',
self.COMMENT),
# Replace leading 'Warning:' in a comment with @warning
(re.compile(r'(\/\/\s+)Warning:', re.I), r'\1@warning', self.COMMENT),
# Replace leading 'Deprecated' in a comment with @deprecated
(re.compile(r'(\/\/\s+)Deprecated[^\w\s]*', re.I), r'\1@deprecated',
self.COMMENT),
# Replace pipe-delimited parameter names with backtick-delimiters
(re.compile(r'\|(\w+)\|'), r'`\1`', self.COMMENT),
# Convert standalone comment lines to Doxygen style.
(re.compile(r'(^\s*)//(?=[^/])'), r'\1///', self.ANYWHERE),
# Strip trailing comments from preprocessor directives.
(re.compile(r'(^#.*)//.*'), r'\1', self.ANYWHERE),
# Convert remaining trailing comments to doxygen style, unless they are
# documenting the end of a block.
(re.compile(r'([^} ]\s+)//(?=[^/])'), r'\1///<', self.ANYWHERE),
]
def Transform(self, line):
"""Performs the regexp transformations defined by self.substitutions.
Args:
line: The line to transform.
Returns:
The resulting line.
"""
for (regex, repl, where) in self.substitutions:
if where is self.COMMENT and not self.comment_regex.match(line):
# Skip this comment-only rule on non-comment lines, but keep applying
# the remaining ANYWHERE rules (e.g. trailing-comment conversion).
continue
line = regex.sub(repl, line)
return line
def AppendToBufferedLine(self, text):
"""Appends text to the last buffered empty line.
Empty lines are buffered rather than being written out directly. This lets
us retroactively rewrite buffered lines to include markup that affects the
following line, while avoiding the line number offset that would result from
inserting a line that wasn't in the original source.
Args:
text: The text to append to the line.
Returns:
True if there was an available empty line to which text could be
appended, and False otherwise.
"""
if self.empty_line_buffer:
last_line = self.empty_line_buffer.pop().rstrip()
last_line += text + '\n'
self.empty_line_buffer.append(last_line)
return True
else:
return False
def ConvertCodeBlock(self, line):
"""Converts any code block that may begin or end on this line.
Doxygen has (at least) two kinds of code blocks. Any block indented at
least four spaces gets formatted as code, but (for some reason) no syntax
highlighting is applied. Any block surrounded by "~~~" on both sides is
also treated as code, but these are syntax highlighted intelligently
depending on the file type. We typically write code blocks in the former
style, but we'd like them to be highlighted, so this function converts them
to the latter style by adding in the ~~~ lines.
To make this a bit more complicated, we would really prefer not to insert
new lines into the file, since that will make the line numbers shown in
doxygen not match the line numbers in the actual source code. For this
reason, we only perform the conversion if at least one "blank" line (empty
comment line) appears before the start of the code block. If we get down to
the bottom of the block and there's no blank line after it, we will be
forced to add a line, since we can't go back and undo what we already did.
Args:
line: The line to process.
Returns:
The converted line.
"""
if not self.in_code_block and re.match(r'\s*///\s{4,}', line):
if self.AppendToBufferedLine(' ~~~'):
# If this fails, we'll just leave it un-highlighted.
self.in_code_block = True
elif self.in_code_block and not re.match(r'\s*///\s{4,}', line):
if not self.AppendToBufferedLine(' ~~~'):
# This is bad. We don't have a buffered line to use to end the code
# block, so we'll have to insert one. This will cause the line
# numbers to stop matching the original source, unfortunately.
line = '/// ~~~\n' + line
self.in_code_block = False
return line
def ProcessLine(self, line):
"""Processes a line.
If the line is an empty line inside a comment, we buffer it for possible
rewriting later on. Otherwise, we transform it using our regexps and
write it (as well as any buffered blank lines) out to the output.
Args:
line: The line to process.
"""
line = self.Transform(line)
if line.strip() == '///':
# We may repurpose this empty line later, so don't write it out yet.
self.empty_line_buffer.append(line)
else:
line = self.ConvertCodeBlock(line)
# Flush the line buffer and write this line as well.
for buffered_line in self.empty_line_buffer:
self.out.write(buffered_line)
self.empty_line_buffer = []
self.out.write(line)
def main(argv):
sourcefile = argv[1]
with open(sourcefile, 'r') as infile:
formatter = DoxygenFormatter(sys.stdout)
for line in infile:
formatter.ProcessLine(line)
if __name__ == '__main__':
main(sys.argv)
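# ---------------------------------------------------------------------
# Illustrative sketch only -- not part of the original filter. It feeds
# a couple of made-up source lines through DoxygenFormatter and prints
# the Doxygen-ready result, assuming the class defined above.
def _demo_filter():
    import sys
    formatter = DoxygenFormatter(sys.stdout)
    for line in ['// Note: |count| must be positive.\n',
                 '// TODO: handle zero.\n']:
        formatter.ProcessLine(line)
    # Expected output, roughly:
    #   /// @note `count` must be positive.
    #   /// @todo : handle zero.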
|
apache-2.0
| 6,935,754,025,838,835,000 | 35.884444 | 80 | 0.662369 | false | 3.92017 | false | false | false |
FlannelFox/FlannelFox
|
tests/flannelfox/torrenttools/test_torrentQueue.py
|
1
|
1999
|
# -*- coding: utf-8 -*-
import unittest
from unittest.mock import patch
import os
from flannelfox.torrenttools.TorrentQueue import Queue
from flannelfox.torrenttools import Torrents
class TestTorrentQueue(unittest.TestCase):
testDatabaseFile = 'ff.db'
def removeDatabase(self):
try:
os.remove(self.testDatabaseFile)
except Exception:
pass
@patch.object(Queue, 'databaseTorrentBlacklisted')
@patch.object(Queue, 'databaseTorrentExists')
def test_Queue(self, mockDatabaseTorrentExists, mockDatabaseTorrentBlacklisted):
self.removeDatabase()
torrentQueue = Queue()
mockDatabaseTorrentBlacklisted.return_value = False
mockDatabaseTorrentExists.return_value = False
# Ensure len returns a valid answer
self.assertEqual(len(torrentQueue), 0)
# Make sure appending an item works
torrentQueue.append(Torrents.TV(torrentTitle='some.show.s01e01.720p.junk.here'))
self.assertEqual(len(torrentQueue), 1)
# Make sure appending a duplicate item does not work
torrentQueue.append(Torrents.TV(torrentTitle='some.show.s01e01.720p.junk.here'))
self.assertEqual(len(torrentQueue), 1)
# Add a different item and make sure it works
torrentQueue.append(Torrents.TV(torrentTitle='some.show.s01e02.720p.junk.here2'))
self.assertEqual(len(torrentQueue), 2)
mockDatabaseTorrentBlacklisted.return_value = True
mockDatabaseTorrentExists.return_value = False
# Check if Blacklisted torrent gets blocked
torrentQueue.append(Torrents.TV(torrentTitle='some.show.s01e02.720p.junk.here3'))
self.assertEqual(len(torrentQueue), 2)
mockDatabaseTorrentBlacklisted.return_value = False
mockDatabaseTorrentExists.return_value = True
# Check if Existing Torrent in Database gets blocked
torrentQueue.append(Torrents.TV(torrentTitle='some.show.s01e02.720p.junk.here3'))
self.assertEqual(len(torrentQueue), 2)
mockDatabaseTorrentBlacklisted.return_value = False
mockDatabaseTorrentExists.return_value = False
if __name__ == '__main__':
unittest.main()
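# ---------------------------------------------------------------------
# Illustrative sketch only -- hypothetical, not FlannelFox code. It shows
# the append behaviour the tests above pin down, reduced to a minimal
# stand-in: duplicates, blacklisted torrents and torrents already in the
# database are all silently dropped.
class _DemoQueue(list):
    def __init__(self, blacklisted=False, exists=False):
        super(_DemoQueue, self).__init__()
        self.blacklisted = blacklisted  # stands in for databaseTorrentBlacklisted
        self.exists = exists            # stands in for databaseTorrentExists

    def append(self, torrent):
        if torrent in self or self.blacklisted or self.exists:
            return
        super(_DemoQueue, self).append(torrent)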
|
mit
| -7,580,618,446,876,378,000 | 29.753846 | 83 | 0.78039 | false | 3.39966 | true | false | false |
vhosouza/invesalius3
|
invesalius/gui/task_exporter.py
|
1
|
15556
|
#--------------------------------------------------------------------------
# Software: InVesalius - 3D Medical Image Reconstruction Software
# Copyright: (C) 2001 Centro de Pesquisas Renato Archer
# Homepage: http://www.softwarepublico.gov.br
# Contact: invesalius@cti.gov.br
# License: GNU - GPL 2 (LICENSE.txt/LICENCA.txt)
#--------------------------------------------------------------------------
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; version 2 of the
# License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#--------------------------------------------------------------------------
import os
import pathlib
import sys
import wx
try:
import wx.lib.agw.hyperlink as hl
except ImportError:
import wx.lib.hyperlink as hl
import wx.lib.platebtn as pbtn
from pubsub import pub as Publisher
import invesalius.constants as const
import invesalius.gui.dialogs as dlg
import invesalius.project as proj
import invesalius.session as ses
from invesalius import inv_paths
BTN_MASK = wx.NewId()
BTN_PICTURE = wx.NewId()
BTN_SURFACE = wx.NewId()
BTN_REPORT = wx.NewId()
BTN_REQUEST_RP = wx.NewId()
WILDCARD_SAVE_3D = "Inventor (*.iv)|*.iv|"\
"PLY (*.ply)|*.ply|"\
"Renderman (*.rib)|*.rib|"\
"STL (*.stl)|*.stl|"\
"STL ASCII (*.stl)|*.stl|"\
"VRML (*.vrml)|*.vrml|"\
"VTK PolyData (*.vtp)|*.vtp|"\
"Wavefront (*.obj)|*.obj|"\
"X3D (*.x3d)|*.x3d"
INDEX_TO_TYPE_3D = {0: const.FILETYPE_IV,
1: const.FILETYPE_PLY,
2: const.FILETYPE_RIB,
3: const.FILETYPE_STL,
4: const.FILETYPE_STL_ASCII,
5: const.FILETYPE_VRML,
6: const.FILETYPE_VTP,
7: const.FILETYPE_OBJ,
8: const.FILETYPE_X3D}
INDEX_TO_EXTENSION = {0: "iv",
1: "ply",
2: "rib",
3: "stl",
4: "stl",
5: "vrml",
6: "vtp",
7: "obj",
8: "x3d"}
WILDCARD_SAVE_2D = "BMP (*.bmp)|*.bmp|"\
"JPEG (*.jpg)|*.jpg|"\
"PNG (*.png)|*.png|"\
"PostScript (*.ps)|*.ps|"\
"Povray (*.pov)|*.pov|"\
"TIFF (*.tiff)|*.tiff"
INDEX_TO_TYPE_2D = {0: const.FILETYPE_BMP,
1: const.FILETYPE_JPG,
2: const.FILETYPE_PNG,
3: const.FILETYPE_PS,
4: const.FILETYPE_POV,
5: const.FILETYPE_OBJ}
WILDCARD_SAVE_MASK = "VTK ImageData (*.vti)|*.vti"
class TaskPanel(wx.Panel):
def __init__(self, parent):
wx.Panel.__init__(self, parent)
inner_panel = InnerTaskPanel(self)
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(inner_panel, 1, wx.EXPAND | wx.GROW | wx.BOTTOM | wx.RIGHT |
wx.LEFT, 7)
sizer.Fit(self)
self.SetSizer(sizer)
self.Update()
self.SetAutoLayout(1)
class InnerTaskPanel(wx.Panel):
def __init__(self, parent):
wx.Panel.__init__(self, parent)
background_colour = wx.Colour(255, 255, 255)
self.SetBackgroundColour(background_colour)
self.SetAutoLayout(1)
# Counter for projects loaded in current GUI
# Fixed hyperlink items
tooltip = wx.ToolTip(_("Export InVesalius screen to an image file"))
link_export_picture = hl.HyperLinkCtrl(self, -1,
_("Export picture..."))
link_export_picture.SetUnderlines(False, False, False)
link_export_picture.SetBold(True)
link_export_picture.SetColours("BLACK", "BLACK", "BLACK")
link_export_picture.SetBackgroundColour(self.GetBackgroundColour())
link_export_picture.SetToolTip(tooltip)
link_export_picture.AutoBrowse(False)
link_export_picture.UpdateLink()
link_export_picture.Bind(hl.EVT_HYPERLINK_LEFT,
self.OnLinkExportPicture)
tooltip = wx.ToolTip(_("Export 3D surface"))
link_export_surface = hl.HyperLinkCtrl(self, -1,_("Export 3D surface..."))
link_export_surface.SetUnderlines(False, False, False)
link_export_surface.SetBold(True)
link_export_surface.SetColours("BLACK", "BLACK", "BLACK")
link_export_surface.SetBackgroundColour(self.GetBackgroundColour())
link_export_surface.SetToolTip(tooltip)
link_export_surface.AutoBrowse(False)
link_export_surface.UpdateLink()
link_export_surface.Bind(hl.EVT_HYPERLINK_LEFT,
self.OnLinkExportSurface)
#tooltip = wx.ToolTip(_("Export 3D mask (voxels)"))
#link_export_mask = hl.HyperLinkCtrl(self, -1,_("Export mask..."))
#link_export_mask.SetUnderlines(False, False, False)
#link_export_mask.SetColours("BLACK", "BLACK", "BLACK")
#link_export_mask.SetToolTip(tooltip)
#link_export_mask.AutoBrowse(False)
#link_export_mask.UpdateLink()
#link_export_mask.Bind(hl.EVT_HYPERLINK_LEFT,
# self.OnLinkExportMask)
#tooltip = wx.ToolTip("Request rapid prototyping services")
#link_request_rp = hl.HyperLinkCtrl(self,-1,"Request rapid prototyping...")
#link_request_rp.SetUnderlines(False, False, False)
#link_request_rp.SetColours("BLACK", "BLACK", "BLACK")
#link_request_rp.SetToolTip(tooltip)
#link_request_rp.AutoBrowse(False)
#link_request_rp.UpdateLink()
#link_request_rp.Bind(hl.EVT_HYPERLINK_LEFT, self.OnLinkRequestRP)
#tooltip = wx.ToolTip("Open report tool...")
#link_report = hl.HyperLinkCtrl(self,-1,"Open report tool...")
#link_report.SetUnderlines(False, False, False)
#link_report.SetColours("BLACK", "BLACK", "BLACK")
#link_report.SetToolTip(tooltip)
#link_report.AutoBrowse(False)
#link_report.UpdateLink()
#link_report.Bind(hl.EVT_HYPERLINK_LEFT, self.OnLinkReport)
# Image(s) for buttons
if sys.platform == 'darwin':
BMP_EXPORT_SURFACE = wx.Bitmap(\
os.path.join(inv_paths.ICON_DIR, "surface_export_original.png"),
wx.BITMAP_TYPE_PNG).ConvertToImage()\
.Rescale(25, 25).ConvertToBitmap()
BMP_TAKE_PICTURE = wx.Bitmap(\
os.path.join(inv_paths.ICON_DIR, "tool_photo_original.png"),
wx.BITMAP_TYPE_PNG).ConvertToImage()\
.Rescale(25, 25).ConvertToBitmap()
#BMP_EXPORT_MASK = wx.Bitmap("../icons/mask.png",
# wx.BITMAP_TYPE_PNG)
else:
BMP_EXPORT_SURFACE = wx.Bitmap(os.path.join(inv_paths.ICON_DIR, "surface_export.png"),
wx.BITMAP_TYPE_PNG).ConvertToImage()\
.Rescale(25, 25).ConvertToBitmap()
BMP_TAKE_PICTURE = wx.Bitmap(os.path.join(inv_paths.ICON_DIR, "tool_photo.png"),
wx.BITMAP_TYPE_PNG).ConvertToImage()\
.Rescale(25, 25).ConvertToBitmap()
#BMP_EXPORT_MASK = wx.Bitmap("../icons/mask_small.png",
# wx.BITMAP_TYPE_PNG)
# Buttons related to hyperlinks
button_style = pbtn.PB_STYLE_SQUARE | pbtn.PB_STYLE_DEFAULT
button_picture = pbtn.PlateButton(self, BTN_PICTURE, "",
BMP_TAKE_PICTURE,
style=button_style)
button_picture.SetBackgroundColour(self.GetBackgroundColour())
self.button_picture = button_picture
button_surface = pbtn.PlateButton(self, BTN_SURFACE, "",
BMP_EXPORT_SURFACE,
style=button_style)
button_surface.SetBackgroundColour(self.GetBackgroundColour())
#button_mask = pbtn.PlateButton(self, BTN_MASK, "",
# BMP_EXPORT_MASK,
# style=button_style)
#button_request_rp = pbtn.PlateButton(self, BTN_REQUEST_RP, "",
# BMP_IMPORT, style=button_style)
#button_report = pbtn.PlateButton(self, BTN_REPORT, "",
# BMP_IMPORT,
# style=button_style)
# When using PlaneButton, it is necessary to bind events from parent win
self.Bind(wx.EVT_BUTTON, self.OnButton)
# Tags and grid sizer for fixed items
flag_link = wx.EXPAND|wx.GROW|wx.LEFT|wx.TOP
flag_button = wx.EXPAND | wx.GROW
fixed_sizer = wx.FlexGridSizer(rows=2, cols=2, hgap=2, vgap=0)
fixed_sizer.AddGrowableCol(0, 1)
fixed_sizer.AddMany([ (link_export_picture, 1, flag_link, 3),
(button_picture, 0, flag_button),
(link_export_surface, 1, flag_link, 3),
(button_surface, 0, flag_button),])
#(link_export_mask, 1, flag_link, 3),
#(button_mask, 0, flag_button)])
#(link_report, 0, flag_link, 3),
#(button_report, 0, flag_button),
#(link_request_rp, 1, flag_link, 3),
#(button_request_rp, 0, flag_button)])
# Add line sizers into main sizer
main_sizer = wx.BoxSizer(wx.VERTICAL)
main_sizer.Add(fixed_sizer, 0, wx.GROW|wx.EXPAND)
# Update main sizer and panel layout
self.SetSizer(main_sizer)
self.Fit()
self.sizer = main_sizer
self.__init_menu()
def __init_menu(self):
menu = wx.Menu()
self.id_to_name = {const.AXIAL:_("Axial slice"),
const.CORONAL:_("Coronal slice"),
const.SAGITAL:_("Sagittal slice"),
const.VOLUME:_("Volume")}
for id in self.id_to_name:
item = wx.MenuItem(menu, id, self.id_to_name[id])
menu.Append(item)
self.menu_picture = menu
menu.Bind(wx.EVT_MENU, self.OnMenuPicture)
def OnMenuPicture(self, evt):
id = evt.GetId()
value = dlg.ExportPicture(self.id_to_name[id])
if value:
filename, filetype = value
Publisher.sendMessage('Export picture to file',
orientation=id, filename=filename, filetype=filetype)
def OnLinkExportPicture(self, evt=None):
self.button_picture.PopupMenu(self.menu_picture)
def OnLinkExportMask(self, evt=None):
project = proj.Project()
if sys.platform == 'win32':
project_name = project.name
else:
project_name = project.name+".vti"
dlg = wx.FileDialog(None,
"Save mask as...", # title
"", # last used directory
project_name, # filename
WILDCARD_SAVE_MASK,
wx.FD_SAVE|wx.FD_OVERWRITE_PROMPT)
dlg.SetFilterIndex(0) # default is VTI
if dlg.ShowModal() == wx.ID_OK:
filename = dlg.GetPath()
extension = "vti"
if sys.platform != 'win32':
if filename.split(".")[-1] != extension:
filename = filename + "."+ extension
filetype = const.FILETYPE_IMAGEDATA
Publisher.sendMessage('Export mask to file',
filename=filename,
filetype=filetype)
def OnLinkExportSurface(self, evt=None):
"OnLinkExportSurface"
project = proj.Project()
n_surface = 0
for index in project.surface_dict:
if project.surface_dict[index].is_shown:
n_surface += 1
if n_surface:
if sys.platform == 'win32':
project_name = pathlib.Path(project.name).stem
else:
project_name = pathlib.Path(project.name).stem + ".stl"
session = ses.Session()
last_directory = session.get('paths', 'last_directory_3d_surface', '')
dlg = wx.FileDialog(None,
_("Save 3D surface as..."), # title
last_directory, # last used directory
project_name, # filename
WILDCARD_SAVE_3D,
wx.FD_SAVE|wx.FD_OVERWRITE_PROMPT)
dlg.SetFilterIndex(3) # default is STL
if dlg.ShowModal() == wx.ID_OK:
filetype_index = dlg.GetFilterIndex()
filetype = INDEX_TO_TYPE_3D[filetype_index]
filename = dlg.GetPath()
extension = INDEX_TO_EXTENSION[filetype_index]
if sys.platform != 'win32':
if filename.split(".")[-1] != extension:
filename = filename + "."+ extension
if filename:
session['paths']['last_directory_3d_surface'] = os.path.split(filename)[0]
session.WriteSessionFile()
Publisher.sendMessage('Export surface to file',
filename=filename, filetype=filetype)
if not os.path.exists(filename):
dlg = wx.MessageDialog(None,
_("It was not possible to save the surface."),
_("Error saving surface"),
wx.OK | wx.ICON_ERROR)
dlg.ShowModal()
dlg.Destroy()
else:
dlg = wx.MessageDialog(None,
_("You need to create a surface and make it ") +
_("visible before exporting it."),
'InVesalius 3',
wx.OK | wx.ICON_INFORMATION)
try:
dlg.ShowModal()
finally:
dlg.Destroy()
def OnLinkRequestRP(self, evt=None):
pass
def OnLinkReport(self, evt=None):
pass
def OnButton(self, evt):
id = evt.GetId()
if id == BTN_PICTURE:
self.OnLinkExportPicture()
elif id == BTN_SURFACE:
self.OnLinkExportSurface()
elif id == BTN_REPORT:
self.OnLinkReport()
elif id == BTN_REQUEST_RP:
self.OnLinkRequestRP()
else:# id == BTN_MASK:
self.OnLinkExportMask()
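# ---------------------------------------------------------------------
# Illustrative sketch only -- not InVesalius code. The panel above only
# *publishes* messages; the actual export happens in whatever handler is
# subscribed to the same topic elsewhere in the application. With
# pypubsub, a minimal listener (name and body made up here) would be
# wired roughly like this:
def _demo_subscribe():
    from pubsub import pub

    def _on_export_surface(filename, filetype):
        # Keyword names must match those used in sendMessage above.
        print('exporting %s as filetype %s' % (filename, filetype))

    pub.subscribe(_on_export_surface, 'Export surface to file')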
|
gpl-2.0
| -955,293,151,013,029,400 | 39.300518 | 98 | 0.509771 | false | 3.917401 | false | false | false |
bmya/tkobr-addons
|
tko_web_sessions_management/main.py
|
1
|
11671
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# ThinkOpen Solutions Brasil
# Copyright (C) Thinkopen Solutions <http://www.tkobr.com>.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
import openerp
from openerp.osv import fields, osv, orm
import pytz
from datetime import date, datetime, time, timedelta
from dateutil.relativedelta import *
from openerp.addons.base.ir.ir_cron import _intervalTypes
from openerp import SUPERUSER_ID
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT
from openerp.http import request
from openerp.tools.translate import _
from openerp import http
import werkzeug.contrib.sessions
from openerp.http import Response
# from openerp import pooler
_logger = logging.getLogger(__name__)
class Home_tkobr(openerp.addons.web.controllers.main.Home):
@http.route('/web/login', type='http', auth="none")
def web_login(self, redirect=None, **kw):
openerp.addons.web.controllers.main.ensure_db()
multi_ok = True
calendar_set = 0
calendar_ok = False
calendar_group = ''
unsuccessful_message = ''
now = datetime.now()
if request.httprequest.method == 'GET' and redirect and request.session.uid:
return http.redirect_with_hash(redirect)
if not request.uid:
request.uid = openerp.SUPERUSER_ID
values = request.params.copy()
if not redirect:
redirect = '/web?' + request.httprequest.query_string
values['redirect'] = redirect
try:
values['databases'] = http.db_list()
except openerp.exceptions.AccessDenied:
values['databases'] = None
if request.httprequest.method == 'POST':
old_uid = request.uid
uid = False
if 'login' in request.params and 'password' in request.params:
uid = request.session.authenticate(request.session.db, request.params[
'login'], request.params['password'])
if uid is not False:
user = request.registry.get('res.users').browse(
request.cr, request.uid, uid, request.context)
if uid != SUPERUSER_ID:
# check for multiple sessions block
sessions = request.registry.get('ir.sessions').search(
request.cr, request.uid, [
('user_id', '=', uid), ('logged_in', '=', True)], context=request.context)
if sessions and user.multiple_sessions_block:
multi_ok = False
if multi_ok:
# check calendars
calendar_obj = request.registry.get(
'resource.calendar')
attendance_obj = request.registry.get(
'resource.calendar.attendance')
# GET USER LOCAL TIME
if user.tz:
tz = pytz.timezone(user.tz)
else:
tz = pytz.timezone('GMT')
tzoffset = tz.utcoffset(now)
now = now + tzoffset
if user.login_calendar_id:
calendar_set += 1
# check user calendar
attendances = attendance_obj.search(request.cr,
request.uid, [('calendar_id', '=', user.login_calendar_id.id),
('dayofweek', '=', str(now.weekday())),
('hour_from', '<=', now.hour + now.minute / 60.0),
('hour_to', '>=', now.hour + now.minute / 60.0)],
context=request.context)
if attendances:
calendar_ok = True
else:
unsuccessful_message = "unsuccessful login from '%s', user time out of allowed calendar defined in user" % request.params[
'login']
else:
# check user groups calendar
for group in user.groups_id:
if group.login_calendar_id:
calendar_set += 1
attendances = attendance_obj.search(request.cr,
request.uid, [('calendar_id', '=', group.login_calendar_id.id),
('dayofweek', '=', str(now.weekday())),
('hour_from', '<=', now.hour + now.minute / 60.0),
('hour_to', '>=', now.hour + now.minute / 60.0)],
context=request.context)
if attendances:
calendar_ok = True
else:
calendar_group = group.name
if sessions and group.multiple_sessions_block and multi_ok:
multi_ok = False
unsuccessful_message = "unsuccessful login from '%s', multisessions block defined in group '%s'" % (
request.params['login'], group.name)
break
if calendar_set > 0 and calendar_ok == False:
unsuccessful_message = "unsuccessful login from '%s', user time out of allowed calendar defined in group '%s'" % (
request.params['login'], calendar_group)
else:
unsuccessful_message = "unsuccessful login from '%s', multisessions block defined in user" % request.params[
'login']
else:
unsuccessful_message = "unsuccessful login from '%s', wrong username or password" % request.params[
'login']
            if not unsuccessful_message or uid == SUPERUSER_ID:
self.save_session(
request.cr,
uid,
user.tz,
request.httprequest.session.sid,
context=request.context)
return http.redirect_with_hash(redirect)
user = request.registry.get('res.users').browse(
request.cr, SUPERUSER_ID, SUPERUSER_ID, request.context)
self.save_session(
request.cr,
uid,
user.tz,
request.httprequest.session.sid,
unsuccessful_message,
request.context)
_logger.error(unsuccessful_message)
request.uid = old_uid
values['error'] = 'Login failed due to one of the following reasons:'
values['reason1'] = '- Wrong login/password'
values['reason2'] = '- User not allowed to have multiple logins'
            values['reason3'] = '- User not allowed to login at this specific time or day'
return request.render('web.login', values)
def save_session(
self,
cr,
uid,
tz,
sid,
unsuccessful_message='',
context=None):
now = fields.datetime.now()
session_obj = request.registry.get('ir.sessions')
cr = request.registry.cursor()
# for GeoIP
geo_ip_resolver = None
ip_location = ""
try:
import GeoIP
geo_ip_resolver = GeoIP.open(
'/usr/share/GeoIP/GeoIP.dat',
GeoIP.GEOIP_STANDARD)
except ImportError:
geo_ip_resolver = False
if geo_ip_resolver:
ip_location = (str(geo_ip_resolver.country_name_by_addr(
request.httprequest.remote_addr)) or "")
# autocommit: our single update request will be performed atomically.
# (In this way, there is no opportunity to have two transactions
# interleaving their cr.execute()..cr.commit() calls and have one
# of them rolled back due to a concurrent access.)
cr.autocommit(True)
user = request.registry.get('res.users').browse(
cr, request.uid, uid, request.context)
ip = request.httprequest.headers.environ['REMOTE_ADDR']
logged_in = True
if unsuccessful_message:
uid = SUPERUSER_ID
logged_in = False
sessions = False
else:
sessions = session_obj.search(cr, uid, [('session_id', '=', sid),
('ip', '=', ip),
('user_id', '=', uid),
('logged_in', '=', True)],
context=context)
if not sessions:
values = {
'user_id': uid,
'logged_in': logged_in,
'session_id': sid,
'session_seconds': user.session_default_seconds,
'multiple_sessions_block': user.multiple_sessions_block,
'date_login': now,
'expiration_date': datetime.strftime(
(datetime.strptime(
now,
DEFAULT_SERVER_DATETIME_FORMAT) +
relativedelta(
seconds=user.session_default_seconds)),
DEFAULT_SERVER_DATETIME_FORMAT),
'ip': ip,
'ip_location': ip_location,
'remote_tz': tz or 'GMT',
'unsuccessful_message': unsuccessful_message,
}
session_obj.create(cr, uid, values, context=context)
cr.commit()
cr.close()
return True
@http.route('/web/session/logout', type='http', auth="none")
def logout(self, redirect='/web'):
request.session.logout(keep_db=True, logout_type='ul')
return werkzeug.utils.redirect(redirect, 303)
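# Flow summary (editor's sketch, as comments): web_login() authenticates the
# user, enforces the multi-session and login-calendar restrictions, and then
# save_session() records the attempt in ir.sessions together with the GeoIP
# location and an expiration computed from the user's session_default_seconds.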
|
agpl-3.0
| -2,443,623,195,217,171,000 | 46.060484 | 154 | 0.476223 | false | 5.164159 | false | false | false |
limemadness/selenium_training
|
test_countries_sort.py
|
1
|
2050
|
import pytest
from selenium import webdriver
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
@pytest.fixture
#def driver(request):
# wd = webdriver.Firefox(firefox_binary="c:\\Program Files (x86)\\Mozilla Firefox\\firefox.exe")
# print(wd.capabilities)
# request.addfinalizer(wd.quit)
# return wd
def driver(request):
wd = webdriver.Chrome()
wd.implicitly_wait(10)
request.addfinalizer(wd.quit)
return wd
def test_countries_sort(driver):
driver.get("http://localhost/litecart/admin/")
driver.find_element_by_name("username").click()
driver.find_element_by_name("username").send_keys("admin")
driver.find_element_by_name("password").click()
driver.find_element_by_name("password").send_keys("admin")
driver.find_element_by_xpath("//div[2]/button").click()
driver.get("http://localhost/litecart/admin/?app=countries&doc=countries")
#get country data
countries = driver.find_elements_by_css_selector("#content tr.row")
countries_timezone_url = []
country_name = []
#verify alphabetical order of country names
for country in countries:
country_name.append(country.find_element_by_css_selector("td:nth-child(5)").text)
assert sorted(country_name) == country_name
#get countries with multiple timezones
for country in countries:
if int(country.find_element_by_css_selector("td:nth-child(6)").text) > 0:
countries_timezone_url.append(country.find_element_by_css_selector("td:nth-child(5) a").get_attribute("href"))
#verify alphabetical order of timezones
for country_timezone_url in countries_timezone_url:
driver.get(country_timezone_url)
timezone_list = driver.find_elements_by_css_selector("#table-zones td:nth-child(2)")
del timezone_list[-1:]
timezones = []
for timezone in timezone_list:
timezones.append(timezone.text)
print(timezones)
assert sorted(timezones) == timezones
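# Run sketch (assumptions: a local litecart install at the URL above and
# chromedriver available on PATH):
#   pytest test_countries_sort.py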
|
apache-2.0
| -2,674,985,192,745,299,500 | 40 | 122 | 0.699024 | false | 3.693694 | false | false | false |
Beyond-Imagination/BlubBlub
|
ChatbotServer/ChatbotEnv/Lib/site-packages/konlpy/corpus.py
|
1
|
1849
|
#! /usr/bin/python2.7
# -*- coding: utf-8 -*-
import os
from . import utils
class CorpusLoader():
"""Loader for corpora.
For a complete list of corpora available in KoNLPy,
refer to :ref:`corpora`.
.. code-block:: python
>>> from konlpy.corpus import kolaw
>>> fids = kolaw.fileids()
>>> fobj = kolaw.open(fids[0])
>>> print fobj.read(140)
대한민국헌법
유구한 역사와 전통에 빛나는 우리 대한국민은 3·1운동으로 건립된 대한민국임시정부의 법통과 불의에 항거한 4·19민주이념을 계승하고, 조국의 민주개혁과 평화적 통일의 사명에 입각하여 정의·인도와 동포애로써 민족의 단결을 공고히 하고, 모든 사회적 폐습과 불의를 타파하며, 자율과 조화를 바 바
"""
def abspath(self, filename=None):
"""Absolute path of corpus file.
If ``filename`` is *None*, returns absolute path of corpus.
:param filename: Name of a particular file in the corpus.
"""
basedir = '%s/data/corpus/%s' % (utils.installpath, self.name)
if filename:
return '%s/%s' % (basedir, filename)
else:
return '%s/' % basedir
def fileids(self):
"""List of file IDs in the corpus."""
return os.listdir(self.abspath())
def open(self, filename):
"""Method to open a file in the corpus.
Returns a file object.
:param filename: Name of a particular file in the corpus.
"""
return utils.load_txt(self.abspath(filename))
def __init__(self, name=None):
if not name:
raise Exception("You need to input the name of the corpus")
else:
self.name = name
kolaw = CorpusLoader('kolaw')
kobill = CorpusLoader('kobill')
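# Usage for kobill mirrors the kolaw example in the class docstring
# (sketch; assumes the corpus data files ship with the KoNLPy install):
def _demo_kobill():
    fids = kobill.fileids()
    return kobill.open(fids[0]).read(100)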
|
gpl-3.0
| 3,655,224,125,584,470,500 | 27.035088 | 171 | 0.58761 | false | 2.201102 | false | false | false |
ErickMurillo/aprocacaho
|
organizacion/admin.py
|
1
|
3456
|
from django.contrib import admin
from .models import *
# Register your models here.
#organizacion
class InlineEscuelaCampo(admin.TabularInline):
model = EscuelaCampo
extra = 1
class OrganizacionAdmin(admin.ModelAdmin):
inlines = [InlineEscuelaCampo]
list_display = ('id','nombre','siglas')
list_display_links = ('id','nombre','siglas')
#encuesta organizacion
class InlineAspectosJuridicos(admin.TabularInline):
model = AspectosJuridicos
max_num = 1
can_delete = False
class InlineListaMiembros(admin.TabularInline):
model = ListaMiembros
extra = 1
class InlineDocumentacion(admin.TabularInline):
model = Documentacion
extra = 1
max_num = 7
class InlineProduccionComercializacion(admin.TabularInline):
model = ProduccionComercializacion
extra = 1
class InlineNivelCumplimiento(admin.TabularInline):
model = NivelCumplimiento
extra = 1
max_num = 7
# class InlineDatosProductivos(admin.TabularInline):
# model = DatosProductivos
# extra = 1
# max_num = 4
#
# class InlineDatosProductivosTabla(admin.TabularInline):
# model = DatosProductivosTabla
# extra = 1
# max_num = 2
class InlineInfraestructura(admin.TabularInline):
model = Infraestructura
extra = 1
class InlineTransporte(admin.TabularInline):
model = Transporte
max_num = 1
can_delete = False
# class InlineComercializacion(admin.TabularInline):
# model = Comercializacion
# extra = 1
# max_num = 3
#
# class InlineCacaoComercializado(admin.TabularInline):
# model = CacaoComercializado
# max_num = 1
# can_delete = False
class InlineCertificacionOrg(admin.TabularInline):
model = CertificacionOrg
max_num = 1
can_delete = False
class InlineDestinoProdCorriente(admin.TabularInline):
model = DestinoProdCorriente
extra = 1
max_num = 4
class InlineDestinoProdFermentado(admin.TabularInline):
model = DestinoProdFermentado
extra = 1
max_num = 4
class InlineFinanciamiento(admin.TabularInline):
model = Financiamiento
max_num = 1
can_delete = False
class InlineFinanciamientoProductores(admin.TabularInline):
model = FinanciamientoProductores
extra = 1
max_num = 5
class InlineInfoFinanciamiento(admin.TabularInline):
model = InfoFinanciamiento
extra = 1
max_num = 4
class EncuestaOrganicacionAdmin(admin.ModelAdmin):
# def get_queryset(self, request):
# if request.user.is_superuser:
# return EncuestaOrganicacion.objects.all()
# return EncuestaOrganicacion.objects.filter(usuario=request.user)
def save_model(self, request, obj, form, change):
obj.usuario = request.user
obj.save()
inlines = [InlineAspectosJuridicos,InlineListaMiembros,InlineDocumentacion,
InlineNivelCumplimiento,InlineProduccionComercializacion,
InlineInfraestructura,InlineTransporte,
InlineCertificacionOrg,InlineDestinoProdCorriente,InlineDestinoProdFermentado,
InlineFinanciamiento,InlineFinanciamientoProductores,InlineInfoFinanciamiento]
list_display = ('id','organizacion','fecha')
list_display_links = ('id','organizacion')
class Media:
css = {
'all': ('css/admin.css',)
}
js = ('js/admin_org.js',)
admin.site.register(Organizacion,OrganizacionAdmin)
admin.site.register(EncuestaOrganicacion,EncuestaOrganicacionAdmin)
|
mit
| -7,994,590,496,220,483,000 | 26.648 | 94 | 0.712095 | false | 3.167736 | false | false | false |
srio/shadow3-scripts
|
transfocator_id30b.py
|
1
|
25823
|
import numpy
import xraylib
"""
transfocator_id30b : transfocator for ID30B:
It can:
1) guess the lens configuration (number of lenses for each type) for a given photon energy
and target image size. Use transfocator_compute_configuration() for this task
2) for a given transfocator configuration, compute the main optical parameters
(image size, focal distance, focal position and divergence).
Use transfocator_compute_parameters() for this task
3) Performs full ray tracing. Use id30b_ray_tracing() for this task
Note that for the optimization and parameters calculations the transfocator configuration is
given in keywords. For ray tracing calculations many parameters of the transfocator are hard coded
with the values of id30b
See main program for examples.
Dependencies:
Numpy
xraylib (to compute refracion indices)
Shadow (for ray tracing only)
matplotlib (for some plots of ray=tracing)
Side effects:
When running ray tracing some files are created.
MODIFICATION HISTORY:
2015-03-25 srio@esrf.eu, written
"""
__author__ = "Manuel Sanchez del Rio"
__contact__ = "srio@esrf.eu"
__copyright__ = "ESRF, 2015"
def transfocator_compute_configuration(photon_energy_ev,s_target,\
symbol=["Be","Be","Be"], density=[1.845,1.845,1.845],\
nlenses_max = [15,3,1], nlenses_radii = [500e-4,1000e-4,1500e-4], lens_diameter=0.05, \
sigmaz=6.46e-4, alpha = 0.55, \
tf_p=5960, tf_q=3800, verbose=1 ):
"""
Computes the optimum transfocator configuration for a given photon energy and target image size.
All length units are cm
:param photon_energy_ev: the photon energy in eV
:param s_target: the target image size in cm.
:param symbol: the chemical symbol of the lens material of each type. Default symbol=["Be","Be","Be"]
:param density: the density of each type of lens. Default: density=[1.845,1.845,1.845]
:param nlenses_max: the maximum allowed number of lenases for each type of lens. nlenses_max = [15,3,1]
:param nlenses_radii: the radii in cm of each type of lens. Default: nlenses_radii = [500e-4,1000e-4,1500e-4]
:param lens_diameter: the physical diameter (acceptance) in cm of the lenses. If different for each type of lens,
consider the smaller one. Default: lens_diameter=0.05
:param sigmaz: the sigma (standard deviation) of the source in cm
:param alpha: an adjustable parameter in [0,1](see doc). Default: 0.55 (it is 0.76 for pure Gaussian beams)
:param tf_p: the distance source-transfocator in cm
:param tf_q: the distance transfocator-image in cm
:param:verbose: set to 1 for verbose text output
:return: a list with the number of lenses of each type.
"""
if s_target < 2.35*sigmaz*tf_q/tf_p:
print("Source size FWHM is: %f um"%(1e4*2.35*sigmaz))
print("Maximum Demagnifications is: %f um"%(tf_p/tf_q))
print("Minimum possible size is: %f um"%(1e4*2.35*sigmaz*tf_q/tf_p))
print("Error: redefine size")
return None
deltas = [(1.0 - xraylib.Refractive_Index_Re(symbol[i],photon_energy_ev*1e-3,density[i])) \
for i in range(len(symbol))]
    focal_q_target = _transfocator_guess_focal_position( s_target, p=tf_p, q=tf_q, sigmaz=sigmaz, alpha=alpha, \
        lens_diameter=lens_diameter,method=2)
focal_f_target = 1.0 / (1.0/focal_q_target + 1.0/tf_p)
div_q_target = alpha * lens_diameter / focal_q_target
#corrections for extreme cases
source_demagnified = 2.35*sigmaz*focal_q_target/tf_p
if source_demagnified > lens_diameter: source_demagnified = lens_diameter
s_target_calc = numpy.sqrt( (div_q_target*(tf_q-focal_q_target))**2 + source_demagnified**2)
nlenses_target = _transfocator_guess_configuration(focal_f_target,deltas=deltas,\
nlenses_max=nlenses_max,radii=nlenses_radii, )
if verbose:
print("transfocator_compute_configuration: focal_f_target: %f"%(focal_f_target))
print("transfocator_compute_configuration: focal_q_target: %f cm"%(focal_q_target))
print("transfocator_compute_configuration: s_target: %f um"%(s_target_calc*1e4))
print("transfocator_compute_configuration: nlenses_target: ",nlenses_target)
return nlenses_target
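# A minimal usage sketch (assumed target: a 20 um focal spot at 14 keV, with
# the ID30B defaults baked into the keyword arguments; requires xraylib):
def _demo_compute_configuration():
    return transfocator_compute_configuration(14000.0, 20e-4, verbose=1)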
def transfocator_compute_parameters(photon_energy_ev, nlenses_target,\
symbol=["Be","Be","Be"], density=[1.845,1.845,1.845],\
nlenses_max = [15,3,1], nlenses_radii = [500e-4,1000e-4,1500e-4], lens_diameter=0.05, \
sigmaz=6.46e-4, alpha = 0.55, \
tf_p=5960, tf_q=3800 ):
"""
    Computes the optical performance parameters of a given transfocator configuration.
    Returns a tuple with the computed parameters (see below).
All length units are cm
:param photon_energy_ev:
:param nlenses_target: a list with the lens configuration, i.e. the number of lenses of each type.
:param symbol: the chemical symbol of the lens material of each type. Default symbol=["Be","Be","Be"]
:param density: the density of each type of lens. Default: density=[1.845,1.845,1.845]
:param nlenses_max: the maximum allowed number of lenases for each type of lens. nlenses_max = [15,3,1]
TODO: remove (not used)
:param nlenses_radii: the radii in cm of each type of lens. Default: nlenses_radii = [500e-4,1000e-4,1500e-4]
:param lens_diameter: the physical diameter (acceptance) in cm of the lenses. If different for each type of lens,
consider the smaller one. Default: lens_diameter=0.05
:param sigmaz: the sigma (standard deviation) of the source in cm
:param alpha: an adjustable parameter in [0,1](see doc). Default: 0.55 (it is 0.76 for pure Gaussian beams)
:param tf_p: the distance source-transfocator in cm
:param tf_q: the distance transfocator-image in cm
:return: a list with parameters (image_siza, lens_focal_distance,
focal_position from transfocator center, divergence of beam after the transfocator)
"""
deltas = [(1.0 - xraylib.Refractive_Index_Re(symbol[i],photon_energy_ev*1e-3,density[i])) \
for i in range(len(symbol))]
focal_f = _transfocator_calculate_focal_distance( deltas=deltas,\
nlenses=nlenses_target,radii=nlenses_radii)
focal_q = 1.0 / (1.0/focal_f - 1.0/tf_p)
div_q = alpha * lens_diameter / focal_q
#corrections
source_demagnified = 2.35*sigmaz*focal_q/tf_p
if source_demagnified > lens_diameter: source_demagnified = lens_diameter
s_target = numpy.sqrt( (div_q*(tf_q-focal_q))**2 + (source_demagnified)**2 )
return (s_target,focal_f,focal_q,div_q)
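# Companion sketch: evaluate a fixed, assumed [5, 2, 0] lens configuration
# instead of optimizing one (requires xraylib):
def _demo_compute_parameters():
    size, focal_f, focal_q, div_q = transfocator_compute_parameters(14000.0, [5, 2, 0])
    return size, focal_f, focal_q, div_q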
def transfocator_nlenses_to_slots(nlenses,nlenses_max=None):
"""
converts the transfocator configuration from a list of the number of lenses of each type,
into a list of active (1) or inactive (0) actuators for the slots.
:param nlenses: the list with number of lenses (e.g., [5,2,0]
:param nlenses_max: the maximum number of lenses of each type, usually powers of two minus one.
E.g. [15,3,1]
:return: a list of on (1) and off (0) slots, e.g., [1, 0, 1, 0, 0, 1, 0]
(first type: 1*1+0*2+1*4+0*8=5, second type: 0*1+1*2=2, third type: 0*1=0)
"""
if nlenses_max == None:
nlenses_max = nlenses
ss = []
for i,iopt in enumerate(nlenses):
if iopt > nlenses_max[i]:
print("Error: i:%d, nlenses: %d, nlenses_max: %d"%(i,iopt,nlenses_max[i]))
ncharacters = len("{0:b}".format(nlenses_max[i]))
si = list( ("{0:0%db}"%(ncharacters)).format(int(iopt)) )
si.reverse()
ss += si
on_off = [int(i) for i in ss]
#print("transfocator_nlenses_to_slots: nlenses_max: ",nlenses_max," nlenses: ",nlenses," slots: ",on_off)
return on_off
def _transfocator_calculate_focal_distance(deltas=[0.999998],nlenses=[1],radii=[500e-4]):
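    # Thin-lens formula for stacked parabolic CRLs: a group of N identical
    # lenses with apex radius R and refraction decrement delta contributes
    # 1/f_group = 2*N*delta/R, and groups in series add inverse focal lengths.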
inverse_focal_distance = 0.0
for i,nlensesi in enumerate(nlenses):
if nlensesi > 0:
focal_distance_i = radii[i] / (2.*nlensesi*deltas[i])
inverse_focal_distance += 1.0/focal_distance_i
if inverse_focal_distance == 0:
return 99999999999999999999999999.
else:
return 1.0/inverse_focal_distance
def _transfocator_guess_focal_position( s_target, p=5960., q=3800.0, sigmaz=6.46e-4, \
    alpha=0.66, lens_diameter=0.05, method=2):
x = 1e15
if method == 1: # simple sum
AA = 2.35*sigmaz/p
BB = -(s_target + alpha * lens_diameter)
CC = alpha*lens_diameter*q
cc = numpy.roots([AA,BB,CC])
x = cc[1]
return x
if method == 2: # sum in quadrature
AA = ( (2.35*sigmaz)**2)/(p**2)
BB = 0.0
CC = alpha**2 * lens_diameter**2 - s_target**2
DD = - 2.0 * alpha**2 * lens_diameter**2 * q
EE = alpha**2 * lens_diameter**2 * q**2
cc = numpy.roots([AA,BB,CC,DD,EE])
for i,cci in enumerate(cc):
if numpy.imag(cci) == 0:
return numpy.real(cci)
return x
def _transfocator_guess_configuration(focal_f_target,deltas=[0.999998],nlenses_max=[15],radii=[500e-4]):
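    # Brute-force search: enumerate every admissible (n0, n1, n2) lens-count
    # combination and keep the one whose combined focal length is closest to
    # the target value.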
nn = len(nlenses_max)
ncombinations = (1+nlenses_max[0]) * (1+nlenses_max[1]) * (1+nlenses_max[2])
icombinations = 0
aa = numpy.zeros((3,ncombinations),dtype=int)
bb = numpy.zeros(ncombinations)
for i0 in range(1+nlenses_max[0]):
for i1 in range(1+nlenses_max[1]):
for i2 in range(1+nlenses_max[2]):
aa[0,icombinations] = i0
aa[1,icombinations] = i1
aa[2,icombinations] = i2
bb[icombinations] = focal_f_target - _transfocator_calculate_focal_distance(deltas=deltas,nlenses=[i0,i1,i2],radii=radii)
icombinations += 1
bb1 = numpy.abs(bb)
ibest = bb1.argmin()
return (aa[:,ibest]).tolist()
#
#
#
def id30b_ray_tracing(emittH=4e-9,emittV=1e-11,betaH=35.6,betaV=3.0,number_of_rays=50000,\
density=1.845,symbol="Be",tf_p=1000.0,tf_q=1000.0,lens_diameter=0.05,\
slots_max=None,slots_on_off=None,photon_energy_ev=14000.0,\
slots_lens_thickness=None,slots_steps=None,slots_radii=None,\
s_target=10e-4,focal_f=10.0,focal_q=10.0,div_q=1e-6):
#=======================================================================================================================
# Gaussian undulator source
#=======================================================================================================================
import Shadow
#import Shadow.ShadowPreprocessorsXraylib as sx
sigmaXp = numpy.sqrt(emittH/betaH)
sigmaZp = numpy.sqrt(emittV/betaV)
sigmaX = emittH/sigmaXp
sigmaZ = emittV/sigmaZp
print("\n\nElectron sizes H:%f um, V:%fu m;\nelectron divergences: H:%f urad, V:%f urad"%\
(sigmaX*1e6, sigmaZ*1e6, sigmaXp*1e6, sigmaZp*1e6))
# set Gaussian source
src = Shadow.Source()
src.set_energy_monochromatic(photon_energy_ev)
src.set_gauss(sigmaX*1e2,sigmaZ*1e2,sigmaXp,sigmaZp)
print("\n\nElectron sizes stored H:%f um, V:%f um;\nelectron divergences: H:%f urad, V:%f urad"%\
(src.SIGMAX*1e4,src.SIGMAZ*1e4,src.SIGDIX*1e6,src.SIGDIZ*1e6))
src.apply_gaussian_undulator(undulator_length_in_m=2.8, user_unit_to_m=1e-2, verbose=1)
print("\n\nElectron sizes stored (undulator) H:%f um, V:%f um;\nelectron divergences: H:%f urad, V:%f urad"%\
(src.SIGMAX*1e4,src.SIGMAZ*1e4,src.SIGDIX*1e6,src.SIGDIZ*1e6))
print("\n\nSource size in vertical FWHM: %f um\n"%\
(2.35*src.SIGMAZ*1e4))
src.NPOINT = number_of_rays
src.ISTAR1 = 0 # 677543155
src.write("start.00")
# create source
beam = Shadow.Beam()
beam.genSource(src)
beam.write("begin.dat")
src.write("end.00")
#=======================================================================================================================
# complete the (detailed) transfocator description
#=======================================================================================================================
print("\nSetting detailed Transfocator for ID30B")
slots_nlenses = numpy.array(slots_max)*numpy.array(slots_on_off)
slots_empty = (numpy.array(slots_max)-slots_nlenses)
#
####interactive=True, SYMBOL="SiC",DENSITY=3.217,FILE="prerefl.dat",E_MIN=100.0,E_MAX=20000.0,E_STEP=100.0
Shadow.ShadowPreprocessorsXraylib.prerefl(interactive=False,E_MIN=2000.0,E_MAX=55000.0,E_STEP=100.0,\
DENSITY=density,SYMBOL=symbol,FILE="Be2_55.dat" )
nslots = len(slots_max)
prerefl_file = ["Be2_55.dat" for i in range(nslots)]
print("slots_max: ",slots_max)
#print("slots_target: ",slots_target)
print("slots_on_off: ",slots_on_off)
print("slots_steps: ",slots_steps)
print("slots_radii: ",slots_radii)
print("slots_nlenses: ",slots_nlenses)
print("slots_empty: ",slots_empty)
#calculate distances, nlenses and slots_empty
# these are distances p and q with TF length removed
tf_length = numpy.array(slots_steps).sum() #tf length in cm
tf_fs_before = tf_p - 0.5*tf_length #distance from source to center of transfocator
tf_fs_after = tf_q - 0.5*tf_length # distance from center of transfocator to image
# for each slot, these are the empty distances before and after the lenses
tf_p0 = numpy.zeros(nslots)
tf_q0 = numpy.array(slots_steps) - (numpy.array(slots_max) * slots_lens_thickness)
# add now the p q distances
tf_p0[0] += tf_fs_before
tf_q0[-1] += tf_fs_after
print("tf_p0: ",tf_p0)
print("tf_q0: ",tf_q0)
print("tf_length: %f cm"%(tf_length))
# build transfocator
tf = Shadow.CompoundOE(name='TF ID30B')
tf.append_transfocator(tf_p0.tolist(), tf_q0.tolist(), \
nlenses=slots_nlenses.tolist(), radius=slots_radii, slots_empty=slots_empty.tolist(),\
thickness=slots_lens_thickness, prerefl_file=prerefl_file,\
surface_shape=4, convex_to_the_beam=0, diameter=lens_diameter,\
cylinder_angle=0.0,interthickness=50e-4,use_ccc=0)
itmp = input("SHADOW Source complete. Do you want to run SHADOR trace? [1=Yes,0=No]: ")
if str(itmp) != "1":
return
#trace system
tf.dump_systemfile()
beam.traceCompoundOE(tf,write_start_files=0,write_end_files=0,write_star_files=0, write_mirr_files=0)
#write only last result file
beam.write("star_tf.dat")
print("\nFile written to disk: star_tf.dat")
#
# #ideal calculations
#
print("\n\n\n")
print("=============================================== TRANSFOCATOR OUTPUTS ==========================================")
print("\nTHEORETICAL results: ")
print("REMIND-----With these lenses we obtained (analytically): ")
print("REMIND----- focal_f: %f cm"%(focal_f))
print("REMIND----- focal_q: %f cm"%(focal_q))
print("REMIND----- s_target: %f um"%(s_target*1e4))
demagnification_factor = tf_p/focal_q
theoretical_focal_size = src.SIGMAZ*2.35/demagnification_factor
# analyze shadow results
print("\nSHADOW results: ")
st1 = beam.get_standard_deviation(3,ref=0)
st2 = beam.get_standard_deviation(3,ref=1)
print(" stDev*2.35: unweighted: %f um, weighted: %f um "%(st1*2.35*1e4,st2*2.35*1e4))
tk = beam.histo1(3, nbins=75, ref=1, nolost=1, write="HISTO1")
print(" Histogram FWHM: %f um "%(1e4*tk["fwhm"]))
print(" Transmitted intensity: %f (source was: %d) (transmission is %f %%) "%(beam.intensity(nolost=1), src.NPOINT, beam.intensity(nolost=1)/src.NPOINT*100))
#scan around image
xx1 = numpy.linspace(0.0,1.1*tf_fs_after,11) # position from TF exit plane
#xx0 = focal_q - tf_length*0.5
xx0 = focal_q - tf_length*0.5 # position of focus from TF exit plane
xx2 = numpy.linspace(xx0-100.0,xx0+100,21) # position from TF exit plane
xx3 = numpy.array([tf_fs_after])
xx = numpy.concatenate(([-0.5*tf_length],xx1,xx2,[tf_fs_after]))
xx.sort()
f = open("id30b.spec","w")
f.write("#F id30b.spec\n")
f.write("\n#S 1 calculations for id30b transfocator\n")
f.write("#N 8\n")
labels = " %18s %18s %18s %18s %18s %18s %18s %18s"%\
("pos from source","pos from image","[pos from TF]", "pos from TF center", "pos from focus",\
"fwhm shadow(stdev)","fwhm shadow(histo)","fwhm theoretical")
f.write("#L "+labels+"\n")
out = numpy.zeros((8,xx.size))
for i,pos in enumerate(xx):
beam2 = beam.duplicate()
beam2.retrace(-tf_fs_after+pos)
fwhm1 = 2.35*1e4*beam2.get_standard_deviation(3,ref=1,nolost=1)
tk = beam2.histo1(3, nbins=75, ref=1, nolost=1)
fwhm2 = 1e4*tk["fwhm"]
#fwhm_th = 1e4*transfocator_calculate_estimated_size(pos,diameter=diameter,focal_distance=focal_q)
fwhm_th2 = 1e4*numpy.sqrt( (div_q*(pos+0.5*tf_length-focal_q))**2 + theoretical_focal_size**2 )
#fwhm_th2 = 1e4*( numpy.abs(div_q*(pos-focal_q+0.5*tf_length)) + theoretical_focal_size )
out[0,i] = tf_fs_before+tf_length+pos
out[1,i] = -tf_fs_after+pos
out[2,i] = pos
out[3,i] = pos+0.5*tf_length
out[4,i] = pos+0.5*tf_length-focal_q
out[5,i] = fwhm1
out[6,i] = fwhm2
out[7,i] = fwhm_th2
f.write(" %18.3f %18.3f %18.3f %18.3f %18.3f %18.3f %18.3f %18.3f \n"%\
(tf_fs_before+tf_length+pos,\
-tf_fs_after+pos,\
pos,\
pos+0.5*tf_length,\
pos+0.5*tf_length-focal_q,\
fwhm1,fwhm2,fwhm_th2))
f.close()
print("File with beam evolution written to disk: id30b.spec")
#
# plots
#
itmp = input("Do you want to plot the intensity distribution and beam evolution? [1=yes,0=No]")
if str(itmp) != "1":
return
import matplotlib.pylab as plt
plt.figure(1)
plt.plot(out[1,:],out[5,:],'blue',label="fwhm shadow(stdev)")
plt.plot(out[1,:],out[6,:],'green',label="fwhm shadow(histo1)")
plt.plot(out[1,:],out[7,:],'red',label="fwhm theoretical")
plt.xlabel("Distance from image plane [cm]")
plt.ylabel("spot size [um] ")
ax = plt.subplot(111)
ax.legend(bbox_to_anchor=(1.1, 1.05))
print("Kill graphic to continue.")
plt.show()
Shadow.ShadowTools.histo1(beam,3,nbins=75,ref=1,nolost=1,calfwhm=1)
input("<Enter> to finish.")
return None
def id30b_full_simulation(photon_energy_ev=14000.0,s_target=20.0e-4,nlenses_target=None):
if nlenses_target == None:
force_nlenses = 0
else:
force_nlenses = 1
#
# define lens setup (general)
#
xrl_symbol = ["Be","Be","Be"]
xrl_density = [1.845,1.845,1.845]
lens_diameter = 0.05
nlenses_max = [15,3,1]
nlenses_radii = [500e-4,1000e-4,1500e-4]
sigmaz=6.46e-4
alpha = 0.55
tf_p = 5960 # position of the TF measured from the center of the transfocator
tf_q = 9760 - tf_p # position of the image plane measured from the center of the transfocator
if s_target < 2.35*sigmaz*tf_q/tf_p:
print("Source size FWHM is: %f um"%(1e4*2.35*sigmaz))
print("Maximum Demagnifications is: %f um"%(tf_p/tf_q))
print("Minimum possible size is: %f um"%(1e4*2.35*sigmaz*tf_q/tf_p))
print("Error: redefine size")
return
print("================================== TRANSFOCATOR INPUTS ")
print("Photon energy: %f eV"%(photon_energy_ev))
if force_nlenses:
print("Forced_nlenses: ",nlenses_target)
else:
print("target size: %f cm"%(s_target))
print("materials: ",xrl_symbol)
print("densities: ",xrl_density)
print("Lens diameter: %f cm"%(lens_diameter))
print("nlenses_max:",nlenses_max,"nlenses_radii: ",nlenses_radii)
print("Source size (sigma): %f um, FWHM: %f um"%(1e4*sigmaz,2.35*1e4*sigmaz))
print("Distances: tf_p: %f cm, tf_q: %f cm"%(tf_p,tf_q))
print("alpha: %f"%(alpha))
print("========================================================")
if force_nlenses != 1:
nlenses_target = transfocator_compute_configuration(photon_energy_ev,s_target,\
symbol=xrl_symbol,density=xrl_density,\
nlenses_max=nlenses_max, nlenses_radii=nlenses_radii, lens_diameter=lens_diameter, \
sigmaz=sigmaz, alpha=alpha, \
tf_p=tf_p,tf_q=tf_q, verbose=1)
(s_target,focal_f,focal_q,div_q) = \
transfocator_compute_parameters(photon_energy_ev, nlenses_target,\
symbol=xrl_symbol,density=xrl_density,\
nlenses_max=nlenses_max, nlenses_radii=nlenses_radii, \
lens_diameter=lens_diameter,\
sigmaz=sigmaz, alpha=alpha,\
tf_p=tf_p,tf_q=tf_q)
slots_max = [ 1, 2, 4, 8, 1, 2, 1] # slots
slots_on_off = transfocator_nlenses_to_slots(nlenses_target,nlenses_max=nlenses_max)
print("=============================== TRANSFOCATOR SET")
#print("deltas: ",deltas)
if force_nlenses != 1:
print("nlenses_target (optimized): ",nlenses_target)
else:
print("nlenses_target (forced): ",nlenses_target)
print("With these lenses we obtain: ")
print(" focal_f: %f cm"%(focal_f))
print(" focal_q: %f cm"%(focal_q))
print(" s_target: %f um"%(s_target*1e4))
print(" slots_max: ",slots_max)
print(" slots_on_off: ",slots_on_off)
print("==================================================")
# for theoretical calculations use the focal position and distances given by the target nlenses
itmp = input("Start SHADOW simulation? [1=yes,0=No]: ")
if str(itmp) != "1":
return
#=======================================================================================================================
# Inputs
#=======================================================================================================================
emittH = 3.9e-9
emittV = 10e-12
betaH = 35.6
betaV = 3.0
number_of_rays = 50000
nslots = len(slots_max)
slots_lens_thickness = [0.3 for i in range(nslots)] #total thickness of a single lens in cm
# for each slot, positional gap of the first lens in cm
slots_steps = [ 4, 4, 1.9, 6.1, 4, 4, slots_lens_thickness[-1]]
slots_radii = [.05, .05, .05, .05, 0.1, 0.1, 0.15] # radii of the lenses in cm
id30b_ray_tracing(emittH=emittH,emittV=emittV,betaH=betaH,betaV=betaV,number_of_rays=number_of_rays,\
density=xrl_density[0],symbol=xrl_symbol[0],tf_p=tf_p,tf_q=tf_q,lens_diameter=lens_diameter,\
slots_max=slots_max,slots_on_off=slots_on_off,photon_energy_ev=photon_energy_ev,\
slots_lens_thickness=slots_lens_thickness,slots_steps=slots_steps,slots_radii=slots_radii,\
s_target=s_target,focal_f=focal_f,focal_q=focal_q,div_q=div_q)
def main():
# this performs the full simulation: calculates the optimum configuration and do the ray-tracing
itmp = input("Enter: \n 0 = optimization calculation only \n 1 = full simulation (ray tracing) \n?> ")
photon_energy_kev = float(input("Enter photon energy in keV: "))
s_target_um = float(input("Enter target focal dimension in microns: "))
if str(itmp) == "1":
id30b_full_simulation(photon_energy_ev=photon_energy_kev*1e3,s_target=s_target_um*1e-4,nlenses_target=None)
#id30b_full_simulation(photon_energy_ev=14000.0,s_target=20.0e-4,nlenses_target=[3,1,1])
else:
#this performs the calculation of the optimizad configuration
nlenses_optimum = transfocator_compute_configuration(photon_energy_kev*1e3,s_target_um*1e-4,\
symbol=["Be","Be","Be"], density=[1.845,1.845,1.845],\
nlenses_max = [15,3,1], nlenses_radii = [500e-4,1000e-4,1500e-4], lens_diameter=0.05, \
sigmaz=6.46e-4, alpha = 0.55, \
tf_p=5960, tf_q=3800, verbose=0 )
print("Optimum lens configuration is: ",nlenses_optimum)
if nlenses_optimum == None:
return
print("Activate slots: ",transfocator_nlenses_to_slots(nlenses_optimum,nlenses_max=[15,3,1]))
# this calculates the parameters (image size, etc) for a given lens configuration
(size, f, q_f, div) = transfocator_compute_parameters(photon_energy_kev*1e3, nlenses_optimum,\
symbol=["Be","Be","Be"], density=[1.845,1.845,1.845],\
nlenses_max = [15,3,1], nlenses_radii = [500e-4,1000e-4,1500e-4], lens_diameter=0.05, \
sigmaz=6.46e-4, alpha = 0.55, \
tf_p=5960, tf_q=3800 )
print("For given configuration ",nlenses_optimum," we get: ")
print(" size: %f cm, focal length: %f cm, focal distance: %f cm, divergence: %f rad: "%(size, f, q_f, div))
if __name__ == "__main__":
main()
|
mit
| 5,677,322,547,994,774,000 | 39.224299 | 162 | 0.584905 | false | 3.016001 | true | false | false |
jose187/gh_word_count
|
gh_word_count/__init__.py
|
1
|
2681
|
from ommit_words import list_ommited_words
from re import sub
import operator
class _input_list:
def __init__(self,list_TITLES):
self.list_TITLES = list_TITLES
self.list_remove = list_ommited_words()
def _word_count(self):
# these are all the words that are in the text
dict_words = {}
# now we go through each of the lines
for str_line in self.list_TITLES:
str_raw_line1 = sub('[^a-z0-9 ]','',str_line.lower())
list_line_words = str_raw_line1.split()
for str_word in list_line_words:
# check to see if its in the ommited word list
if str_word not in self.list_remove:
# create new key if it is not there yet
if str_word not in dict_words:
dict_words[str_word] = [1]
# add if is already there
elif str_word in dict_words:
dict_words[str_word].append(1)
sorted_x = sorted(dict_words.iteritems(),
key=operator.itemgetter(1),
reverse=True)
list_OUTPUT = []
for each_item in sorted_x:
int_COUNT = sum(each_item[1])
if int_COUNT > 1:
tup_ONE_COUNT = ('%s' % each_item[0],
'%d' % int_COUNT)
list_OUTPUT.append(tup_ONE_COUNT)
return list_OUTPUT
# gets the top x according to frequency
# returns list
def _get_top(self,int_TOP):
list_TOP_N = []
for str_WORD in self._word_count()[:int_TOP]:
list_TOP_N.append(str_WORD)
return list_TOP_N
# displays the count on the terminal
def _show_count(list_TUPS,entries=0):
if entries == 0:
int_TOP = len(list_TUPS)
else:
int_TOP = entries
print 'Count\tWord\n'
for tup_ITEM in list_TUPS[:int_TOP]:
print '%d\t%s' % (int(tup_ITEM[1]),str(tup_ITEM[0]))
# saves the count to csv file
def _save_counts(list_COUNTS,str_FILE_PATH,entries=0):
if entries == 0:
int_TOP = len(list_COUNTS)
else:
int_TOP = entries
list_OUTPUT = ['"Count","Word"']
for tup_ITEM in list_COUNTS[:int_TOP]:
str_OUTPUT = '%d,"%s"' % (int(tup_ITEM[1]),str(tup_ITEM[0]))
list_OUTPUT.append(str_OUTPUT)
fw_OUTPUT = open(str_FILE_PATH,'w')
fw_OUTPUT.write('\n'.join(list_OUTPUT))
fw_OUTPUT.close()
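# Minimal usage sketch (hypothetical titles; assumes list_ommited_words()
# does not filter the repeated word below):
def _demo_word_count():
    counts = _input_list(['Fix the build', 'Fix the tests'])._word_count()
    _show_count(counts)                  # prints each word with count > 1
    _save_counts(counts, 'counts.csv')   # writes the same table as CSV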
|
bsd-2-clause
| -7,451,965,439,748,880,000 | 30.916667 | 69 | 0.497576 | false | 3.723611 | false | false | false |
CSD-Public/stonix
|
src/tests/rules/unit_tests/zzzTestRuleDisableOpenSafeSafari.py
|
1
|
4752
|
###############################################################################
# #
# Copyright 2019. Triad National Security, LLC. All rights reserved. #
# This program was produced under U.S. Government contract 89233218CNA000001 #
# for Los Alamos National Laboratory (LANL), which is operated by Triad #
# National Security, LLC for the U.S. Department of Energy/National Nuclear #
# Security Administration. #
# #
# All rights in the program are reserved by Triad National Security, LLC, and #
# the U.S. Department of Energy/National Nuclear Security Administration. The #
# Government is granted for itself and others acting on its behalf a #
# nonexclusive, paid-up, irrevocable worldwide license in this material to #
# reproduce, prepare derivative works, distribute copies to the public, #
# perform publicly and display publicly, and to permit others to do so. #
# #
###############################################################################
'''
This is a Unit Test for Rule DisableOpenSafeSafari
Created on Jan 22, 2015
@author: dwalker
@change: 2015-02-25 - ekkehard - Updated to make unit test work
@change: 2016/02/10 roy Added sys.path.append for being able to unit test this
file as well as with the test harness.
'''
import unittest
import sys
sys.path.append("../../../..")
from src.tests.lib.RuleTestTemplate import RuleTest
from src.stonix_resources.CommandHelper import CommandHelper
from src.tests.lib.logdispatcher_mock import LogPriority
from src.stonix_resources.rules.DisableOpenSafeSafari import DisableOpenSafeSafari
class zzzTestRuleDisableOpenSafeSafari(RuleTest):
def setUp(self):
RuleTest.setUp(self)
self.rule = DisableOpenSafeSafari(self.config,
self.environ,
self.logdispatch,
self.statechglogger)
self.rulename = self.rule.rulename
self.rulenumber = self.rule.rulenumber
self.ch = CommandHelper(self.logdispatch)
self.dc = "/usr/bin/defaults"
self.path = "com.apple.Safari"
self.key = "AutoOpenSafeDownloads"
def tearDown(self):
pass
def runTest(self):
self.simpleRuleTest()
def setConditionsForRule(self):
        '''This makes sure the initial report fails by executing the following
commands:
defaults write com.apple.Safari AutoOpenSafeDownloads -bool yes
:param self: essential if you override this definition
:returns: boolean - If successful True; If failure False
@author: dwalker
'''
success = False
cmd = [self.dc, "write", self.path, self.key, "-bool", "yes"]
self.logdispatch.log(LogPriority.DEBUG, str(cmd))
if self.ch.executeCommand(cmd):
success = self.checkReportForRule(False, True)
return success
def checkReportForRule(self, pCompliance, pRuleSuccess):
        '''To see what happened run these commands:
defaults read com.apple.Safari AutoOpenSafeDownloads
:param self: essential if you override this definition
:param pCompliance:
:param pRuleSuccess:
:returns: boolean - If successful True; If failure False
@author: ekkehard j. koch
'''
success = True
self.logdispatch.log(LogPriority.DEBUG, "pCompliance = " + \
str(pCompliance) + ".")
self.logdispatch.log(LogPriority.DEBUG, "pRuleSuccess = " + \
str(pRuleSuccess) + ".")
cmd = [self.dc, "read", self.path, self.key]
self.logdispatch.log(LogPriority.DEBUG, str(cmd))
if self.ch.executeCommand(cmd):
output = self.ch.getOutputString()
return success
def checkFixForRule(self, pRuleSuccess):
self.logdispatch.log(LogPriority.DEBUG, "pRuleSuccess = " + \
str(pRuleSuccess) + ".")
success = self.checkReportForRule(True, pRuleSuccess)
return success
def checkUndoForRule(self, pRuleSuccess):
self.logdispatch.log(LogPriority.DEBUG, "pRuleSuccess = " + \
str(pRuleSuccess) + ".")
success = self.checkReportForRule(False, pRuleSuccess)
return success
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
gpl-2.0
| 6,575,902,741,037,396,000 | 42.2 | 82 | 0.580177 | false | 4.277228 | true | false | false |
HomeRad/TorCleaner
|
wc/filter/rules/FolderRule.py
|
1
|
3945
|
# -*- coding: iso-8859-1 -*-
# Copyright (C) 2000-2009 Bastian Kleineidam
"""
Group filter rules into folders.
"""
from ... import fileutil, configuration
from . import Rule
def recalc_up_down(rules):
"""
    Add .up and .down attributes to rules, used to display up/down
    arrows in GUIs.
"""
upper = len(rules)-1
for i, rule in enumerate(rules):
rule.up = (i>0)
rule.down = (i<upper)
class FolderRule(Rule.Rule):
"""
Container for a list of rules.
"""
def __init__(self, sid=None, titles=None, descriptions=None,
disable=0, filename=""):
"""
Initialize rule data.
"""
super(FolderRule, self).__init__(sid=sid, titles=titles,
descriptions=descriptions, disable=disable)
# make filename read-only
self._filename = filename
self.rules = []
self.attrnames.extend(('oid', 'configversion'))
self.intattrs.append('oid')
self.oid = None
self.configversion = "-"
def __str__(self):
"""
Return rule data as string.
"""
return super(FolderRule, self).__str__() + \
("\nrules: %d" % len(self.rules))
def filename_get(self):
"""
Get filename where this folder is stored.
"""
return self._filename
filename = property(filename_get)
def append_rule(self, r):
"""
Append rule to folder.
"""
r.oid = len(self.rules)
# note: the rules are added in order
self.rules.append(r)
r.parent = self
def delete_rule(self, i):
"""
Delete rule from folder with index i.
"""
del self.rules[i]
recalc_up_down(self.rules)
def update(self, rule, dryrun=False, log=None):
"""
Update this folder with given folder rule data.
"""
chg = super(FolderRule, self).update(rule, dryrun=dryrun, log=log)
for child in rule.rules:
if child.sid is None or not child.sid.startswith("wc"):
# ignore local rules
continue
oldrule = self.get_rule(child.sid)
if oldrule is not None:
if oldrule.update(child, dryrun=dryrun, log=log):
chg = True
else:
print >> log, _("inserting new rule %s") % \
child.tiptext()
if not dryrun:
self.rules.append(child)
chg = True
if chg:
recalc_up_down(self.rules)
return chg
def get_rule(self, sid):
"""
Return rule with given sid or None if not found.
"""
for rule in self.rules:
if rule.sid == sid:
return rule
return None
def toxml(self):
"""
Rule data as XML for storing.
"""
s = u"""<?xml version="1.0" encoding="%s"?>
<!DOCTYPE folder SYSTEM "filter.dtd">
%s oid="%d" configversion="%s">""" % \
(configuration.ConfigCharset, super(FolderRule, self).toxml(),
self.oid, self.configversion)
s += u"\n"+self.title_desc_toxml()+u"\n"
for r in self.rules:
s += u"\n%s\n" % r.toxml()
return s+u"</folder>\n"
def write(self, fd=None):
"""
Write xml data into filename.
@raise: OSError if file could not be written.
"""
s = self.toxml().encode("iso-8859-1", "replace")
if fd is None:
fileutil.write_file(self.filename, s)
else:
fd.write(s)
def tiptext(self):
"""
Return short info for gui display.
"""
l = len(self.rules)
if l == 1:
text = _("with 1 rule")
else:
text = _("with %d rules") % l
return "%s %s" % (super(FolderRule, self).tiptext(), text)
|
gpl-2.0
| 716,819,241,840,942,200 | 27.79562 | 78 | 0.509759 | false | 3.937126 | true | false | false |
RAJSD2610/SDNopenflowSwitchAnalysis
|
TotalFlowPlot.py
|
1
|
2742
|
import os
import pandas as pd
import matplotlib.pyplot as plt
import seaborn
seaborn.set()
path= os.path.expanduser("~/Desktop/ece671/udpt8")
num_files = len([f for f in os.listdir(path)if os.path.isfile(os.path.join(path, f))])
print(num_files)
u8=[]
i=0
def file_len(fname):
with open(fname) as f:
for i, l in enumerate(f):
pass
return i + 1
while i<(num_files/2) :
# df+=[]
j=i+1
path ="/home/vetri/Desktop/ece671/udpt8/ftotal."+str(j)+".csv"
y = file_len(path)
# except: pass
#df.append(pd.read_csv(path,header=None))
# a+=[]
#y=len(df[i].index)-1 #1 row added by default so that table has a entry
if y<0:
y=0
u8.append(y)
i+=1
print(u8)
path= os.path.expanduser("~/Desktop/ece671/udpnone")
num_files = len([f for f in os.listdir(path)if os.path.isfile(os.path.join(path, f))])
print(num_files)
i=0
j=0
u=[]
while i<(num_files/2):
j=i+1
path ="/home/vetri/Desktop/ece671/udpnone/ftotal."+str(j)+".csv"
y = file_len(path)
# except: pass
#df.append(pd.read_csv(path,header=None))
# a+=[]
#y=len(df[i].index)-1 #1 row added by default so that table has a entry
if y<0:
y=0
u.append(y)
i+=1
print(u)
path= os.path.expanduser("~/Desktop/ece671/tcpnone")
num_files = len([f for f in os.listdir(path)if os.path.isfile(os.path.join(path, f))])
print(num_files)
i=0
j=0
t=[]
while i<(num_files/2):
j=i+1
path ="/home/vetri/Desktop/ece671/tcpnone/ftotal."+str(j)+".csv"
y = file_len(path)
# except: pass
#df.append(pd.read_csv(path,header=None))
# a+=[]
#y=len(df[i].index)-1 #1 row added by default so that table has a entry
if y<0:
y=0
t.append(y)
i+=1
print(t)
path= os.path.expanduser("~/Desktop/ece671/tcpt8")
num_files = len([f for f in os.listdir(path)if os.path.isfile(os.path.join(path, f))])
print(num_files)
i=0
j=0
t8=[]
while i<(num_files/2):
j=i+1
path ="/home/vetri/Desktop/ece671/tcpt8/ftotal."+str(j)+".csv"
y = file_len(path)
# except: pass
#df.append(pd.read_csv(path,header=None))
# a+=[]
#y=len(df[i].index)-1 #1 row added by default so that table has a entry
if y<0:
y=0
t8.append(y)
i+=1
print(t8)
#plt.figure(figsize=(4, 5))
plt.plot(list(range(1,len(u8)+1)),u8, '.-',label="udpt8")
plt.plot(list(range(1,len(u)+1)),u, '.-',label="udpnone")
plt.plot(list(range(1,len(t)+1)),t, '.-',label="tcpnone")
plt.plot(list(range(1,len(t8)+1)),t8, '.-',label="tcpt8")
plt.title("Total Flows Present after 1st flow")
plt.xlabel("time(s)")
plt.ylabel("flows")
#plt.frameon=True
plt.legend()
plt.show()
|
gpl-3.0
| 3,297,434,910,053,564,400 | 24.388889 | 86 | 0.591174 | false | 2.515596 | false | false | false |
cschenck/blender_sim
|
fluid_sim_deps/blender-2.69/2.69/scripts/addons/io_scene_3ds/__init__.py
|
1
|
6950
|
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8-80 compliant>
bl_info = {
"name": "Autodesk 3DS format",
"author": "Bob Holcomb, Campbell Barton",
"blender": (2, 57, 0),
"location": "File > Import-Export",
"description": "Import-Export 3DS, meshes, uvs, materials, textures, "
"cameras & lamps",
"warning": "",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
"Scripts/Import-Export/Autodesk_3DS",
"tracker_url": "",
"support": 'OFFICIAL',
"category": "Import-Export"}
if "bpy" in locals():
import imp
if "import_3ds" in locals():
imp.reload(import_3ds)
if "export_3ds" in locals():
imp.reload(export_3ds)
import bpy
from bpy.props import StringProperty, FloatProperty, BoolProperty, EnumProperty
from bpy_extras.io_utils import (ImportHelper,
ExportHelper,
axis_conversion,
)
class Import3DS(bpy.types.Operator, ImportHelper):
"""Import from 3DS file format (.3ds)"""
bl_idname = "import_scene.autodesk_3ds"
bl_label = 'Import 3DS'
bl_options = {'UNDO'}
filename_ext = ".3ds"
filter_glob = StringProperty(default="*.3ds", options={'HIDDEN'})
constrain_size = FloatProperty(
name="Size Constraint",
description="Scale the model by 10 until it reaches the "
"size constraint (0 to disable)",
min=0.0, max=1000.0,
soft_min=0.0, soft_max=1000.0,
default=10.0,
)
use_image_search = BoolProperty(
name="Image Search",
description="Search subdirectories for any associated images "
"(Warning, may be slow)",
default=True,
)
use_apply_transform = BoolProperty(
name="Apply Transform",
description="Workaround for object transformations "
"importing incorrectly",
default=True,
)
axis_forward = EnumProperty(
name="Forward",
items=(('X', "X Forward", ""),
('Y', "Y Forward", ""),
('Z', "Z Forward", ""),
('-X', "-X Forward", ""),
('-Y', "-Y Forward", ""),
('-Z', "-Z Forward", ""),
),
default='Y',
)
axis_up = EnumProperty(
name="Up",
items=(('X', "X Up", ""),
('Y', "Y Up", ""),
('Z', "Z Up", ""),
('-X', "-X Up", ""),
('-Y', "-Y Up", ""),
('-Z', "-Z Up", ""),
),
default='Z',
)
def execute(self, context):
from . import import_3ds
keywords = self.as_keywords(ignore=("axis_forward",
"axis_up",
"filter_glob",
))
global_matrix = axis_conversion(from_forward=self.axis_forward,
from_up=self.axis_up,
).to_4x4()
keywords["global_matrix"] = global_matrix
return import_3ds.load(self, context, **keywords)
class Export3DS(bpy.types.Operator, ExportHelper):
"""Export to 3DS file format (.3ds)"""
bl_idname = "export_scene.autodesk_3ds"
bl_label = 'Export 3DS'
filename_ext = ".3ds"
filter_glob = StringProperty(
default="*.3ds",
options={'HIDDEN'},
)
use_selection = BoolProperty(
name="Selection Only",
description="Export selected objects only",
default=False,
)
axis_forward = EnumProperty(
name="Forward",
items=(('X', "X Forward", ""),
('Y', "Y Forward", ""),
('Z', "Z Forward", ""),
('-X', "-X Forward", ""),
('-Y', "-Y Forward", ""),
('-Z', "-Z Forward", ""),
),
default='Y',
)
axis_up = EnumProperty(
name="Up",
items=(('X', "X Up", ""),
('Y', "Y Up", ""),
('Z', "Z Up", ""),
('-X', "-X Up", ""),
('-Y', "-Y Up", ""),
('-Z', "-Z Up", ""),
),
default='Z',
)
def execute(self, context):
from . import export_3ds
keywords = self.as_keywords(ignore=("axis_forward",
"axis_up",
"filter_glob",
"check_existing",
))
global_matrix = axis_conversion(to_forward=self.axis_forward,
to_up=self.axis_up,
).to_4x4()
keywords["global_matrix"] = global_matrix
return export_3ds.save(self, context, **keywords)
# Add to a menu
def menu_func_export(self, context):
self.layout.operator(Export3DS.bl_idname, text="3D Studio (.3ds)")
def menu_func_import(self, context):
self.layout.operator(Import3DS.bl_idname, text="3D Studio (.3ds)")
def register():
bpy.utils.register_module(__name__)
bpy.types.INFO_MT_file_import.append(menu_func_import)
bpy.types.INFO_MT_file_export.append(menu_func_export)
def unregister():
bpy.utils.unregister_module(__name__)
bpy.types.INFO_MT_file_import.remove(menu_func_import)
bpy.types.INFO_MT_file_export.remove(menu_func_export)
# NOTES:
# why add 1 extra vertex? and remove it when done? -
# "Answer - eekadoodle - would need to re-order UV's without this since face
# order isnt always what we give blender, BMesh will solve :D"
#
# disabled scaling to size, this requires exposing bb (easy) and understanding
# how it works (needs some time)
if __name__ == "__main__":
register()
|
gpl-3.0
| -6,396,439,276,597,938,000 | 32.095238 | 79 | 0.496259 | false | 4.131986 | false | false | false |
HyperloopTeam/FullOpenMDAO
|
cantera-2.0.2/interfaces/python/MixMaster/Units/unit.py
|
1
|
2833
|
import operator
# The exception types raised below (ImcompatibleUnits, BadOperation) are
# neither defined nor imported in the original file; minimal definitions are
# added here so the module is self-contained. The original misspelling
# "ImcompatibleUnits" is kept because the raise sites below use it.
class ImcompatibleUnits(Exception):
    pass
class BadOperation(Exception):
    pass
class unit:
_zero = (0,) * 7
_negativeOne = (-1, ) * 7
_labels = ('m', 'kg', 's', 'A', 'K', 'mol', 'cd')
def __init__(self, value, derivation):
self.value = value
self.derivation = derivation
return
def __add__(self, other):
if not self.derivation == other.derivation:
raise ImcompatibleUnits(self, other)
return unit(self.value + other.value, self.derivation)
def __sub__(self, other):
if not self.derivation == other.derivation:
raise ImcompatibleUnits(self, other)
return unit(self.value - other.value, self.derivation)
def __mul__(self, other):
if type(other) == type(0) or type(other) == type(0.0):
return unit(other*self.value, self.derivation)
value = self.value * other.value
derivation = tuple(map(operator.add, self.derivation, other.derivation))
return unit(value, derivation)
def __div__(self, other):
if type(other) == type(0) or type(other) == type(0.0):
return unit(self.value/other, self.derivation)
value = self.value / other.value
derivation = tuple(map(operator.sub, self.derivation, other.derivation))
return unit(value, derivation)
def __pow__(self, other):
if type(other) != type(0) and type(other) != type(0.0):
raise BadOperation
value = self.value ** other
derivation = tuple(map(operator.mul, [other]*7, self.derivation))
return unit(value, derivation)
def __pos__(self): return self
def __neg__(self): return unit(-self.value, self.derivation)
def __abs__(self): return unit(abs(self.value), self.derivation)
def __invert__(self):
value = 1./self.value
derivation = tuple(map(operator.mul, self._negativeOne, self.derivation))
return unit(value, derivation)
def __rmul__(self, other):
return unit.__mul__(self, other)
def __rdiv__(self, other):
if type(other) != type(0) and type(other) != type(0.0):
raise BadOperation(self, other)
value = other/self.value
derivation = tuple(map(operator.mul, self._negativeOne, self.derivation))
return unit(value, derivation)
def __float__(self):
return self.value
#if self.derivation == self._zero: return self.value
#raise BadConversion(self)
def __str__(self):
str = "%g" % self.value
for i in range(0, 7):
exponent = self.derivation[i]
if exponent == 0: continue
if exponent == 1:
str = str + " %s" % (self._labels[i])
else:
str = str + " %s^%d" % (self._labels[i], exponent)
return str
dimensionless = unit(1, unit._zero)
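# A minimal usage sketch (assumed, not part of the original module):
def _demo_units():
    meter = unit(1.0, (1, 0, 0, 0, 0, 0, 0))
    second = unit(1.0, (0, 0, 1, 0, 0, 0, 0))
    speed = 3.0 * meter / second   # derivation becomes (1, 0, -1, 0, 0, 0, 0)
    return str(speed)              # -> "3 m s^-1"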
|
gpl-2.0
| 3,539,097,702,608,451,000 | 25.476636 | 81 | 0.570773 | false | 3.674449 | false | false | false |
lmazuel/azure-sdk-for-python
|
azure-mgmt-compute/azure/mgmt/compute/v2016_03_30/models/windows_configuration_py3.py
|
1
|
2719
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class WindowsConfiguration(Model):
"""Specifies Windows operating system settings on the virtual machine.
:param provision_vm_agent: Indicates whether virtual machine agent should
be provisioned on the virtual machine. <br><br> When this property is not
specified in the request body, default behavior is to set it to true.
This will ensure that VM Agent is installed on the VM so that extensions
can be added to the VM later.
:type provision_vm_agent: bool
:param enable_automatic_updates: Indicates whether virtual machine is
enabled for automatic updates.
:type enable_automatic_updates: bool
:param time_zone: Specifies the time zone of the virtual machine. e.g.
"Pacific Standard Time"
:type time_zone: str
:param additional_unattend_content: Specifies additional base-64 encoded
XML formatted information that can be included in the Unattend.xml file,
which is used by Windows Setup.
:type additional_unattend_content:
list[~azure.mgmt.compute.v2016_03_30.models.AdditionalUnattendContent]
:param win_rm: Specifies the Windows Remote Management listeners. This
enables remote Windows PowerShell.
:type win_rm: ~azure.mgmt.compute.v2016_03_30.models.WinRMConfiguration
"""
_attribute_map = {
'provision_vm_agent': {'key': 'provisionVMAgent', 'type': 'bool'},
'enable_automatic_updates': {'key': 'enableAutomaticUpdates', 'type': 'bool'},
'time_zone': {'key': 'timeZone', 'type': 'str'},
'additional_unattend_content': {'key': 'additionalUnattendContent', 'type': '[AdditionalUnattendContent]'},
'win_rm': {'key': 'winRM', 'type': 'WinRMConfiguration'},
}
def __init__(self, *, provision_vm_agent: bool=None, enable_automatic_updates: bool=None, time_zone: str=None, additional_unattend_content=None, win_rm=None, **kwargs) -> None:
super(WindowsConfiguration, self).__init__(**kwargs)
self.provision_vm_agent = provision_vm_agent
self.enable_automatic_updates = enable_automatic_updates
self.time_zone = time_zone
self.additional_unattend_content = additional_unattend_content
self.win_rm = win_rm
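# A minimal construction sketch (field values are illustrative assumptions):
def _demo_windows_configuration():
    return WindowsConfiguration(
        provision_vm_agent=True,
        enable_automatic_updates=True,
        time_zone="Pacific Standard Time",
    )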
|
mit
| 7,741,615,214,177,697,000 | 49.351852 | 180 | 0.670835 | false | 4.189522 | false | false | false |