text stringlengths 38 1.54M |
|---|
from websocket_server import WebsocketServer
from lib.message import MessageDecoder
import configparser
# Called for every client connecting (after handshake)
def new_client(client, server):
    """Log the id assigned to a freshly connected websocket client."""
    client_id = client['id']
    print("New client connected and was given id %d" % client_id)
# Called for every client disconnecting
def client_left(client, server):
    """Log which client id just disconnected."""
    client_id = client['id']
    print("Client(%d) disconnected" % client_id)
# Called when a client sends a message
def message_received(client, server, message):
    """Decode an incoming message and fire it on the server's event emitter."""
    payload = MessageDecoder(message).to_hash()
    payload['client'] = client
    server.em.trigger(payload['event'], payload)
# Read host/port from the [DEFAULT] section of config.ini and start the
# websocket server; run_forever() blocks until the process is killed.
config = configparser.ConfigParser()
config.read('config.ini')
server = WebsocketServer(int(config['DEFAULT']['port']), config['DEFAULT']['host'])
server.set_fn_new_client(new_client)
server.set_fn_client_left(client_left)
server.set_fn_message_received(message_received)
server.run_forever()
|
# part 1
def draw_stars(values):
    """Print one line of '*' per number in *values*, each line that many stars.

    The parameter was renamed from ``list`` -- it shadowed the builtin.
    """
    for count in values:
        print(count * "*")
draw_stars([1,2,3,4,5])
#part 2
def draw_stars2(items):
    """For each item: ints print that many stars; anything else (strings)
    prints its first character repeated len(item) times.

    Uses isinstance() instead of ``type(x) is int`` (handles int subclasses),
    and the parameter no longer shadows the builtin ``list``.
    """
    for item in items:
        if isinstance(item, int):
            print(item * "*")
        else:
            print(len(item) * item[:1])
draw_stars2([1,2,3,4,5,"thing"])
|
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from importlib import reload
from os import environ
from unittest import TestCase
from unittest.mock import Mock, patch
from opentelemetry.baggage.propagation import BaggagePropagator
from opentelemetry.configuration import Configuration
from opentelemetry.trace.propagation.tracecontext import (
TraceContextTextMapPropagator,
)
class TestPropagators(TestCase):
    """Tests for the global propagator assembled by opentelemetry.propagators.

    That module configures its composite propagator at import time, so each
    test patches CompositeHTTPPropagator with a side effect that runs the
    assertions, then reload()s the module to re-trigger the import-time logic.
    """
    @patch("opentelemetry.propagators.composite.CompositeHTTPPropagator")
    def test_default_composite_propagators(self, mock_compositehttppropagator):
        # With no OTEL_PROPAGATORS set, the composite must be built from
        # exactly the two defaults: W3C TraceContext and Baggage.
        def test_propagators(propagators):
            propagators = {propagator.__class__ for propagator in propagators}
            self.assertEqual(len(propagators), 2)
            self.assertEqual(
                propagators, {TraceContextTextMapPropagator, BaggagePropagator}
            )
        # Run the assertions at the moment the composite is instantiated.
        mock_compositehttppropagator.configure_mock(
            **{"side_effect": test_propagators}
        )
        import opentelemetry.propagators
        reload(opentelemetry.propagators)
    @patch.dict(environ, {"OTEL_PROPAGATORS": "a,b,c"})
    @patch("opentelemetry.propagators.composite.CompositeHTTPPropagator")
    @patch("pkg_resources.iter_entry_points")
    def test_non_default_propagators(
        self, mock_iter_entry_points, mock_compositehttppropagator
    ):
        # Clear cached configuration so OTEL_PROPAGATORS is re-read on reload.
        Configuration._reset()
        # Each fake entry point load()s to a factory whose call yields the
        # propagator's own name, so the composite should receive ["a","b","c"].
        def iter_entry_points_mock(_, propagator):
            return iter(
                [
                    Mock(
                        **{
                            "load.side_effect": [
                                Mock(**{"side_effect": [propagator]})
                            ]
                        }
                    )
                ]
            )
        mock_iter_entry_points.configure_mock(
            **{"side_effect": iter_entry_points_mock}
        )
        def test_propagators(propagators):
            self.assertEqual(propagators, ["a", "b", "c"])
        mock_compositehttppropagator.configure_mock(
            **{"side_effect": test_propagators}
        )
        import opentelemetry.propagators
        reload(opentelemetry.propagators)
|
#!/usr/bin/python
import iplanet_5_2
import iplanet_5_2hf0_8
import iplanet_5_2p1
import iplanet_5_2hf1_02
import iplanet_5_2hf1_16
import iplanet_5_2hf1_21
import iplanet_5_2hf1_25
import iplanet_5_2p2
# Registry of every supported iPlanet 5.2 target class, one per patch/hotfix
# level; factory() below selects one by its ``version`` attribute.
targets = \
[iplanet_5_2.iplanet_5_2,
 iplanet_5_2hf0_8.iplanet_5_2hf0_8,
 iplanet_5_2p1.iplanet_5_2p1,
 iplanet_5_2hf1_02.iplanet_5_2hf1_02,
 iplanet_5_2hf1_16.iplanet_5_2hf1_16,
 iplanet_5_2hf1_21.iplanet_5_2hf1_21,
 iplanet_5_2hf1_25.iplanet_5_2hf1_25,
 iplanet_5_2p2.iplanet_5_2p2]
def factory(version):
    """Instantiate the target class whose version string matches, else None."""
    for candidate in targets:
        if candidate.version == version:
            return candidate()
    return None
def list():
    # Print every supported version string, one per line.
    # NOTE(review): shadows the builtin list(); kept as-is for compatibility.
    # Python 2 print statements -- this module is Python 2 only.
    print "Supported versions:"
    for target in targets:
        print "    %s" % target.version
def main():
    # Default action when run as a script: list the supported versions.
    list()
if __name__ == "__main__":
    main()
|
from typing import (
    Union,
)
from wasm.datatypes import (
    ValType,
)
from .unknown import (
    Unknown,
)
# An operand is either a concrete wasm value type or the Unknown sentinel
# (see .unknown) used where the exact type cannot be determined.
Operand = Union[ValType, Unknown]
|
#!/usr/bin/env python3
# Decide whether a wall of the given height/width can be built by laying the
# bricks in order, left to right, each row filled exactly to ``width``.
height, width, num_bricks = map(int, input().split())
bricks = [int(x) for x in input().split()]
c_h = 0  # current height: number of completed rows
c_w = 0  # current width filled in the row being built
i = 0
# Stop when a brick would overflow the row, when we run out of bricks,
# or when the wall already reached full height.
while i < len(bricks) and (bricks[i] + c_w) <= width and c_h != height:
    if c_w + bricks[i] == width:
        # Row completed exactly -- start the next one.
        c_w = 0
        c_h += 1
    else:
        c_w += bricks[i]
    i += 1
# Old-style conditional expression: YES iff the wall reached full height.
print(c_h == height and "YES" or "NO")
from coffer.utils import getRootDir, text, isRoot, getArg
from coffer import remove
import shutil
import sys
import os
def createArchive(path, name):
    """Announce packaging, then tar up *path* as "<name>.tar"."""
    print(text.creatingPackage)
    shutil.make_archive(name, "tar", path)
def compress(path, name):
    """Archive *path* and give the result the .coffer extension."""
    createArchive(path, name)
    tar_name = name + ".tar"
    os.rename(tar_name, name + ".coffer")
def package():
    """Package an existing environment directory into a <env>.coffer archive.

    Exits early (via sys.exit with a user-facing message) when no environment
    name was passed, when not running as root, or when the environment
    directory does not exist.
    """
    rootDir = getRootDir.getEnvsDir()
    env = getArg.getArg(0)  # first CLI argument: the environment name
    if not env:
        sys.exit(text.packageHelper)
    if not isRoot.isRoot():
        sys.exit(text.notRoot)
    if not remove.checkIfExists(rootDir + env):
        sys.exit(text.envDoesntExist)
    # Unmount anything mounted inside the env before archiving its tree.
    remove.unmountAll(rootDir + env)
    compress(rootDir + env, env)
    print(text.packaged)
|
import sys
import numpy as np
'''
习得:
1、矩阵比对字符串
2、创建矩阵数组-----None
n = np.arange(0, 30, 2)# start 0 step 2, stop before 30
n = n.reshape(3, 5) # reshape array to be 3x5
算方思想,利用伪矩阵比对两个字符串
'''
def getmaxStr(str1, str2):
    """Return the longest common substring of *str1* and *str2* (DP matrix).

    Builds an (len1+1) x (len2+1) table M where M[i][j] is the length of the
    common suffix of str1[:i] and str2[:j]; the maximum cell marks where the
    longest common substring ends in str1. Returns '' when there is none.

    Bug fixed: the original re-zeroed the border with ``M[j][0] = 0`` for
    j up to len2, which raised IndexError whenever len2 > len1 (M only has
    len1+1 rows). np.zeros already initialises the borders, so the explicit
    zeroing loops are dropped entirely. Debug prints removed.
    """
    len1 = len(str1)
    len2 = len(str2)
    maxs = 0   # length of the longest common substring found so far
    maxI = 0   # index in str1 just past the end of that substring
    M = np.zeros((len1 + 1, len2 + 1))
    # Fill the table with the classic recurrence.
    for i in range(1, len1 + 1):
        for j in range(1, len2 + 1):
            if str1[i - 1] == str2[j - 1]:
                M[i][j] = M[i - 1][j - 1] + 1
                if M[i][j] > maxs:
                    maxs = int(M[i][j])
                    maxI = i
            # On mismatch the cell stays 0 from np.zeros.
    return str1[maxI - maxs:maxI]
# Improved version: the DP-matrix method above costs O(m*n) time and O(m*n)
# space; this diagonal-scan version costs O((m+n)*n) time and O(1) extra space.
def getMaxStr(str1, str2):
    """Return the longest common substring of *str1* and *str2*.

    Slides str2 under str1 at every possible alignment and tracks the longest
    run of matching characters. Returns '' when there is no common substring.

    Bug fixed: on a mismatch the original only reset the running counter when
    it was NOT a new maximum, so a run that set a new maximum kept counting
    across the mismatch and inflated later results. The counter is now reset
    after every mismatch. Debug prints removed.
    """
    len1, len2 = len(str1), len(str2)
    maxlen = 0      # best run length found so far
    maxLenEnd = 0   # index in str1 just past the end of the best run
    for i in range(len1 + len2):
        # Starting offsets of the current diagonal alignment.
        if i < len1:
            s1_begin, s2_begin = len1 - i, 0
        else:
            s1_begin, s2_begin = 0, i - len1
        run = 0
        j = 0
        while (s1_begin + j < len1) and (s2_begin + j < len2):
            if str1[s1_begin + j] == str2[s2_begin + j]:
                run += 1
            else:
                if run > maxlen:
                    maxlen = run
                    maxLenEnd = s1_begin + j
                run = 0  # always reset after a mismatch
            j += 1
        # A run may extend to the end of the diagonal.
        if run > maxlen:
            maxlen = run
            maxLenEnd = s1_begin + j
    return str1[maxLenEnd - maxlen:maxLenEnd]
print(getMaxStr('asde', 'deo'))
|
def fact(n=None):
    """Compute and print n!.

    Backward-compatible generalization: *n* may now be passed directly; when
    omitted (the original behaviour) it is read interactively from stdin.
    Returns the factorial, or None for negative input.
    """
    if n is None:
        n = int(input("Enter the number to find factorial...!"))
    if n == 0:
        # Fixed stray ')' in the original message text.
        print("The factorial is 1")
        return 1
    if n < 0:
        print("Enter a positive integer")
        return None
    c = 1
    for a in range(1, n + 1):
        c = c * a
    print(a, "!= ", c)
    return c
fact()
|
import smtplib
from email.mime.text import MIMEText
# 1. Connect to the SMTP server.
smtp = smtplib.SMTP("smtp.163.com")
# 2. Log in.
# NOTE(review): the password here is a placeholder; real credentials must
# come from configuration/environment, never from source code.
smtp.login("18137128152@163.com","******")
# 3. Compose and send an HTML email.
sender = "18137128152@163.com"
recever = "zhangzhaoyu@qikux.com"
message = MIMEText("这是一封使用<h1>Python</h1>写的邮件",_subtype="html")
message["from"] = sender
# NOTE(review): the "to" header lists different addresses than the envelope
# recipients passed to sendmail() below -- confirm this is intentional.
message["to"] = "aaaaa@qqq.com"+","+"496575233@qq.com"
message["subject"] = "学会发邮件"
smtp.sendmail(sender,[recever, "496575233@qq.com"],message.as_string())
# 4. Close the connection.
smtp.quit()
from TwitterSearch import *
# User credentials to access Twitter API
# NOTE(review): live-looking API credentials are committed in source. They
# must be revoked and loaded from environment/config instead.
ACCESS_TOKEN = '4364945415-Ez38de5EYcRmEYIbtUsS8LDy0LhZwypoogypXjD'
ACCESS_TOKEN_SECRET = 'mCM3IF1Aele7WogZqmLWxEOaU9G1sV6s1MHIPxHlUKyXr'
CONSUMER_KEY = '5JoigYcA2Mzb9tA1DGQPAQroi'
CONSUMER_SECRET = 'ZDTJ14yIKHyDLeLlWXDNNAEDzOCg6nHz8z9c6eISFHLZ2oDBkV'
# SEARCHING TWITTER
# Use a try-except condition to avoid things breaking down because of errors
try:
    # Keyword search: English tweets, no entity metadata.
    tso = TwitterSearchOrder()
    tso.set_keywords(['man in the high castle'])
    tso.set_language('en')
    tso.set_include_entities(False)
    ts = TwitterSearch(
        consumer_key=CONSUMER_KEY,
        consumer_secret=CONSUMER_SECRET,
        access_token=ACCESS_TOKEN,
        access_token_secret=ACCESS_TOKEN_SECRET
    )
    # Print author and text for every matching tweet.
    for tweet in ts.search_tweets_iterable(tso):
        print("@%s tweeted: %s" % (tweet['user']['screen_name'], tweet['text']))
except TwitterSearchException as e:
    print(e)
# ACCESSING USER TIMELINES
# Basically works the same as before
try:
    tuo = TwitterUserOrder('theApost8te')
    # Syntax is exactly the same as before, except using a TwitterUserOrder object instead of a TwitterSearchOrder
    # object
    ts = TwitterSearch(
        consumer_key=CONSUMER_KEY,
        consumer_secret=CONSUMER_SECRET,
        access_token=ACCESS_TOKEN,
        access_token_secret=ACCESS_TOKEN_SECRET
    )
    # This will print all of my tweets
    for tweet in ts.search_tweets_iterable(tuo):
        print("@%s tweeted: %s" % (tweet['user']['screen_name'], tweet['text']))
except TwitterSearchException as e:
    print(e)
|
# -*- coding: utf-8 -*-
import xmpp, inspect, re
import ConfigParser
class bot:
    """Jabber/XMPP chat bot (Python 2; xmpp + ConfigParser based).

    Command handlers are discovered by method-name prefix: methods starting
    with ``nia_`` become user commands, methods starting with ``admin_``
    become admin-only commands. None are defined in this class itself --
    presumably they are added by a subclass; verify against the project.
    """
    def DEBUG(self, text=None):
        '''Debug/testing mode: switch to the test config and echo *text*.'''
        if self.debug:
            self.config_file = 'nia_test.cfg'
            print unicode(text)
    # Name prefixes used to auto-discover command handler methods.
    comm_pref = 'nia_'
    admin_comm_pref = 'admin_'
    def __init__(self):
        # Flags: 1 enables debug tracing / message logging.
        self.debug = 0
        self.logging = 0
        self.config_file = 'nia.cfg'
        # Client identity reported in iq:version replies.
        self.resource= 'Nia Teppelin .NET'
        self.version = '0.666'
        self.os = 'Windows Vista'
        self.DEBUG()
        user, confs, ignore, alias = self.config(False)
        self.JID = user['jid']
        self.PASSWD = user['passwd']
        self.NICK= unicode(user['nick'],'utf-8')
        self.admin = xmpp.protocol.JID(user['admin'])
        self.CONFS = confs
        self.ignore = ignore
        self.alias = alias
        # Build the command tables by scanning for prefixed methods.
        self.commands = {}
        self.admin_commands = {}
        self.help = {'com':[],'admin':[]}
        for (name, value) in inspect.getmembers(self):
            if inspect.ismethod(value) and name.startswith(self.comm_pref):
                self.commands[name[len(self.comm_pref):]] = value
                self.help['com'].append(name[len(self.comm_pref):])
            if inspect.ismethod(value) and name.startswith(self.admin_comm_pref):
                self.admin_commands[name[len(self.admin_comm_pref):]] = value
                self.help['admin'].append(name[len(self.admin_comm_pref):])
        # Collapse the help lists into ready-to-send comma-separated strings.
        self.help = {'com':', '.join(self.help['com']),'admin':', '.join(self.help['admin'])}
    def config(self,flag,confs=None,ignore=None):
        """Read (flag=False) or write (flag=True) the bot's config file."""
        config = ConfigParser.ConfigParser()
        def config_write():
            # Persist aliases plus the [general] connection settings.
            config.add_section('alias')
            for key in self.alias:
                config.set('alias', key, self.alias[key])
            config.add_section('general')
            config.set('general', 'jid', self.JID)
            config.set('general', 'passwd', self.PASSWD)
            config.set('general', 'nick', self.NICK)
            config.set('general', 'admin', self.admin)
            config.set('general', 'ignore', ','.join(ignore))
            config.set('general', 'confs', ','.join(confs) )
            config.write(open(self.config_file,'w'))
        def config_read():
            # Returns (user dict, conference list, ignore list, alias dict).
            alias = {}
            config.read(self.config_file)
            user = {'jid':config.get('general','jid'),
                    'passwd':config.get('general','passwd'),
                    'nick':config.get('general','nick'),
                    'admin':config.get('general','admin')}
            confs = config.get('general','confs').decode('utf-8').split(',')
            ignore = config.get('general','ignore').decode('utf-8').split(',')
            for key in config.options('alias'):
                alias[key] = config.get('alias',key)
            return user, confs, ignore, alias
        if flag:
            config_write()
        else:
            return config_read()
    def connect(self):
        '''Connect to the XMPP server and register stanza handlers.'''
        self.jid = xmpp.protocol.JID(self.JID)
        self.conn=xmpp.Client(self.jid.getDomain(),debug=[])
        self.conn.connect()
        self.conn.auth(self.jid.getNode(),self.PASSWD,'nyaa~')
        self.conn.sendInitPresence()
        self.conn.RegisterDisconnectHandler(self.conn.reconnectAndReauth)
        self.conn.RegisterHandler('message',self.get_mes)
        self.conn.RegisterHandler('iq', self.iq_version, typ='get', ns=xmpp.NS_VERSION)
        self.conn.RegisterHandler('iq', self.get_iq, typ='result', ns=xmpp.NS_VERSION)
    def iq_version(self, conn, iq):
        """Returns reply to iq:version"""
        iq=iq.buildReply('result')
        qp=iq.getTag('query')
        qp.setTagData('name', self.resource)
        qp.setTagData('version', self.version)
        qp.setTagData('os', self.os)
        conn.send(iq)
        # Tell xmpppy the stanza is fully handled.
        raise xmpp.NodeProcessed
    def join_room(self, confs):
        # Join each conference with an empty password and no history replay.
        for conf in confs:
            self.p=xmpp.Presence(to='%s/%s'%(conf,self.NICK))
            self.p.setTag('Nia',namespace=xmpp.NS_MUC).setTagData('password','')
            self.p.getTag('Nia').addChild('history',{'maxchars':'0','maxstanzas':'0'})
            self.conn.send(self.p)
    def leave_room(self, confs):
        # Send an 'unavailable' presence to each conference.
        for conf in confs:
            to = '%s/%s'%(conf,self.NICK)
            self.send_system(to,'offline','unavailable')
    def reconnect(self):
        self.connect()
        self.join_room(self.CONFS)
    def online(self):
        # Main loop: process stanzas forever, reconnecting on malformed XML.
        self.connect()
        self.join_room(self.CONFS)
        while True:
            try:
                self.conn.Process(1)
            except xmpp.protocol.XMLNotWellFormed:
                self.reconnect()
    def send(self, text, extra=None, flag=True):
        '''
        True - chat
        False - xml
        '''
        if flag:
            self.conn.send(xmpp.protocol.Message(self.to,text,self.type))
        else:
            # Send the message in XHTML-IM form; *extra* is the XHTML body.
            xhtml = '''
            <html xmlns='http://jabber.org/protocol/xhtml-im'>
            <body xml:lang='en-US' xmlns='http://www.w3.org/1999/xhtml'>
            %s
            </body></html>
            '''%extra
            self.conn.send("<message to='%s' type='%s'><body>%s</body>%s</message>"%(self.to,self.type,text,xhtml))
    def send_system(self,to,msg,type):
        '''Send a system (presence/status) message.'''
        print to, msg, type
        self.conn.send(xmpp.protocol.Presence(to=to,status=msg,typ=type))
    def XMLescape(self, text):
        # Thin wrapper over xmpppy's XML escaping helper.
        return xmpp.simplexml.XMLescape(text)
    def get_mes(self, conn, mess):
        """Main message handler: parse, expand aliases, dispatch commands."""
        def parse():
            # In groupchat the bot only reacts when addressed by its nick;
            # in private chat the whole body is the command line.
            if self.type_f:
                text = re.findall('^%s[\W]{0,2}[\s]{1,3}(.*?)$'%self.NICK,self.text)
            else:
                text = re.findall('^(.*?)$',self.text)
            self.DEBUG(text)
            if text:
                tmp = text[0].split(' ',1)
                if len(tmp) >= 2: cmd, args = tmp[0], tmp[1]
                elif len(tmp) == 1: cmd, args = tmp[0], ''
                return cmd, args
            else: return False, False
        def alias(cmd, args):
            # Expand a configured alias into the real command plus arguments.
            text = ' '.join( (self.alias[cmd], args))
            tmp = text.split(' ',1)
            if len(tmp) >= 2: cmd, args = tmp[0], tmp[1]
            elif len(tmp) == 1: cmd, args = tmp[0], ''
            return cmd, args
        self.type=mess.getType()
        self.nick=mess.getFrom()
        self.text=mess.getBody()
        nick = self.nick.getResource()
        # Work out the reply target and whether this is a groupchat message.
        if self.type == 'groupchat':
            self.to = self.nick.getStripped()
            nick = self.nick.getResource()
            self.type_f = True
        elif self.type == 'chat' and self.nick.getDomain().startswith('conference.'):
            # Private chat coming from inside a conference room.
            self.to = self.nick
            nick = self.nick.getResource()
            self.type_f = False
        elif self.type == 'chat':
            self.to = self.nick.getStripped()
            nick = self.nick.getNode()
            self.type_f = False
        if self.type_f:
            # NOTE(review): self.LOG is not defined in this class -- presumably
            # provided by a subclass/mixin; verify before relying on logging.
            self.LOG(self.to, nick, self.text)
        self.DEBUG([self.nick,self.text,self.type])
        self.DEBUG(mess)
        # Ignore configured JIDs and the bot's own echoed messages.
        if self.ignore.count(self.nick) or re.match('%s/%s'%(self.to,self.NICK),'%s/%s'%(self.to,nick) ):
            pass
        elif self.text.startswith(self.NICK) or not self.type_f:
            cmd, args = parse()
            if self.alias.has_key(cmd):
                cmd,args = alias(cmd, args)
            if cmd:
                if self.commands.has_key(cmd):
                    self.commands[cmd](args)
                elif self.admin_commands.has_key(cmd):
                    # Admin commands require the sender to be the configured admin.
                    if nick == self.admin.getNode() or self.to == str(self.admin).lower() :
                        self.admin_commands[cmd](self.nick,args)
                    else: self.send('%s~ nyaaa? Access denied...'%nick)
                else: self.send('%s~ nyaaa? Type "help"...'%nick)
            else: self.send('%s~ nyaaa? Type "help"...'%nick)
    def send_iq(self,_type, to):
        # Fire an iq:version query; the answer arrives in get_iq().
        self.conn.send(xmpp.protocol.Iq(to=to,typ=_type ,queryNS=xmpp.NS_VERSION))
    def get_iq(self,conn,mess):
        # Report another client's name/version/os from an iq:version result.
        query = mess.getTag('query')
        client = '%s %s'%(query.getTagData('name'),query.getTagData('version') )
        os = query.getTagData('os')
        target = mess.getFrom().getResource()
        toversion = '%s has client %s at %s'%(target, client, os)
        self.send(toversion)
'''
http://code.google.com/p/robocat/source/browse/trunk/start.py
http://www.linux.org.ru/view-message.jsp?msgid=2591531#2591657
'''
|
import boto3
from pprint import pprint
import pathlib
def upload_file_using_client():
    """
    Uploads file to S3 bucket using S3 client object
    :return: None
    """
    s3 = boto3.client("s3")
    bucket_name = "binary-guy-frompython-1"
    object_name = "sample1.txt"
    # Join with pathlib's '/' operator so the path works on any OS (the
    # original hard-coded a Windows '\\' separator).
    file_name = str(pathlib.Path(__file__).parent.resolve() / "sample_file.txt")
    response = s3.upload_file(file_name, bucket_name, object_name)
    pprint(response)  # upload_file returns None on success
def upload_file_using_resource():
    """
    Uploads file to S3 bucket using S3 resource object
    :return: None
    """
    s3 = boto3.resource("s3")
    bucket_name = "binary-guy-frompython-2"
    object_name = "sample2.txt"
    # Portable path join (the original hard-coded a Windows '\\' separator).
    file_name = str(pathlib.Path(__file__).parent.resolve() / "sample_file.txt")
    bucket = s3.Bucket(bucket_name)
    response = bucket.upload_file(file_name, object_name)
    print(response)  # upload_file returns None on success
def upload_file_to_s3_using_put_object():
    """
    Uploads file to s3 using put_object function of resource object.
    Same function is available for s3 client object as well.
    put_object function gives us much more options and we can set object access policy, tags, encryption etc
    :return: None
    """
    s3 = boto3.resource("s3")
    bucket_name = "binary-guy-frompython-2"
    object_name = "sample_using_put_object.txt"
    # Portable path join (the original hard-coded a Windows '\\' separator).
    file_name = str(pathlib.Path(__file__).parent.resolve() / "sample_file.txt")
    bucket = s3.Bucket(bucket_name)
    # BUG FIX: the original passed Body=file_name, which stored the literal
    # path *string* as the object's content. Body must be the file's data
    # (bytes or a seekable file object), so stream the opened file instead.
    with open(file_name, "rb") as data:
        response = bucket.put_object(
            ACL="private",
            Body=data,
            ServerSideEncryption="AES256",
            Key=object_name,
            Metadata={"env": "dev", "owner": "binary guy"},
        )
    print(
        response
    )  # prints s3.Object(bucket_name='binary-guy-frompython-2', key='sample_using_put_object.txt')
def upload_file_to_s3_using_file_object():
    """
    Uploads to file to s3 using upload_fileobj function of s3 client object.
    Similar function is available for s3 resource object as well.
    In this case, instead of copying file, we open that file and copy data of that file to S3.
    This can be useful when you want to make some validations on data before you copy that data to S3.
    :return: None
    """
    s3 = boto3.client("s3")
    bucket_name = "binary-guy-frompython-1"
    object_name = "sample_file_object.txt"
    # Portable path join (the original hard-coded a Windows '\\' separator).
    file_name = str(pathlib.Path(__file__).parent.resolve() / "sample_file.txt")
    with open(file_name, "rb") as data:
        s3.upload_fileobj(data, bucket_name, object_name)
if __name__ == "__main__":
    # Demo: exercise each upload variant in turn (requires AWS credentials
    # and the two demo buckets to exist).
    upload_file_using_client()
    upload_file_using_resource()
    upload_file_to_s3_using_put_object()
    upload_file_to_s3_using_file_object()
|
from hashmap_repeated_word.hashmap_repeated_word import *
def test_happy_path():
    # First word repeated anywhere in the sentence is 'a'.
    words = "Once upon a time, there was a brave princess who..."
    assert repeated_word(words) == 'a'
def test_happy_path_v2():
    # Longer sentence; 'summer' repeats before 'the' or 'was' recur? No --
    # 'was' appears twice, but the expected first repeat is 'summer', so
    # presumably punctuation/short words are normalised by repeated_word.
    words = "It was a queer, sultry summer, the summer they electrocuted the Rosenbergs, and I didn’t know what I was doing in New York..."
    assert repeated_word(words) == 'summer'
def test_edge_case():
    # Whitespace-only input has no words, so no repeat exists.
    words = " "
    # PEP 8 / E711: compare to None with 'is', not '=='.
    assert repeated_word(words) is None
|
import pygame
from image_rect import ImageRect
from pygame.sprite import Group
from point import Point
class Maze:
    """Game maze built from a character map file.

    Map characters: 'X' brick, 's' shield, 'o' orange portal, 'b' blue
    portal, 'p' collectable point; anything else is empty space. Each map
    cell is BRICK_SIZE pixels square.
    """
    RED = (255, 0, 0)  # NOTE(review): not referenced in this class -- confirm callers use it
    BRICK_SIZE = 3     # pixel size of one map cell (and of a brick sprite)
    def __init__(self, screen, mazefile, brickfile, orangeportalfile, blueportalfile, shieldfile, pointfile):
        self.screen = screen
        self.filename = mazefile
        with open(self.filename, 'r') as f:
            self.rows = f.readlines()
        # Geometry collected by build(); points are a pygame sprite Group.
        self.bricks = []
        self.shields = []
        self.portals = []
        self.points = Group()
        sz = Maze.BRICK_SIZE
        self.brick = ImageRect(screen, brickfile, sz, sz)
        self.shield = ImageRect(screen, shieldfile, sz, sz)
        self.blueportal = ImageRect(screen, blueportalfile, 10 * sz, 20 * sz)
        self.orangeportal = ImageRect(screen, orangeportalfile, 20 * sz, 10 * sz)
        # Pixel step between adjacent map cells.
        self.deltax = self.deltay = Maze.BRICK_SIZE
        self.build(self.points, self.screen)
    def __str__(self): return 'maze(' + self.filename + ')'
    def build(self, points, screen):
        """Scan the character map and create rects/sprites for every cell."""
        r = self.brick.rect
        rshield = self.shield.rect
        rblue = self.blueportal.rect
        rorange = self.orangeportal.rect
        w, h = r.width, r.height
        dx, dy = self.deltax, self.deltay
        index = 0  # NOTE(review): never used below
        for nrow in range(len(self.rows)):
            row = self.rows[nrow]
            for ncol in range(len(row)):
                col = row[ncol]
                if col == 'X':
                    self.bricks.append(pygame.Rect(ncol * dx, nrow * dy, w, h))
                elif col == 's':
                    self.shields.append(pygame.Rect(ncol * dx, nrow * dy, rshield.width, rshield.height))
                elif col == 'o':
                    # Portal rects use hard-coded offsets -- presumably tuned
                    # to the specific map file; confirm before changing.
                    self.orangeportal.rect = pygame.Rect(dx + 12, (nrow - 6) * dy, rorange.width, rorange.height)
                elif col == 'b':
                    self.blueportal.rect = pygame.Rect((ncol - 12) * dx, (nrow - 6) * dy, rblue.width, rblue.height)
                elif col == 'p':
                    point = Point(screen)
                    point.x = ncol * dx
                    point.y = nrow * dy
                    point.rect.x = point.x
                    point.rect.y = point.y
                    points.add(point)
    def blitme(self):
        """Draw bricks, shields, portals and points onto the screen."""
        for rect in self.bricks:
            self.screen.blit(self.brick.image, rect)
        for rect in self.shields:
            self.screen.blit(self.shield.image, rect)
        self.orangeportal.blit()
        self.blueportal.blit()
        for point in self.points:
            point.blit()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2020/1/27 16:25
# @File : 1.二叉树遍历.py
# ----------------------------------------------
# ☆ ☆ ☆ ☆ ☆ ☆ ☆
# >>> Author : Alex
# >>> QQ : 2426671397
# >>> Mail : alex18812649207@gmail.com
# >>> Github : https://github.com/koking0
# ☆ ☆ ☆ ☆ ☆ ☆ ☆
class Node:
    """Binary tree node with recursive and iterative traversals.

    All traversal methods print values (space-separated) rather than
    returning them.
    """
    def __init__(self, value=None):
        self.value, self.left, self.right = value, None, None
    def preOrderRecursive(self):
        # Root -> left subtree -> right subtree.
        if self is None:
            return
        print(self.value, end=' ')
        if self.left:
            self.left.preOrderRecursive()
        if self.right:
            self.right.preOrderRecursive()
    def inOrderRecursive(self):
        # Left subtree -> root -> right subtree.
        if self is None:
            return
        if self.left:
            self.left.inOrderRecursive()
        print(self.value, end=' ')
        if self.right:
            self.right.inOrderRecursive()
    def posOrderRecursive(self):
        # Left subtree -> right subtree -> root.
        if self is None:
            return
        if self.left:
            self.left.posOrderRecursive()
        if self.right:
            self.right.posOrderRecursive()
        print(self.value, end=' ')
    def preOrderLoop(self):
        # Iterative pre-order: push the right child first so the left pops first.
        print("pre order loop: ", end='')
        stack = [self]
        while stack:
            temp = stack.pop()
            print(temp.value, end=' ')
            if temp.right is not None:
                stack.append(temp.right)
            if temp.left is not None:
                stack.append(temp.left)
        print()
    def inOrderLoop(self):
        # Iterative in-order: slide to the leftmost node, then pop and go right.
        print("in order loop: ", end='')
        if self is not None:
            stack = []
            temp = self
            while stack or temp is not None:
                if temp is not None:
                    stack.append(temp)
                    temp = temp.left
                else:
                    temp = stack.pop()
                    print(temp.value, end=' ')
                    temp = temp.right
        print()
    def posOrderLoop(self):
        """
        Pre-order visits root-left-right; post-order is left-right-root.
        So: run a pre-order with the children swapped (root-right-left) and
        print the visited nodes in reverse via a second stack.
        """
        print("pos order loop: ", end='')
        stack1 = []
        if self is not None:
            stack2 = [self]
            while stack2:
                temp = stack2.pop()
                stack1.append(temp)
                if temp.left is not None:
                    stack2.append(temp.left)
                if temp.right is not None:
                    stack2.append(temp.right)
        while stack1:
            print(stack1.pop().value, end=' ')
if __name__ == '__main__':
    # Build the complete tree 1..7 and demo every traversal variant.
    head = Node(1)
    head.left = Node(2)
    head.right = Node(3)
    head.left.left = Node(4)
    head.left.right = Node(5)
    head.right.left = Node(6)
    head.right.right = Node(7)
    print("==============recursive==============")
    print("pre order recursive: ", end='')
    head.preOrderRecursive()
    print()
    print("in order recursive: ", end='')
    head.inOrderRecursive()
    print()
    print("pos order recursive: ", end='')
    head.posOrderRecursive()
    print()
    print("============no recursive=============")
    head.preOrderLoop()
    head.inOrderLoop()
    head.posOrderLoop()
|
def f(a):
    """Split list *a* into consecutive chunks of three elements.

    Works on list slices: each chunk is a sublist of up to three items; the
    final chunk is shorter when fewer than three elements remain.
    """
    chunks = []
    for start in range(0, len(a), 3):
        chunks.append(a[start:start + 3])
    return chunks
k = 0  # count of matching numbers (NOTE(review): never updated below)
m = 0  # maximum number found (NOTE(review): never updated below)
for x in range(1000000,1020001):  # iterate over the target range
    sq = int(x ** 0.5)  # integer part of the square root of x
    d = set()  # divisors; a set, because it cannot hold duplicates
    for i in range(2, sq + 1):  # look for divisors
        if x % i == 0:  # found one: add both i and its cofactor x // i
            d.add(i)
            d.add(x // i)
    d = list(map(lambda x: sum(x), [x for x in f(sorted(d)) if len(x) == 3]))  # see explanation below
    # NOTE(review): the original comment said "equals seven" but the code
    # checks 63 -- confirm which value is intended.
    if len(d) == 63:
        print(x,max(d))
"d = list(map(lambda x: sum(x), [x for x in f(sorted(d)) if len(x) == 3]))"
# The sorted divisor list is chunked by f(), keeping only chunks of exactly
# three elements; map() then sums each chunk with the lambda, and the
# resulting iterator is converted back into a list.
# Generated by Django 2.2.1 on 2019-05-12 09:05
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the home.Contact model (name/email/message + auto timestamp)."""
    dependencies = [
        ('home', '0006_auto_20190508_0758'),
    ]
    operations = [
        migrations.CreateModel(
            name='Contact',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # NOTE(review): 'Full Neme' is a typo for 'Full Name', but this
                # migration has shipped -- fix it in a new migration, not here.
                ('full_name', models.CharField(max_length=80, verbose_name='Full Neme')),
                ('email', models.EmailField(max_length=254, verbose_name='Email')),
                ('message', models.TextField()),
                # auto_now updates the timestamp on every save.
                ('timestamp', models.DateTimeField(auto_now=True)),
            ],
        ),
    ]
|
# Generated by Django 2.2.5 on 2019-10-14 18:19
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add Report.cmnt and relax Report.post/user to optional CharFields."""
    dependencies = [
        ('blog', '0008_report'),
    ]
    operations = [
        migrations.AddField(
            model_name='report',
            name='cmnt',
            field=models.CharField(blank=True, max_length=20, null=True),
        ),
        migrations.AlterField(
            model_name='report',
            name='post',
            field=models.CharField(blank=True, max_length=20, null=True),
        ),
        migrations.AlterField(
            model_name='report',
            name='user',
            field=models.CharField(blank=True, max_length=20, null=True),
        ),
    ]
|
# -*- coding: utf-8 -*-
from pytest_solr.factories import solr_core
from pytest_solr.factories import solr
import pytest
# Fixtures: a dedicated Solr process/core named 'substring_match' and a solr
# client bound to it; each test receives a fresh client via the fixture.
substring_match = solr_core('solr_process', 'substring_match')
solr = solr('substring_match')
def test_exact_term_match(solr):
    # Whole-term match on the indexed title.
    solr.add([{'id': '1', 'title': 'bananas'}])
    assert 1 == solr.search('title:bananas').hits
def test_prefix_match(solr):
    # A leading substring of the term must also match.
    solr.add([{'id': '1', 'title': 'bananas'}])
    assert 1 == solr.search('title:ban').hits
def test_multiple_terms(solr):
    # Every individual term of a multi-word title is searchable.
    solr.add([{'id': '1', 'title': 'bananas and oranges'}])
    assert 1 == solr.search('title:bananas').hits
    assert 1 == solr.search('title:oranges').hits
@pytest.mark.skip(reason='not implemented')
def test_suffix_match(solr):
    # A trailing substring should match -- feature not implemented yet.
    solr.add([{'id': '1', 'title': 'bananas'}])
    assert 1 == solr.search('title:nas').hits
def test_search_ignores_lowercase(solr):
    # Lowercase query matches capitalised index term.
    solr.add([{'id': '1', 'title': 'Bananas'}])
    assert 1 == solr.search('title:bananas').hits
def test_search_ignores_uppercase(solr):
    # Capitalised query matches lowercase index term.
    solr.add([{'id': '1', 'title': 'bananas'}])
    assert 1 == solr.search('title:Bananas').hits
def test_synonyms_apples_and_bananas_are_fruits(solr):
    # Synonym expansion: searching the hypernym finds both documents.
    solr.add([
        {'id': '1', 'title': 'bananas'},
        {'id': '2', 'title': 'apples'}
    ])
    assert 2 == solr.search('title:fruits').hits
def test_synonyms_fruits_are_not_apples(solr):
    # Synonyms are one-directional: 'fruits' does not expand to 'apples'.
    solr.add([{'id': '1', 'title': 'fruits'}])
    assert 0 == solr.search('title:apples').hits
def test_search_ignores_stopwords(solr):
    # Stopwords such as 'and' are dropped at index/search time.
    solr.add([{'id': '1', 'title': 'apples and bananas'}])
    assert 0 == solr.search('title:and').hits
def test_search_ignores_punctuation(solr):
    solr.add([
        {'id': '1', 'title': 'Colorless, Green; Ideas. Sleep? Furiously!'}
    ])
    assert 1 == solr.search('title:Colorless Green Ideas Sleep Furiously').hits
def test_search_replaces_non_ascii_characters(solr):
    # Accented/special characters are folded to their ASCII equivalents.
    solr.add([{'id': '1', 'title': u'Cölorless Grêen Idéaß Slèep Furiously'}])
    assert 1 == solr.search('title:Colorless Green Ideass Sleep Furiously').hits  # noqa
# def test_search_ignores_whitespace(solr):
# index = ' Colorless Green Ideas Sleep Furiously '
# query = 'Colorless Green Ideas Sleep Furiously'
# def test_search_ignores_inner_whitespace(solr):
# index = 'Colorless Green Ideas Sleep Furiously'
# query = 'Colorless Green Ideas Sleep Furiously'
# def test_substring_finds_prefix_in_phrase(solr):
# solr.add([{
# 'id': '1',
# 'substring_match': 'Colorless Green Ideas Sleep Furiously',
# }])
# result = solr.search(
# 'substring_match:"Color"'
# )
# assert 1 == result.hits
# assert u'Colorless Green Ideas Sleep Furiously' == \
# [x.get('substring_match') for x in result][0]
# def test_substring_match_does_not_find_prefix_in_search(solr):
# """When N-grams are created during search. The result contains elements for
# all possible substrings of the search query. This is not what the user
# would expect.
# """
# solr.add([{
# 'id': '1',
# 'substring_match': 'Colorless Green Ideas Sleep Furiously',
# }])
# solr.add([{
# 'id': '2',
# 'substring_match': 'Color',
# }])
# result = solr.search(
# 'substring_match:"Colorless"'
# )
# assert 1 == result.hits
# assert u'Colorless Green Ideas Sleep Furiously' == \
# [x.get('substring_match') for x in result][0]
|
def is_prime(a: int) -> bool:
    """Return True when *a* is a prime number.

    Bug fixed: the original only rejected a == 1, so 0 and negative numbers
    were (vacuously) reported as prime. All values below 2 are now rejected.
    Trial division runs only up to sqrt(a) instead of a - 1.
    """
    if a < 2:
        return False
    i = 2
    while i * i <= a:
        if a % i == 0:
            return False
        i += 1
    return True
def is_very_prime(a: int) -> bool:
    """Return True when every decimal prefix of *a* is prime ("very prime").

    e.g. 239 is very prime because 2, 23 and 239 are all prime.
    Bug fixed: non-positive input previously recursed forever (0 // 10 == 0);
    such values are now rejected up front.
    """
    if a <= 0:
        return False
    if a < 10:
        return a in (2, 3, 5, 7)
    return is_prime(a) and is_very_prime(a // 10)
# Print every n-digit "very prime" number in ascending order, where n is
# read from stdin.
n = int(input())
for i in range(10 ** (n - 1), 10 ** n):
    if is_very_prime(i):
        print(i)
# instead of line 12 we can use:
# if n in (2, 3, 5, 7):
#     return True
# return False
import time
import redis
import unittest
import numpy as np
from neochi.core.dataflow import data_types
from neochi.core.dataflow.notifications import test_base, ir_receiver
class TestStartedIrReceiving(test_base.BaseTestNotification, unittest.TestCase):
notification_cls = ir_receiver.StartedIrReceiving
valid_test_data = [{'published': 1, 'subscribed': 1}]
class TestStoppedIrReceivingNoSignal(test_base.BaseTestNotification, unittest.TestCase):
notification_cls = ir_receiver.StoppedIrReceivingNoSignal
valid_test_data = [{'published': None, 'subscribed': None}]
class TestStoppedIrReceivingInvalidSignal(test_base.BaseTestNotification, unittest.TestCase):
notification_cls = ir_receiver.StoppedIrReceivingInvalidSignal
valid_test_data = [{'published': None, 'subscribed': None}]
class TestStoppedIrReceivingValidSignal(test_base.BaseTestNotification, unittest.TestCase):
notification_cls = ir_receiver.StoppedIrReceivingValidSignal
valid_test_data = [{'published': None, 'subscribed': None}]
class TestStoppedIrReceivingStopMessage(test_base.BaseTestNotification, unittest.TestCase):
notification_cls = ir_receiver.StoppedIrReceivingStopMessage
valid_test_data = [{'published': None, 'subscribed': None}]
class TestStoppedIrReceivingMoreSignal(test_base.BaseTestNotification, unittest.TestCase):
notification_cls = ir_receiver.StoppedIrReceivingMoreSignal
valid_test_data = [{'published': None, 'subscribed': None}]
class TestStoppedIrSaving(test_base.BaseTestNotification, unittest.TestCase):
notification_cls = ir_receiver.StoppedIrSaving
valid_test_data = [{'published': None, 'subscribed': None}]
class TestStoppedIrSavingError(test_base.BaseTestNotification, unittest.TestCase):
notification_cls = ir_receiver.StoppedIrSavingError
valid_test_data = [{'published': None, 'subscribed': None}]
class TestStoppedDiscarding(test_base.BaseTestNotification, unittest.TestCase):
notification_cls = ir_receiver.StoppedDiscarding
valid_test_data = [{'published': None, 'subscribed': None}]
class TestStoppedDiscardingError(test_base.BaseTestNotification, unittest.TestCase):
notification_cls = ir_receiver.StoppedDiscardingError
valid_test_data = [{'published': 'Discarding error occurred', 'subscribed': 'Discarding error occurred'}]
class TestStartIrReceiving(test_base.BaseTestNotification, unittest.TestCase):
notification_cls = ir_receiver.StartIrReceiving
valid_test_data = [{'published': None, 'subscribed': None}]
class TestStopIrReceiving(test_base.BaseTestNotification, unittest.TestCase):
notification_cls = ir_receiver.StopIrReceiving
valid_test_data = [{'published': None, 'subscribed': None}]
class TestSaveIrSignal(test_base.BaseTestNotification, unittest.TestCase):
notification_cls = ir_receiver.SaveIrSignal
valid_test_data = [{'published': {'id': 0, 'name': 'TV Remote', 'sleep': 500}, 'subscribed': {'id': 0, 'name': 'TV Remote', 'sleep': 500}}]
class TestDiscardIrSignal(test_base.BaseTestNotification, unittest.TestCase):
notification_cls = ir_receiver.DiscardIrSignal
valid_test_data = [{'published': None, 'subscribed': None}]
class TestDeleteIrSignal(test_base.BaseTestNotification, unittest.TestCase):
    """Round-trip test for DeleteIrSignal; the payload is the signal id as a string."""
    notification_cls = ir_receiver.DeleteIrSignal
    valid_test_data = [{'published': '0', 'subscribed': '0'}]
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
# Root URLconf: admin, the geodata app, and the single-page app shell.
urlpatterns = [
    # NOTE(review): not a raw string, and include() around admin.site.urls
    # was removed in Django 2.0 (use url(r'^admin/', admin.site.urls)) --
    # confirm which Django version this project pins.
    url('^admin/', include(admin.site.urls)),
    # NOTE(review): r'^' matches every path and is consulted before r'^$'
    # below, so the "app" view is reached only if the geodata URLconf does
    # not itself match the empty path -- verify this ordering is intended.
    url(r'^', include('ipv6map.geodata.urls')),
    url(r'^$', TemplateView.as_view(template_name="app.html"), name="app"),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)

# Mount django-debug-toolbar only in DEBUG builds that have it installed.
if settings.DEBUG and 'debug_toolbar' in settings.INSTALLED_APPS:  # pragma: nocover
    import debug_toolbar
    urlpatterns += [
        url(r'^__debug__/', include(debug_toolbar.urls)),
    ]
|
from django.conf.urls import patterns, include, url
# NOTE(review): patterns() and dotted-string view references were removed in
# Django 1.10; this URLconf only loads on older Django releases.
urlpatterns = patterns('',
    # Examples:
    # /root-url/service-name/action/object_id
    # /customers/customers/show/24
    # ordering of urls matters here...
    # Most-specific first: model/action/id, then model/action, then model.
    url(r'(?P<model>\w+)/(?P<action>\w+)/(?P<id>\w+)$','crudstuff.views.index'),
    url(r'(?P<model>\w+)/(?P<action>\w+)$','crudstuff.views.index'),
    url(r'(?P<model>\w+)$','crudstuff.views.index' ),
    url(r'^$', 'crudstuff.views.index', name='crudstuff_index'),
    #url(r'^(?P<customer_id>\w+)/(?P<object_id>\w+)','digiportal_customers.views.main_index', name="main_index_view")
    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    #url(r'^admin/', include(admin.site.urls)),
)
|
__author__ = 'Jason Crockett'

import os
from os.path import expanduser

# Resolve the current user's home directory and echo it to stdout.
# expanduser already returns a str, so no extra conversion is needed.
UserHome = expanduser("~")
print(UserHome)
import six
import sys

# Compatibility shim: mlrose still imports sklearn.externals.six, which was
# removed from scikit-learn, so alias the standalone six package first.
sys.modules['sklearn.externals.six'] = six
import mlrose

from airportProblem import fitnessFunction, showFlights

# CustomFitness wraps a user-supplied fitness function for a custom problem.
fitness = mlrose.CustomFitness(fitnessFunction)

# Problem representation: DiscreteOpt because each gene is an int (one of 10
# candidate flights per city); length=12 is the solution size (12 flights).
# maximize=False because we want the cheapest tickets; max_val=10 limits each
# gene to 0..9.
problem = mlrose.DiscreteOpt(length = 12, fitness_fn = fitness, maximize = False, max_val = 10)

# hill_climb returns the best solution found and its cost; the problem
# representation is the only required argument.
bestSoluction, bestCost = mlrose.hill_climb(problem)
print(bestSoluction, bestCost)
showFlights(bestSoluction)
class Solution:
    """LeetCode 7 -- reverse the digits of a signed 32-bit integer.

    Both implementations return 0 when the reversed value falls outside the
    signed 32-bit range [-2**31, 2**31 - 1].
    """

    def reverse(self, x: int) -> int:
        """Reverse x digit-by-digit using integer arithmetic."""
        a = 0
        negative = 1
        if x < 0:
            negative = -1
            x = abs(x)
        while x > 0:
            a = a * 10 + x % 10
            x = x // 10
        # Overflow check (was `a > 2**31`, which wrongly accepted a == 2**31
        # for positive results): positive results are capped at 2**31 - 1,
        # negative results at -2**31 (i.e. a may equal 2**31 when negative).
        if negative == 1 and a > 2 ** 31 - 1:
            return 0
        if negative == -1 and a > 2 ** 31:
            return 0
        return a * negative

    def reverse_2(self, x):
        """Reverse x via string slicing."""
        str_x = str(x)
        reverse_int = int(str_x[::-1]) if str_x[0] != '-' else -int(str_x[:0:-1])
        # Asymmetric bounds: -2**31 is representable but 2**31 is not
        # (the old `abs(...) >= 2**31` check rejected a valid -2**31).
        if reverse_int < -2 ** 31 or reverse_int > 2 ** 31 - 1:
            return 0
        return reverse_int
|
#!/usr/bin/env python
import gzip
import sys
def readConversionFiles(chromosome_accessions):
    """Parse a tab-separated chromosome/accession table into a mapping.

    Each non-comment line is "<chromosome>\t<accession>[...]".  Returns a
    dict mapping accession -> UCSC-style chromosome name ("chr1", "chrM", ...).
    """
    accession_to_chrom = {}
    # Context manager guarantees the handle is closed even on parse errors.
    with open(chromosome_accessions, 'r') as ip:
        for line in ip:
            # Skip comment lines and (new) blank lines, which previously
            # would have raised IndexError on fields[1].
            if line.startswith('#') or not line.strip():
                continue
            fields = line.strip().split("\t")
            # The mitochondrial chromosome is "MT" in the input but "chrM"
            # in UCSC naming.
            if fields[0] == "MT":
                chrom = "chrM"
            else:
                chrom = "chr%s" % fields[0]
            accession_to_chrom[fields[1]] = chrom
    return accession_to_chrom
def convertGFF(input_gff, accession_to_chrom, output_gff):
    """Rewrite a gzipped GFF, replacing sequence accessions with chromosome names.

    ``##sequence-region`` pragma lines and data lines have their accession
    column replaced via ``accession_to_chrom``; other header lines are copied
    through unchanged.  Data lines with an unknown accession are dropped
    (matching the original behaviour).
    """
    # Text mode ('rt'/'wt') so lines are str under Python 3; the original
    # opened in binary mode and used dict.has_key, both Python-2-only.
    with gzip.open(input_gff, 'rt') as ip, gzip.open(output_gff, 'wt') as op:
        for line in ip:
            if line.startswith("##sequence-region"):
                fields = line.split()
                if fields[1] in accession_to_chrom:
                    fields[1] = accession_to_chrom[fields[1]]
                op.write("%s\n" % " ".join(fields))
            elif line[0] == '#':
                # Other header/pragma lines pass through untouched.
                op.write(line)
            else:
                accession, rest_of_line = line.split("\t", 1)
                if accession in accession_to_chrom:
                    chrom_name = accession_to_chrom[accession]
                    op.write("%s\t%s" % (chrom_name, rest_of_line))
# Command-line entry point.
if (__name__ == "__main__"):
    # NOTE(review): argument order is OUTPUT first, then input, then the
    # accession table -- easy to trip over on the command line.
    output_gff, input_gff, chromosome_accessions = sys.argv[1:]
    accession_to_chrom = readConversionFiles(chromosome_accessions)
    convertGFF(input_gff, accession_to_chrom, output_gff)
    sys.exit(0)
|
import logging
from agendatrends.models.geo import USState
from agendatrends.models.people import Legislator
from agendatrends.models.politics import PoliticalParty
from sunlightapi import sunlight, SunlightApiError
from agendatrends.pipelines.services import ServicePipeline
from agendatrends.pipelines.services.google import NewsForQuery
class SunlightPipeline(ServicePipeline):
    """Base pipeline for Sunlight API tasks; holds the shared service config."""
    # SECURITY(review): the API key is hard-coded and committed; move it to
    # environment/deployment configuration and rotate the exposed key.
    config = {
        'api_key': '5716fd8eb1ce418095fe402c7489281e'
    }
class SunlightLegislator(SunlightPipeline):
    """Fetch (or receive) one legislator record and persist it to the datastore.

    ``legislator`` is either False (fetch from the Sunlight API using
    **kwargs) or a dict of legislator fields handed over by a parent
    pipeline.  Returns the stored Legislator's datastore key as a string.
    """
    def run(self, legislator=False, **kwargs):
        if legislator is False:
            sunlight.apikey = self.config['api_key']
            ## Get legislator
            legislator = sunlight.legislators.get(**kwargs)
            logging.info('Getting legislator by ID: '+str(legislator.fec_id))
            # Datastore entities are keyed on the FEC id.
            l = Legislator(key_name=legislator.fec_id)
            legislator = legislator.__dict__
        else:
            l = Legislator(key_name=legislator['fec_id'])
        ## Map legislator properties
        for key, value in legislator.items():
            ## Lookup state record
            if str(key).lower() == 'state':
                l.state = USState.get_by_key_name(legislator[key])
            ## Lookup party record
            elif str(key).lower() == 'party':
                l.party = PoliticalParty.get_by_key_name(str(legislator[key]).upper())
            elif hasattr(l, key):
                # Copy any remaining field that the model actually defines.
                setattr(l, key, value)
        l_key = l.put()
        logging.info('Put legislator: '+str(legislator['fec_id'])+' at key '+str(l_key))
        ## Get their news
        # Kick off an asynchronous news-fetch pipeline on the 'data' queue.
        n = NewsForQuery(legislator['firstname']+' '+legislator['lastname']).start(queue_name='data')
        return str(l_key)
class SunlightLegislators(SunlightPipeline):
    """List legislators from the Sunlight API and fan out one child pipeline each."""
    def run(self, **kwargs):
        sunlight.apikey = self.config['api_key']
        ## Get legislators
        legislators = sunlight.legislators.getList(**kwargs)
        ## Spawn legislator pipelines
        # Yielding child pipelines lets the pipeline framework run them in parallel.
        for legislator in legislators:
            yield SunlightLegislator(legislator=legislator.__dict__)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import with_statement
import os
import threading
from .inotify_buffer import InotifyBuffer
from watchdog.observers.api import (
EventEmitter,
BaseObserver,
DEFAULT_EMITTER_TIMEOUT,
DEFAULT_OBSERVER_TIMEOUT
)
from watchdog.events import (
DirDeletedEvent,
DirModifiedEvent,
DirMovedEvent,
DirCreatedEvent,
FileDeletedEvent,
FileModifiedEvent,
FileMovedEvent,
FileCreatedEvent,
generate_sub_moved_events,
generate_sub_created_events,
)
from watchdog.utils import unicode_paths
class InotifyEmitter(EventEmitter):
    """Emitter backed by Linux inotify.

    Reads low-level events from an InotifyBuffer and translates them into
    watchdog file-system events on the observer's event queue.
    """
    def __init__(self, event_queue, watch, timeout=DEFAULT_EMITTER_TIMEOUT):
        EventEmitter.__init__(self, event_queue, watch, timeout)
        self._lock = threading.Lock()
        # Created lazily in on_thread_start; None until the thread runs.
        self._inotify = None

    def on_thread_start(self):
        path = unicode_paths.encode(self.watch.path)
        self._inotify = InotifyBuffer(path, self.watch.is_recursive)

    def on_thread_stop(self):
        if self._inotify:
            self._inotify.close()

    def queue_events(self, timeout):
        """Translate one buffered inotify event into watchdog events."""
        with self._lock:
            event = self._inotify.read_event()
            if event is None:
                return
            # A paired rename arrives as a (moved_from, moved_to) tuple.
            if isinstance(event, tuple):
                move_from, move_to = event
                src_path = self._decode_path(move_from.src_path)
                dest_path = self._decode_path(move_to.src_path)
                cls = DirMovedEvent if move_from.is_directory else FileMovedEvent
                self.queue_event(cls(src_path, dest_path))
                # Both parent directories changed as a result of the move.
                self.queue_event(DirModifiedEvent(os.path.dirname(src_path)))
                self.queue_event(DirModifiedEvent(os.path.dirname(dest_path)))
                if move_from.is_directory and self.watch.is_recursive:
                    # Synthesize move events for everything under the moved dir.
                    for sub_event in generate_sub_moved_events(src_path, dest_path):
                        self.queue_event(sub_event)
                return
            src_path = self._decode_path(event.src_path)
            if event.is_moved_to:
                # Unpaired moved_to (source outside the watch): report a creation.
                cls = DirCreatedEvent if event.is_directory else FileCreatedEvent
                self.queue_event(cls(src_path))
                self.queue_event(DirModifiedEvent(os.path.dirname(src_path)))
                if event.is_directory and self.watch.is_recursive:
                    for sub_event in generate_sub_created_events(src_path):
                        self.queue_event(sub_event)
            elif event.is_attrib:
                # Metadata changes (permissions, timestamps) map to "modified".
                cls = DirModifiedEvent if event.is_directory else FileModifiedEvent
                self.queue_event(cls(src_path))
            elif event.is_modify:
                cls = DirModifiedEvent if event.is_directory else FileModifiedEvent
                self.queue_event(cls(src_path))
            elif event.is_delete_self:
                # The watched path itself disappeared.
                cls = DirDeletedEvent if event.is_directory else FileDeletedEvent
                self.queue_event(cls(src_path))
            elif event.is_delete or event.is_moved_from:
                # Unpaired moved_from means "moved out of the watch": a delete.
                cls = DirDeletedEvent if event.is_directory else FileDeletedEvent
                self.queue_event(cls(src_path))
                self.queue_event(DirModifiedEvent(os.path.dirname(src_path)))
            elif event.is_create:
                cls = DirCreatedEvent if event.is_directory else FileCreatedEvent
                self.queue_event(cls(src_path))
                self.queue_event(DirModifiedEvent(os.path.dirname(src_path)))

    def _decode_path(self, path):
        """ Decode path only if unicode string was passed to this emitter. """
        if isinstance(self.watch.path, bytes):
            return path
        return unicode_paths.decode(path)
class InotifyObserver(BaseObserver):
    """Observer that monitors the file system using Linux inotify via InotifyEmitter."""
    def __init__(self, timeout=DEFAULT_OBSERVER_TIMEOUT):
        BaseObserver.__init__(self, emitter_class=InotifyEmitter,
                              timeout=timeout)
|
from tensorflow import keras
from tensorflow import keras
import os

# Force CPU execution by hiding all CUDA devices from TensorFlow.
os.environ["CUDA_VISIBLE_DEVICES"] = "-1"

# Input frame dimensions and the size of the discrete action space.
img_rows, img_cols = 240, 256
number_of_actions = 7
def generate_model():
    """Build and compile the simple convolutional policy network.

    One 8x8/stride-4 conv layer, flattened into a 256-unit dense layer,
    ending in a softmax over the available actions.  Compiled with Adam
    and MSE loss.
    """
    model = keras.models.Sequential()
    model.add(keras.layers.Convolution2D(32, 8, 4, input_shape=(img_rows, img_cols, 3)))
    model.add(keras.layers.Activation('relu'))
    model.add(keras.layers.Flatten())
    model.add(keras.layers.Dense(256))
    model.add(keras.layers.Activation('relu'))
    model.add(keras.layers.Dense(number_of_actions, activation="softmax"))
    model.compile(optimizer='adam', loss='mse', metrics=['accuracy'])
    return model
"""
def generate_complex_model():
img_inputs = keras.Input(shape=(img_rows, img_cols, 3), name="img")
img_x_middle = keras.layers.Conv2D(128, 8, 4, activation='relu')(img_inputs)
img_x = keras.layers.MaxPool2D(2)(img_x_middle)
img_x = keras.layers.Conv2D(32, 4, 2, activation='relu')(img_x)
img_x = keras.layers.Flatten()(img_x)
img_outputs = keras.layers.Dense(128, activation='relu')(img_x)
special_img = keras.layers.Conv2D(32, 5)(img_x_middle)
special_img = keras.layers.LeakyReLU(0.2)(special_img)
special_img = keras.layers.MaxPool2D(2)(special_img)
special_img = keras.layers.Conv2D(64, 3)(special_img)
special_img = keras.layers.LeakyReLU(0.2)(special_img)
special_img_outputs = keras.layers.Dense(32, activation='relu')(keras.layers.Flatten()(special_img))
part_img = keras.layers.Conv2D(32, 5, 2)(special_img)
part_img = keras.layers.LeakyReLU(0.2)(part_img)
part_img = keras.layers.SpatialDropout2D(0.1)(part_img)
part_img = keras.layers.Conv2D(64, 3, 2)(part_img)
part_img = keras.layers.LeakyReLU(0.2)(part_img)
part_img = keras.layers.SpatialDropout2D(0.1)(part_img)
part_img = keras.layers.Conv2D(64, 3, 2)(part_img)
part_img = keras.layers.LeakyReLU(0.2)(part_img)
part_img = keras.layers.Dropout(0.2)(part_img)
part_img = keras.layers.Flatten()(part_img)
part_img_outputs = keras.layers.Dense(32, activation='relu')(part_img)
history_action_inputs = keras.Input(shape=(100, ), name="action")
history_action_x = keras.layers.Dense(2)(history_action_inputs)
history_action_outputs = keras.layers.Dense(64, activation="relu")(history_action_x)
history_x_location_inputs = keras.Input(shape=(100, ), name="x_position")
history_x_location_x = keras.layers.Dense(16)(history_x_location_inputs)
history_x_location_outputs = keras.layers.Dense(8, activation="relu")(history_x_location_x)
history_y_location_inputs = keras.Input(shape=(100, ), name="y_position")
history_y_location_x = keras.layers.Dense(16)(history_y_location_inputs)
history_y_location_outputs = keras.layers.Dense(8, activation="relu")(history_y_location_x)
x = keras.layers.concatenate([img_outputs, special_img_outputs, part_img_outputs, history_action_outputs, history_x_location_outputs, history_y_location_outputs])
x = keras.layers.Dense(512, activation="relu")(x)
y = keras.layers.Dense(number_of_actions, activation="softmax")(x)
model = keras.Model(
inputs=[img_inputs, history_action_inputs, history_x_location_inputs, history_y_location_inputs],
outputs=y
)
model.compile(optimizer='adam',
loss='categorical_crossentropy',
metrics=['accuracy'])
return model
"""
"""
def generate_complex_model():
img_inputs = keras.Input(shape=(img_rows, img_cols, 3), name="img")
img_x = keras.layers.Conv2D(256, 5)(img_inputs)
img_x = keras.layers.LeakyReLU(0.2)(img_x)
img_x = keras.layers.MaxPool2D(2)(img_x)
#img_x = keras.layers.Conv2D(64, 3)(img_x)
#img_x = keras.layers.LeakyReLU(0.2)(img_x)
img_x = keras.layers.Flatten()(img_x)
img_outputs = keras.layers.Dense(64, activation='relu')(img_x)
history_action_inputs = keras.Input(shape=(32, ), name="action")
history_action_x = keras.layers.Dense(2)(history_action_inputs)
history_action_outputs = keras.layers.Dense(32, activation="relu")(history_action_x)
history_x_location_inputs = keras.Input(shape=(32, ), name="x_position")
history_x_location_x = keras.layers.Dense(16)(history_x_location_inputs)
history_x_location_outputs = keras.layers.Dense(8, activation="relu")(history_x_location_x)
history_y_location_inputs = keras.Input(shape=(32, ), name="y_position")
history_y_location_x = keras.layers.Dense(16)(history_y_location_inputs)
history_y_location_outputs = keras.layers.Dense(8, activation="relu")(history_y_location_x)
x = keras.layers.concatenate([img_outputs, history_action_outputs, history_x_location_outputs, history_y_location_outputs])
x = keras.layers.Dense(256, activation="relu")(x)
y = keras.layers.Dense(number_of_actions, activation="softmax")(x)
model = keras.Model(
inputs=[img_inputs, history_action_inputs, history_x_location_inputs, history_y_location_inputs],
outputs=y
)
model.compile(optimizer='adam',
loss='mse',
#loss_weights=[1., 0.5, 0.2, 0.2],
metrics=['accuracy'])
return model
"""
def generate_complex_model():
    """Build and compile the Q-value regression network.

    Inputs: the candidate action, the current frame, and 100-step histories
    of actions and x/y positions.  Each input is embedded with dense (and,
    for the frame, convolutional) layers, all branches are concatenated,
    and a single linear unit regresses the Q-value.  Compiled with Adam
    and MSE loss.
    """
    # Action branch: scalar action id -> small dense embedding.
    act_in = keras.Input(shape=(1, ), name="action")
    act_branch = keras.layers.Dense(8, activation="relu")(act_in)

    # Frame branch: two conv stages, then flatten into a dense embedding.
    frame_in = keras.Input(shape=(img_rows, img_cols, 3), name="img")
    conv = keras.layers.Conv2D(64, 8, 4, activation='relu')(frame_in)
    conv = keras.layers.MaxPool2D(2)(conv)
    conv = keras.layers.Conv2D(32, 4, 2, activation='relu')(conv)
    conv = keras.layers.Flatten()(conv)
    frame_branch = keras.layers.Dense(128, activation='relu')(conv)

    # History branches: fixed-length (100) vectors, each densely embedded.
    hist_act_in = keras.Input(shape=(100, ), name="history_action")
    hist_act_branch = keras.layers.Dense(32, activation="relu")(hist_act_in)
    hist_x_in = keras.Input(shape=(100, ), name="history_x_position")
    hist_x_branch = keras.layers.Dense(32, activation="relu")(hist_x_in)
    hist_y_in = keras.Input(shape=(100, ), name="history_y_position")
    hist_y_branch = keras.layers.Dense(32, activation="relu")(hist_y_in)

    merged = keras.layers.concatenate(
        [act_branch, frame_branch, hist_act_branch, hist_x_branch, hist_y_branch])
    merged = keras.layers.Dense(512, activation="relu")(merged)
    q_value = keras.layers.Dense(1, activation="linear")(merged)

    model = keras.Model(
        inputs=[act_in, frame_in, hist_act_in, hist_x_in, hist_y_in],
        outputs=q_value
    )
    model.compile(optimizer='adam', loss='mse', metrics=['accuracy'])
    return model
# Smoke test: building the model exercises all layer wiring.
if __name__ == "__main__":
    generate_complex_model()
# NOTE(review): everything from here down duplicates the top of this file
# (same constants and function definitions, which shadow the earlier ones);
# remove the duplicate copy once confirmed unused.
import os
os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
img_rows, img_cols = 240, 256
number_of_actions = 7
def generate_model():
    """Build and compile the simple convolutional policy network.

    NOTE(review): duplicate definition -- this shadows the generate_model
    defined earlier in the same module.
    """
    model = keras.models.Sequential([
        keras.layers.Convolution2D(32, 8, 4, input_shape=(img_rows, img_cols, 3)),
        keras.layers.Activation('relu'),
        keras.layers.Flatten(),
        keras.layers.Dense(256),
        keras.layers.Activation('relu'),
        keras.layers.Dense(number_of_actions, activation="softmax"),
    ])
    model.compile(optimizer='adam',
                  loss='mse',
                  metrics=['accuracy'])
    return model
"""
def generate_complex_model():
img_inputs = keras.Input(shape=(img_rows, img_cols, 3), name="img")
img_x_middle = keras.layers.Conv2D(128, 8, 4, activation='relu')(img_inputs)
img_x = keras.layers.MaxPool2D(2)(img_x_middle)
img_x = keras.layers.Conv2D(32, 4, 2, activation='relu')(img_x)
img_x = keras.layers.Flatten()(img_x)
img_outputs = keras.layers.Dense(128, activation='relu')(img_x)
special_img = keras.layers.Conv2D(32, 5)(img_x_middle)
special_img = keras.layers.LeakyReLU(0.2)(special_img)
special_img = keras.layers.MaxPool2D(2)(special_img)
special_img = keras.layers.Conv2D(64, 3)(special_img)
special_img = keras.layers.LeakyReLU(0.2)(special_img)
special_img_outputs = keras.layers.Dense(32, activation='relu')(keras.layers.Flatten()(special_img))
part_img = keras.layers.Conv2D(32, 5, 2)(special_img)
part_img = keras.layers.LeakyReLU(0.2)(part_img)
part_img = keras.layers.SpatialDropout2D(0.1)(part_img)
part_img = keras.layers.Conv2D(64, 3, 2)(part_img)
part_img = keras.layers.LeakyReLU(0.2)(part_img)
part_img = keras.layers.SpatialDropout2D(0.1)(part_img)
part_img = keras.layers.Conv2D(64, 3, 2)(part_img)
part_img = keras.layers.LeakyReLU(0.2)(part_img)
part_img = keras.layers.Dropout(0.2)(part_img)
part_img = keras.layers.Flatten()(part_img)
part_img_outputs = keras.layers.Dense(32, activation='relu')(part_img)
history_action_inputs = keras.Input(shape=(100, ), name="action")
history_action_x = keras.layers.Dense(2)(history_action_inputs)
history_action_outputs = keras.layers.Dense(64, activation="relu")(history_action_x)
history_x_location_inputs = keras.Input(shape=(100, ), name="x_position")
history_x_location_x = keras.layers.Dense(16)(history_x_location_inputs)
history_x_location_outputs = keras.layers.Dense(8, activation="relu")(history_x_location_x)
history_y_location_inputs = keras.Input(shape=(100, ), name="y_position")
history_y_location_x = keras.layers.Dense(16)(history_y_location_inputs)
history_y_location_outputs = keras.layers.Dense(8, activation="relu")(history_y_location_x)
x = keras.layers.concatenate([img_outputs, special_img_outputs, part_img_outputs, history_action_outputs, history_x_location_outputs, history_y_location_outputs])
x = keras.layers.Dense(512, activation="relu")(x)
y = keras.layers.Dense(number_of_actions, activation="softmax")(x)
model = keras.Model(
inputs=[img_inputs, history_action_inputs, history_x_location_inputs, history_y_location_inputs],
outputs=y
)
model.compile(optimizer='adam',
loss='categorical_crossentropy',
metrics=['accuracy'])
return model
"""
"""
def generate_complex_model():
img_inputs = keras.Input(shape=(img_rows, img_cols, 3), name="img")
img_x = keras.layers.Conv2D(256, 5)(img_inputs)
img_x = keras.layers.LeakyReLU(0.2)(img_x)
img_x = keras.layers.MaxPool2D(2)(img_x)
#img_x = keras.layers.Conv2D(64, 3)(img_x)
#img_x = keras.layers.LeakyReLU(0.2)(img_x)
img_x = keras.layers.Flatten()(img_x)
img_outputs = keras.layers.Dense(64, activation='relu')(img_x)
history_action_inputs = keras.Input(shape=(32, ), name="action")
history_action_x = keras.layers.Dense(2)(history_action_inputs)
history_action_outputs = keras.layers.Dense(32, activation="relu")(history_action_x)
history_x_location_inputs = keras.Input(shape=(32, ), name="x_position")
history_x_location_x = keras.layers.Dense(16)(history_x_location_inputs)
history_x_location_outputs = keras.layers.Dense(8, activation="relu")(history_x_location_x)
history_y_location_inputs = keras.Input(shape=(32, ), name="y_position")
history_y_location_x = keras.layers.Dense(16)(history_y_location_inputs)
history_y_location_outputs = keras.layers.Dense(8, activation="relu")(history_y_location_x)
x = keras.layers.concatenate([img_outputs, history_action_outputs, history_x_location_outputs, history_y_location_outputs])
x = keras.layers.Dense(256, activation="relu")(x)
y = keras.layers.Dense(number_of_actions, activation="softmax")(x)
model = keras.Model(
inputs=[img_inputs, history_action_inputs, history_x_location_inputs, history_y_location_inputs],
outputs=y
)
model.compile(optimizer='adam',
loss='mse',
#loss_weights=[1., 0.5, 0.2, 0.2],
metrics=['accuracy'])
return model
"""
def generate_complex_model():
    """Build and compile the Q-value regression network.

    NOTE(review): duplicate definition -- this shadows the
    generate_complex_model defined earlier in the same module.
    """
    action_inputs = keras.Input(shape=(1, ), name="action")
    action_outputs = keras.layers.Dense(8, activation="relu")(action_inputs)
    img_inputs = keras.Input(shape=(img_rows, img_cols, 3), name="img")
    img_x = keras.layers.Conv2D(64, 8, 4, activation='relu')(img_inputs)
    img_x = keras.layers.MaxPool2D(2)(img_x)
    img_x = keras.layers.Conv2D(32, 4, 2, activation='relu')(img_x)
    img_x = keras.layers.Flatten()(img_x)
    img_outputs = keras.layers.Dense(128, activation='relu')(img_x)
    history_action_inputs = keras.Input(shape=(100, ), name="history_action")
    history_action_outputs = keras.layers.Dense(32, activation="relu")(history_action_inputs)
    history_x_location_inputs = keras.Input(shape=(100, ), name="history_x_position")
    history_x_location_outputs = keras.layers.Dense(32, activation="relu")(history_x_location_inputs)
    history_y_location_inputs = keras.Input(shape=(100, ), name="history_y_position")
    history_y_location_outputs = keras.layers.Dense(32, activation="relu")(history_y_location_inputs)
    x = keras.layers.concatenate([action_outputs, img_outputs, history_action_outputs, history_x_location_outputs, history_y_location_outputs])
    x = keras.layers.Dense(512, activation="relu")(x)
    y = keras.layers.Dense(1, activation="linear")(x)
    model = keras.Model(
        inputs=[action_inputs, img_inputs, history_action_inputs, history_x_location_inputs, history_y_location_inputs],
        outputs=y
    )
    model.compile(optimizer='adam',
                  loss='mse',
                  metrics=['accuracy'])
    return model
# Smoke test (duplicate of the guard earlier in this module).
if __name__ == "__main__":
    generate_complex_model()
|
from __future__ import division, print_function
# coding=utf-8
import sys
import os
import glob
import re
import numpy as np
import tensorflow as tf
# Keras
from keras.models import load_model
from keras.preprocessing import image
# Flask utils
from flask import Flask, redirect, url_for, request, render_template, session
from werkzeug.utils import secure_filename
from mysqlconnection import connectToMySQL
from werkzeug.security import check_password_hash as checkph
from werkzeug.security import generate_password_hash as genph
import sys
sys.path.insert(0, 'C:/Users/Jeanette/Desktop/cursoP/Red_Neuronal_Convolucional_rnc/Cancer_cervical-web/package/')
# Define a flask app
from package import densenet_M
from package import imagenet_utils_M
# from keras.applications.imagenet_utils import preprocess_input, decode_predictions
from datetime import datetime
app = Flask(__name__)

# Model saved with Keras model.save()
MODEL_PATH = 'C:/Users/Jeanette/Desktop/cursoP/Red_Neuronal_Convolucional_rnc/Cancer_cervical-web/model/modelo_densenet_v2_entrenado.h5'
# Load the trained DenseNet classifier once at startup.
# NOTE(review): the absolute Windows path ties the app to one machine --
# consider reading it from configuration.
model =tf.keras.models.load_model(MODEL_PATH)
# model._make_predict_function() # Necessary ----compila la función predict
# print('Model loaded. Start serving...')

# You can also use pretrained model from Keras
# Check https://keras.io/applications/
#from keras.applications.resnet50 import ResNet50
#model = ResNet50(weights='imagenet')
#model.save('')
print('Model loaded. Check http://127.0.0.1:5000/')

# SECURITY(review): hard-coded session secret; load from environment instead.
app.secret_key='appLogin'
mysql = connectToMySQL('cancer') # db
@app.route('/')
def main():
    """Landing page: show the login form."""
    return render_template('login.html')
@app.route('/busqueda')
def busqueda():
    """Prediction/search page for logged-in regular users."""
    if 'nombre' in session:
        return render_template('index.html')
    else:
        return redirect( url_for('ingresar'))
@app.route('/busqueda_adm')
def busqueda_adm():
    """Admin landing page: redirect logged-in admins to the user list."""
    if 'nombre' in session:
        # return render_template('historico_admi.html')
        return redirect(url_for('historialUsers'))
    else:
        return redirect( url_for('ingresar'))
@app.route('/ingresar', methods=['GET','POST'])
def ingresar():
    """Authenticate a user and route to the user or admin dashboard."""
    if (request.method == 'GET'):
        if 'nombre' in session:
            return render_template('index.html')
        else:
            return render_template('login.html')
    else:
        nombre = request.form['username']
        contrasena = request.form['pass']
        # NOTE(review): the session is populated BEFORE the password check,
        # so a failed login still leaves 'nombre' set -- verify intent.
        session['nombre'] = nombre
        hash_contrasena = genph(contrasena)
        usuario = mysql.query_db("select nombre, contrasena, rol from users where nombre =%s", [nombre])
        print('usuario:', usuario)
        if (len(usuario) != 0):
            print(usuario) # list of dicts, e.g. [{'nombre': 'admin', 'contrasena': 'admin'}]
            for row in usuario:
                username = row['nombre']
                password = row['contrasena']
                rol = row['rol']
                print(username, password, rol)
            # SECURITY(review): checkph(hash_contrasena, password) uses the
            # freshly hashed input as the stored hash and the DB value as
            # the plaintext -- it only matches if the DB stores plaintext
            # passwords.  The conventional call is
            # checkph(stored_hash, submitted_password); confirm how
            # passwords are stored before changing.
            if (checkph(hash_contrasena, password)):
                if rol =='Administrador':
                    return redirect(url_for('busqueda_adm',usr=username))
                else:
                    return redirect(url_for('busqueda', usr=username))
            else:
                # Flask("La contraseña es incorrecta", "alert-warning")
                return render_template("login.html")
        else:
            return render_template("login.html")
@app.route('/historial')
def historial():
    """Show the full prediction history to a logged-in user."""
    if 'nombre' in session:
        data = mysql.query_db("SELECT * FROM historial")
        return render_template('historico.html', pacientes= data)
    else:
        return render_template("login.html")
@app.route('/historialUsers')
def historialUsers():
    """Show the registered users list (admin view)."""
    if 'nombre' in session:
        data = mysql.query_db("SELECT * FROM users")
        return render_template('historico_admi.html', users= data)
        #return render_template('created_user.html', users= data)
    else:
        return render_template("login.html")
@app.route('/crear_usuario')
def crear_usuario():
    """Render the new-user form for a logged-in (admin) session."""
    if 'nombre' in session:
        return render_template('created_user.html')
    else:
        return redirect(url_for('historialUsers'))
@app.route('/created', methods = ['POST'])
def createdUser():
    """Insert a new user record from the admin form and return to the list."""
    if request.method == 'POST':
        nombre = request.form['m_name']
        apellido = request.form['m_lastname']
        identi = request.form['m_identity']
        rol = request.form['rol']
        usuario = request.form['username']
        contrasena = request.form['password']
        # SECURITY(review): the password is stored in plain text; hash it
        # (e.g. generate_password_hash) and adjust the login check to match.
        insertar = mysql.query_db("INSERT INTO users(nombre,contrasena,rol,name_,lastname,id_medico) values (%s,%s,%s,%s,%s,%s)", (usuario,contrasena,rol,nombre,apellido,identi))
    return redirect(url_for('historialUsers'))
@app.route('/editar/<string:id>')
def editar(id):
    """Render the edit form for one history record."""
    #cur = mysql.connection.curso()
    #cur.execute('CONSULTA select where id= {0}', format(id))
    data = mysql.query_db("SELECT * FROM historial where historialId = %s", (id))
    # NOTE(review): 'bandera' is never used afterwards -- leftover debugging?
    bandera = data[0]
    return render_template('edit.html', contac = data[0])
@app.route('/update/<string:id>', methods = ['POST'])
def update(id):
    """Update a history record's observation and its goal-standard label.

    Form field 'goalstandar': '0' -> Errado, '1' -> Confirmado,
    anything else -> Pendiente.
    """
    if request.method == 'POST':
        observacion = request.form['observacion']
        goalestandar = request.form['goalstandar']
        if (goalestandar == '0'):
            print('Errado')
            data = mysql.query_db("UPDATE historial set observacion = %s, goal_standar = %s WHERE historialId = %s", (observacion,'Errado',id))
            return redirect(url_for('historial'))
        if (goalestandar == '1'):
            print('Confirmado')
            data = mysql.query_db("UPDATE historial set observacion = %s, goal_standar = %s WHERE historialId = %s", (observacion,'Confirmado',id))
            return redirect(url_for('historial'))
        else:
            print('Pendiente')
            data = mysql.query_db("UPDATE historial set observacion = %s, goal_standar = %s WHERE historialId = %s", (observacion,'Pendiente',id))
            return redirect(url_for('historial'))
@app.route('/filtrar', methods = ['POST'])
def filtrar():
    """Filter the history list by optional date range and/or patient id.

    Empty form fields mean "no constraint"; every combination of
    inicio/fin/paciente maps to a dedicated parameterized query.
    """
    inicio = request.form['inicio']
    fin = request.form['fin']
    paciente = request.form['paciente']
    print(paciente)
    if (paciente ==''):
        if (inicio!='' and fin!=''):
            data = mysql.query_db("SELECT * FROM historial WHERE fecha >= %s AND fecha <= %s", (inicio,fin))
            return render_template('historico.html', pacientes= data)
        if (inicio!='' and fin ==''):
            data = mysql.query_db("SELECT * FROM historial WHERE fecha >= %s", (inicio))
            return render_template('historico.html', pacientes= data)
        if (inicio =='' and fin!=''):
            data = mysql.query_db("SELECT * FROM historial WHERE fecha <= %s", (fin))
            return render_template('historico.html', pacientes= data)
        if (inicio =='' and fin==''):
            # No filters at all: show the unfiltered history.
            return redirect(url_for('historial'))
    if (paciente !=''):
        if (inicio!='' and fin!=''):
            data = mysql.query_db("SELECT * FROM historial WHERE fecha >= %s AND fecha <= %s AND idPaciente = %s", (inicio,fin,paciente))
            return render_template('historico.html', pacientes= data)
        if (inicio!='' and fin ==''):
            data = mysql.query_db("SELECT * FROM historial WHERE fecha >= %s AND idPaciente = %s", (inicio,paciente))
            return render_template('historico.html', pacientes= data)
        if (inicio =='' and fin!=''):
            data = mysql.query_db("SELECT * FROM historial WHERE fecha <= %s AND idPaciente = %s", (fin,paciente))
            return render_template('historico.html', pacientes= data)
        if (inicio =='' and fin==''):
            print ('Entro aqui amiguito')
            data = mysql.query_db("SELECT * FROM historial WHERE idPaciente = %s", (paciente))
            return render_template('historico.html', pacientes= data)
@app.route('/salir')
def salir():
    """Log out: clear the whole session and return to the login page."""
    session.clear()
    return redirect(url_for('ingresar'))
def model_predict(img_path, model):
    """Load an image, preprocess it, and return the model's raw predictions."""
    # img = image.load_img(img_path, target_size=(150, 150))
    # 224x224 matches the DenseNet input size the model was trained with.
    img = image.load_img(img_path, target_size=(224, 224))

    # Preprocessing the image
    x = image.img_to_array(img)
    # x = np.true_divide(x, 255)
    # Add the batch dimension expected by model.predict.
    x = np.expand_dims(x, axis=0)

    # Be careful how your trained model deals with the input
    # otherwise, it won't make correct prediction!
    x = imagenet_utils_M.preprocess_input(x, mode='caffe')

    preds = model.predict(x)
    print("model predict.................")
    return preds
@app.route('/predict', methods=['GET', 'POST'])
def upload():
    """Accept an uploaded image, run the classifier, and return the top label."""
    if request.method == 'POST':
        # Get the file from post request
        f = request.files['file']
        print('file::::',f)
        # Save the file to ./uploads
        # secure_filename() sanitizes the client-supplied name before joining.
        basepath = os.path.dirname(__file__)
        file_path = os.path.join(
            basepath, 'uploads', secure_filename(f.filename))
        f.save(file_path)

        # Make prediction
        preds = model_predict(file_path, model)

        # Process your result for human
        # pred_class = preds.argmax(axis=-1) # Simple argmax
        pred_class = imagenet_utils_M.decode_predictions(preds, top=1) # ImageNet Decode
        result = str(pred_class[0][0][1]) # Convert to string
        return result
    return None # GET
@app.route('/guardar', methods = ['POST'])
def guardar():
    """Persist one prediction result into the history table (goal standard 'Pendiente')."""
    if request.method == 'POST':
        idpaciente = request.form['idPaciente']
        resultado = request.form['resultado']
        observacion = request.form['obs']
        now = datetime.now()
        now.strftime('%Y-%m-%d')
        # New records start as 'Pendiente' until a reviewer confirms them.
        insertar = mysql.query_db("INSERT INTO historial(idPaciente,resultado,observacion,fecha,goal_standar) values (%s,%s,%s,%s,%s)", (idpaciente,resultado,observacion,now.strftime('%Y-%m-%d'),'Pendiente'))
        print('fecha: ',now.strftime('%Y-%m-%d'))
        print(idpaciente)
        print(resultado)
        print(observacion)
    return redirect(url_for('busqueda'))
# Development entry point; disable debug=True in production.
if __name__ == '__main__':
    app.run(debug=True, port=5000)
import os
import numpy as np
import cv2
import mrcnn.config
import mrcnn.utils
from mrcnn.model import MaskRCNN
from pathlib import Path
import requests
class MaskRCNNConfig(mrcnn.config.Config):
    """Inference configuration for the COCO-pretrained Mask R-CNN."""
    NAME = "coco_config"
    GPU_COUNT = 1
    IMAGES_PER_GPU = 1
    NUM_CLASSES = 1 + 80  # background + 80 COCO classes
    DETECTION_MIN_CONFIDENCE = 0.6  # drop detections below 60% confidence
def get_car_boxes(boxes, class_ids):
    """Return the boxes whose class id denotes a vehicle.

    COCO class ids 3 (car), 8 (truck) and 6 (bus) count as vehicles;
    the kept boxes are returned as a numpy array.
    """
    vehicle_ids = {3, 8, 6}
    return np.array([box for idx, box in enumerate(boxes)
                     if class_ids[idx] in vehicle_ids])
# Paths for logs, pretrained weights, and the input footage.
BASE_DIR = Path(".")
MODEL_DIR = os.path.join(BASE_DIR,'logs')
COCO_PATH = os.path.join(BASE_DIR,"mask_rcnn_coco.h5")
VIDEO_PATH = os.path.join(BASE_DIR,"video/footage.mp4")
URL = "https://localhost:8000/api/"

# Download the pretrained COCO weights on first run.
if not os.path.exists(COCO_PATH):
    mrcnn.utils.download_trained_weights(COCO_PATH)

model = MaskRCNN(mode="inference", model_dir=MODEL_DIR, config=MaskRCNNConfig())
model.load_weights(COCO_PATH,by_name=True)

#loading video
video = cv2.VideoCapture(VIDEO_PATH)
# out = cv2.VideoWriter('output.mp4', fourcc, 20.0, (640,480))

# State across frames: parking-spot boxes and the free-spot debounce counter.
parked_cars = None
free_space_frames = 0
# Main loop: the cars detected in the first frame define the parking spots;
# every later frame is compared against them via IoU overlap.
while video.isOpened():
    loaded, frame = video.read()
    if not loaded:
        break
    # OpenCV delivers BGR; the detector expects RGB.
    rgb_image = frame[:,:,::-1]
    results = model.detect([rgb_image], verbose=0)
    res = results[0]
    if parked_cars is None:
        # Bootstrap: treat the first frame's vehicles as the parking spots.
        parked_cars = get_car_boxes(res['rois'], res['class_ids'])
    else:
        car_boxes = get_car_boxes(res['rois'], res['class_ids'])
        # IoU of every known spot against every currently detected vehicle.
        overlaps = mrcnn.utils.compute_overlaps(parked_cars, car_boxes)
        free_space = False
        for parking_area, overlap_areas in zip(parked_cars, overlaps):
            max_IoU_overlap = np.max(overlap_areas)
            y1, x1, y2, x2 = parking_area
            if max_IoU_overlap < 0.15:
                # Barely overlaps any car: the spot looks free (green box).
                cv2.rectangle(frame, (x1, y1), (x2, y2), (0, 255, 0), 3)
                free_space = True
            else:
                # Spot still occupied (thin red box).
                cv2.rectangle(frame, (x1, y1), (x2, y2), (0, 0, 255), 1)
            font = cv2.FONT_HERSHEY_DUPLEX
            cv2.putText(frame, f"{max_IoU_overlap:0.2}", (x1 + 6, y2 - 6), font, 0.3, (255, 255, 255))
        # Debounce: require >10 consecutive free frames to ride out
        # detection noise before reporting.
        if free_space:
            free_space_frames +=1
        else:
            free_space_frames = 0
        if free_space_frames>10:
            print("Free Space")
        # out.write(frame)
    cv2.imshow('video',frame)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
video.release()
cv2.destroyAllWindows()
'''
Given a non-empty array containing only positive integers,
find if the array can be partitioned into two subsets such
that the sum of elements in both subsets is equal.
Note: dp = [False] * (sum//2)+1
make dp[i]=1 if dp[i] or dp[num-i] where num in nums and 1<=i<=len(dp)
'''
class Solution:
    def canPartition(self, nums: List[int]) -> bool:
        """Return True if nums can be split into two subsets of equal sum.

        Classic subset-sum DP: track which sums are reachable and check
        whether half the total is among them.
        """
        total = sum(nums)
        # An odd total can never be split into two equal halves.
        if total % 2 != 0:
            return False
        target = total // 2
        reachable = [False] * (target + 1)
        reachable[0] = True  # the empty subset sums to 0
        for value in nums:
            # Walk right-to-left so each number is used at most once.
            for s in range(target, value - 1, -1):
                if reachable[s - value]:
                    reachable[s] = True
        return reachable[target]
|
#!/usr/bin/python
from math import floor
def gcd(a, b):
    """Greatest common divisor of two positive integers (Euclid's algorithm)."""
    # Reduce the larger value modulo the smaller until it divides evenly.
    if a > b:
        return b if a % b == 0 else gcd(b, a % b)
    return a if b % a == 0 else gcd(a, b % a)
def smallest_multiple(num):
    """Smallest positive integer divisible by every integer in 2..num.

    Brute-force variant: for each i, scan upward from i for the first
    number divisible by both i and the running result -- i.e. lcm(result, i),
    which is guaranteed to appear no later than result * i.
    """
    result = 1
    for divisor in range(2, num + 1):
        candidate = divisor
        while candidate <= result * divisor:
            if candidate % divisor == 0 and candidate % result == 0:
                result = candidate
                break
            candidate += 1
    return result
#we know that the smallest common multiple is equal to
#i*j // gcd(i,j) <---- greatest common divisor
def smallest_multiple_fastest(num):
    """Smallest positive integer divisible by every integer in 2..num.

    Bug fix: the original used true division (`ans*i/gcd(...)`), which
    turned the result into a float and loses precision once the product
    exceeds 2**53; floor division keeps everything in exact integers.
    """
    from math import gcd as _gcd  # exact integer gcd from the stdlib
    ans = 1
    for i in range(2, num + 1):
        # lcm(ans, i) = ans * i // gcd(ans, i)
        ans = ans * i // _gcd(ans, i)
    return ans
print(smallest_multiple_fastest(20))
|
def accumulate(total=0):
    """Generator that keeps a running sum of values sent into it.

    Protocol: advance with next() to reach the bare yield, then send(x);
    the send() call returns the updated running total.
    """
    while True:
        increment = yield    # wait for a value from send()
        total += increment
        yield total          # hand back the updated running total
if __name__ == '__main__':
    acc = accumulate()
    # Bug fix: generators have no .next() method in Python 3; use the
    # built-in next() to advance the generator to its bare `yield`.
    next(acc)
    print(acc.send(10))
    next(acc)
    print(acc.send(20))
    next(acc)
    print(acc.send(30))
    next(acc)
|
# coding=utf-8
import io
import sys
from selenium import webdriver
# from selenium.webdriver.support.ui import Testrubbish
from selenium.webdriver.support import expected_conditions as ec
from selenium.webdriver.common.by import By
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support.ui import WebDriverWait
from time import sleep
import time #生成时间戳用
import os #上传autoit用
# 发送邮件
import smtplib
from email.mime.text import MIMEText # 正文
from email.header import Header # 头部
from email.mime.multipart import MIMEMultipart # 上传附件用
from common.private import UserProperty
from common.comfunction import *
# from buttonFunction.store import test_store
import unittest
# Quick sanity-check snippet: asserts that the digit '3' appears in '23'.
s = 23
t = 3
assert str(t) in str(s)
|
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 05 09:14:16 2017
Determines the FX Sharpe Ratio.
Usual Input is the Daily Returns, But Can Deal with Cumulative
Returns if flagged with third parameter = 'cumulRet'
@author: luoying.li
"""
import numpy as np
from math import sqrt
class SharpeRatio:
    """Annualized Sharpe ratio of a return series.

    Accepts plain periodic returns, or cumulative returns when the extra
    positional flag 'cumulRet' is supplied.
    """
    def __init__(self,returnSeries,dataFreq,*cumulRet):
        """Constructor.

        Args:
            returnSeries: periodic returns, or cumulative returns when the
                'cumulRet' flag is passed.
            dataFreq: 'daily', 'weekly', 'monthly', 'quarterly' or 'annual'.
            *cumulRet: optional single flag string 'cumulRet'.

        Raises:
            ValueError: if an extra argument is given but is not 'cumulRet'.
        """
        if cumulRet==():  # no flag: plain periodic returns
            self.x=returnSeries
        elif cumulRet[0]=='cumulRet':  # flag: convert cumulative returns first
            self.x=self.ConvertReturnSeries(returnSeries)
        else:
            # Bug fix: `raise("Incorrect Input")` raised a TypeError (a str
            # is not an exception in Python 3); raise a proper ValueError.
            raise ValueError("Incorrect Input")
        self.dataFreq=dataFreq
        self.sharpeRatio=self.CalcSharpe()

    def CalcSharpe(self):
        """Calc Sharpe Ratio: (mean / std) scaled to annual frequency."""
        self.scaling=self.DetermineScaling(self.dataFreq)
        self.avg=np.nanmean(self.x)
        self.vol=np.nanstd(self.x)
        s=(self.avg/self.vol)*self.scaling
        return s

    @staticmethod
    def DetermineScaling(dataFreq):
        """Annualization factor n/sqrt(n) for n periods per year.

        Raises:
            ValueError: for an unrecognized frequency (the original hit an
                UnboundLocalError instead).
        """
        if dataFreq=='daily':
            s=252/sqrt(252)
        elif dataFreq=='weekly':
            s=52/sqrt(52)
        elif dataFreq=='monthly':
            s=12/sqrt(12)
        elif dataFreq=='quarterly':
            s=4/sqrt(4)
        elif dataFreq=='annual':
            s=1
        else:
            raise ValueError("Unknown data frequency: {}".format(dataFreq))
        return s

    @staticmethod
    def ConvertReturnSeries(cumulRet):
        """Convert cumulative returns to periodic: r_t = c_t / c_{t-1} - 1."""
        # Robustness fix: accept plain Python lists as well as ndarrays
        # (list + 0.0 raised a TypeError in the original).
        cumulRet = np.asarray(cumulRet, dtype=float)
        fwdShift=np.roll(cumulRet,1,axis=0)
        convert = (cumulRet+0.0)/fwdShift-1
        # First element is bogus (np.roll wraps the last value around); drop it.
        convert = convert[1:]
        return convert
|
# -*- coding: utf-8 -*-
import scrapy
from dangdang.items import DangdangItem
from bs4 import BeautifulSoup
from bs4 import UnicodeDammit
class Spider1Spider(scrapy.Spider):
    """Scrapy spider that searches dangdang.com for books matching `key`
    and yields one DangdangItem per result row, following pagination.
    """
    name = 'spider1'
    allowed_domains = ['search.dangdang.com']
    start_urls = ['http://search.dangdang.com/']
    # Search keyword appended as ?key=... to the start URL.
    key = 'python'

    def start_requests(self):
        # Build the search URL from the class-level start URL and keyword.
        url = Spider1Spider.start_urls[0] + "?key=" + Spider1Spider.key
        print("start_requests url:", url)
        yield scrapy.Request(url, self.parse)

    def parse(self, response):
        """Extract the book fields from one results page, then queue the
        next page (if any) back into this same callback."""
        try:
            # Parse the page; dangdang serves utf8 or gbk, let bs4 decide.
            dammit = UnicodeDammit(response.body, ["utf8", "gbk"])
            data = dammit.unicode_markup
            selector = scrapy.Selector(text=data)
            # Each result row is an <li> whose class attribute contains 'line'.
            lis = selector.xpath("//li[re:test(@class, 'line')]")
            for li in lis:
                title = li.xpath("./a//@title").extract_first()
                detail = li.xpath("./p[@class='detail']/text()").extract_first()
                price = li.xpath("./p/span[@class='search_now_price']/text()").extract_first()
                author = li.xpath("./p[@class='search_book_author']/span/a[1]//@title").extract_first()
                date = li.xpath("./p[@class='search_book_author']/span[last()-1]/text()").extract_first()
                publisher = li.xpath("./p[@class='search_book_author']/span/a[@name='P_cbs']/text()").extract_first()
                item = DangdangItem()
                # Missing fields become "" so downstream pipelines always
                # see plain strings; quotes are stripped from `detail`.
                item["title"] = str(title).strip() if title else ""
                item["author"] = str(author).strip() if author else ""
                item["date"] = str(date).strip() if date else ""
                item["publisher"] = str(publisher).strip() if publisher else ""
                item["price"] = str(price).strip() if price else ""
                item["detail"] = str(detail).strip().replace('"', '').replace('\'', '') if detail else ""
                yield item
            link = selector.xpath("//li[@class='next']/a/@href").extract_first()
            # Next page: follow the pagination link until there is none.
            if link:
                url = response.urljoin(link)
                print(url)
                yield scrapy.Request(url=url, callback=self.parse)
        except Exception as e:
            print(e)
|
# Used to populate the trip_raw table from the raw csv data
import csv
import psycopg2
from db_connection import DBConnection

csv_file_path = 'D:/Projects/TOST/SampleDataset/tais_voyages_sample_hou_nol_points_by_voyage_w_ref.csv'

db_connection = DBConnection.get_instance().get_connection()
cursor = db_connection.cursor()
# Parameterized insert; ON CONFLICT DO NOTHING makes re-runs idempotent.
postgres_insert_query = """ INSERT INTO trip_raw (trip_id, timestamp, mmsi, lng_lat, heading, sog, rot, cog, ship_type) VALUES (%s,%s,%s, %s,%s,%s, %s,%s,%s) ON CONFLICT DO NOTHING;"""
with open(csv_file_path) as csv_file:
    csv_reader = csv.DictReader(csv_file)
    for row in csv_reader:
        trip_id = row['new_voyageid']
        timestamp = row['basedatetime']
        mmsi = row['mmsi']
        # EWKT point in WGS84 -- assumes x=longitude, y=latitude; TODO confirm
        lng_lat = "SRID=4326;POINT({} {})".format(row['x'], row['y'])
        heading = row['heading']
        sog = row['sog']
        rot = row['rot']
        cog = row['cog']
        ship_type = row['co_type']
        record_to_insert = (trip_id, timestamp, mmsi, lng_lat, heading, sog, rot, cog, ship_type)
        cursor.execute(postgres_insert_query, record_to_insert)
        count = cursor.rowcount
        print(count, "Records inserted successfully into trip_raw table")
# Bug fix: the original tested and closed an undefined name `connection`
# (NameError at the end of the run) and never committed the inserts.
if db_connection:
    db_connection.commit()
    cursor.close()
    db_connection.close()
    print("PostgreSQL connection is closed")
|
#!/usr/bin/env python
import tf
import rospy
from geometry_msgs.msg import PoseWithCovariance, TwistWithCovariance
from nav_msgs.msg import Odometry
from gazebo_msgs.msg import ModelStates
# Filled in at startup (see __main__): TF broadcaster and per-model publishers.
br = None
pubs = None
def model_states_cb(model_state):
    """Republish every tracked gazebo model's state as Odometry + TF.

    Robustness fix: gazebo can report models beyond the three we publish
    for; unknown names previously raised a KeyError on `pubs[name]` and
    killed the callback, so they are now skipped.
    """
    for name, pose, twist in zip(model_state.name, model_state.pose, model_state.twist):
        if name not in pubs:
            continue  # model we do not track -- ignore it
        odom = Odometry(
            pose=PoseWithCovariance(pose=pose),
            twist=TwistWithCovariance(twist=twist)
        )
        odom.child_frame_id = name
        odom.header.frame_id = '/odom'
        odom.header.stamp = rospy.Time.now()
        pubs[name].publish(odom)
        # Broadcast the same pose on TF: odom -> <model name>.
        br.sendTransform(
            (pose.position.x, pose.position.y, pose.position.z),
            (pose.orientation.x, pose.orientation.y, pose.orientation.z, pose.orientation.w),
            rospy.Time.now(),
            name,
            '/odom'
        )
if __name__ == '__main__':
    rospy.init_node('publish_odom')
    br = tf.TransformBroadcaster()
    rospy.Subscriber('gazebo/model_states', ModelStates, model_states_cb)
    # One Odometry publisher per tracked gazebo model.
    # NOTE(review): rospy.Publisher without queue_size is deprecated --
    # consider passing queue_size=10.
    pubs = {
        'ground_plane' : rospy.Publisher('ground_plane/odom', Odometry),
        'leader' : rospy.Publisher('leader/odom', Odometry),
        'follower' : rospy.Publisher('follower/odom', Odometry)
    }
    rospy.spin()
import os
from tkinter import *
# NOTE: I had to import messagebox separatly to prevent tis error: 'NameError: name 'messagebox' is not defined'
from tkinter import messagebox
import tkinter as tk
import sqlite3
# import our other modules
import phonebook_main
import phonebook_gui
# function to center the app on the user screen
def center_window(self, w, h):
    """Size the root window to w x h and place it centered on the screen.

    Returns the result of tkinter's geometry() call.
    """
    # Full screen dimensions, queried from the Tk root window.
    screen_w = self.master.winfo_screenwidth()
    screen_h = self.master.winfo_screenheight()
    # Top-left corner that centers a w x h window on that screen.
    offset_x = int((screen_w / 2) - (w / 2))
    offset_y = int((screen_h / 2) - (h / 2))
    return self.master.geometry('{}x{}+{}+{}'.format(w, h, offset_x, offset_y))
# catch if the user's clicks on the windows upper-right 'X' or the 'Close' button to ensure they want to close
def ask_quit(self):
    """Confirm with the user, then tear the whole application down.

    Side effects: destroys the Tk root window and hard-exits the process.
    """
    # 'Exit program' is the dialog title; the question is the dialog body.
    if messagebox.askokcancel("Exit program", "Okay to exit application?"):
        # Close the Tk root window...
        self.master.destroy()
        # ...then hard-exit so no lingering tkinter callbacks or widgets
        # keep the process (and its memory) alive.
        os._exit(0)
#=========================================================
def create_db(self):
    """Ensure the phonebook table exists, then seed it via first_run."""
    conn = sqlite3.connect('db_phonebook.db')
    with conn:
        cursor = conn.cursor()
        # Schema: autoincrementing ID plus name/contact text columns.
        cursor.execute("CREATE TABLE if not exists tbl_phonebook( \
        ID INTEGER PRIMARY KEY AUTOINCREMENT, \
        col_fname TEXT, \
        col_lname TEXT, \
        col_fullname TEXT, \
        col_phone TEXT, \
        col_email TEXT \
        );")
        # Persist the schema change, then release the connection below.
        conn.commit()
    conn.close()
    first_run(self)
# function will insert into dB for the first time
def first_run(self):
    """Seed the database with one example row if the table is empty.

    Fixes: the original built an unused `data` tuple whose email
    ('jdoe@gmail.com') disagreed with the row actually inserted
    ('jdoe@email.com') -- the misleading dead variable is removed -- and
    commit/close now always run, not only on the seeding path.
    """
    conn = sqlite3.connect('db_phonebook.db')
    with conn:
        cur = conn.cursor()
        cur, count = count_records(cur)
        # Only seed on the very first run (no rows in the table yet).
        if count < 1:
            cur.execute(
                """INSERT INTO tbl_phonebook (col_fname,col_lname,col_fullname,col_phone,col_email) VALUES (?,?,?,?,?)""",
                ('John', 'Doe', 'John Doe', '111-111-1111', 'jdoe@email.com'))
        conn.commit()
    conn.close()
def count_records(cur):
    """Return (cursor, total number of rows in tbl_phonebook)."""
    cur.execute("""SELECT COUNT(*) FROM tbl_phonebook""")
    # COUNT(*) comes back as a single-element row; take its first column.
    total = cur.fetchone()[0]
    return cur, total
# Select item in ListBox
# When the user clicks on a name in our list box, we quickly access the dB and put in the data that corresponds to the name they clicked on.
def onSelect(self,event):
    """Fill the four entry widgets with the record matching the clicked name.

    Args:
        event: tkinter selection event; event.widget is the listbox.
    """
    # The widget that fired the event is the listbox (self.lstList1).
    varList = event.widget
    # curselection() returns a tuple of selected indices; take the first.
    select = varList.curselection()[0]
    # Resolve the index to the displayed full-name string.
    value = varList.get(select)
    conn = sqlite3.connect('db_phonebook.db')
    # if the connection is successful,
    with conn:
        cursor = conn.cursor()
        # Look up the record whose col_fullname equals the clicked name.
        cursor.execute("""SELECT col_fname,col_lname,col_phone,col_email FROM tbl_phonebook WHERE col_fullname = (?)""", [value])
        # fetchall() returns the matching rows as a list of 4-tuples.
        varBody = cursor.fetchall()
        # For each row (normally exactly one), clear each entry widget
        # then insert the corresponding column value.
        for data in varBody:
            self.txt_fname.delete(0,END)
            self.txt_fname.insert(0,data[0])
            self.txt_lname.delete(0,END)
            self.txt_lname.insert(0,data[1])
            self.txt_phone.delete(0,END)
            self.txt_phone.insert(0,data[2])
            self.txt_email.delete(0,END)
            self.txt_email.insert(0,data[3])
# add to our list we currently have
def addToList(self):
    """Validate the form fields and insert a new contact into the database.

    Rejects the insert when the full name already exists or any of the
    four fields is empty; on success the listbox is updated and the form
    cleared.
    """
    var_fname = self.txt_fname.get()
    var_lname = self.txt_lname.get()
    # normalize the data to keep it consistent in the database:
    # strip surrounding blanks and capitalize each word.
    var_fname = var_fname.strip().title()
    var_lname = var_lname.strip().title()
    var_fullname = ("{} {}".format(var_fname,var_lname))  # normalized full name
    # for development purposes, see that the name is presented the way we want it
    print("var_fullname: {}".format(var_fullname))
    var_phone = self.txt_phone.get().strip()
    var_email = self.txt_email.get().strip()
    # Bug fix: the original `if not "@" or not "." in var_email` always
    # evaluated `not "@"` to False, so the '@' was never actually checked.
    if "@" not in var_email or "." not in var_email:
        print("Incorrect email format!!!")
    # enforce the user to provide data in all four fields
    if (len(var_fname) > 0) and (len(var_lname) > 0) and (len(var_phone) > 0) and (len(var_email) > 0):
        conn = sqlite3.connect('db_phonebook.db')
        # and if the connection is successful,
        with conn:
            cursor = conn.cursor()
            # Security fix: bind the name as a parameter instead of
            # formatting it into the SQL (injection / quoting bugs on
            # names like O'Brien).
            cursor.execute("""SELECT COUNT(col_fullname) FROM tbl_phonebook WHERE col_fullname = ?""", (var_fullname,))
            # capture the count of existing rows with this full name
            count = cursor.fetchone()[0]
            chkName = count
            if chkName == 0:  # no existing entry -- safe to add
                print("chkName: {}".format(chkName))
                cursor.execute("""INSERT INTO tbl_phonebook (col_fname,col_lname,col_fullname,col_phone,col_email) VALUES (?,?,?,?,?)""",(var_fname,var_lname,var_fullname,var_phone,var_email))
                self.lstList1.insert(END, var_fullname)  # show the new contact
                onClear(self)  # clear all of the textboxes
            else:
                messagebox.showerror("Name Error","'{}' already exists in the database! Please choose a different name.".format(var_fullname))
                onClear(self)  # clear all of the textboxes
            conn.commit()
        conn.close()
    else:
        messagebox.showerror("Missing Text Error","Please ensure that there is data in all four fields.")
# function to delete entry from dB
def onDelete(self):
    """Delete the contact selected in the listbox, unless it is the last row.

    The final record is kept because the rest of the UI assumes a
    non-empty table.
    """
    var_select = self.lstList1.get(self.lstList1.curselection())  # selected full name
    conn = sqlite3.connect('db_phonebook.db')
    with conn:
        cur = conn.cursor()
        # Refuse to delete the final record (the app expects >= 1 row).
        cur.execute("""SELECT COUNT(*) FROM tbl_phonebook""")
        count = cur.fetchone()[0]
        if count > 1:
            confirm = messagebox.askokcancel("Delete Confirmation", "All information associated with, ({}) \nwill be permenantly deleted from the database. \n\nProceed with the deletion request?".format(var_select))
            # if user confirms to delete entry,
            if confirm:
                # Security fix: bind the name as a parameter instead of
                # formatting it into the SQL; also reuse the existing
                # connection rather than opening a second one.
                cur.execute("""DELETE FROM tbl_phonebook WHERE col_fullname = ?""", (var_select,))
                onDeleted(self)  # clear the form and the listbox selection
                conn.commit()
        else:
            confirm = messagebox.showerror("Last Record Error", "({}) is the last record in the database and cannot be deleted at this time. \n\nPlease add another record first before you can delete ({}).".format(var_select,var_select))
    conn.close()
def onDeleted(self):
    """Blank all four entry widgets and drop the selected listbox row."""
    for entry in (self.txt_fname, self.txt_lname, self.txt_phone, self.txt_email):
        entry.delete(0, END)
    try:
        # Remove the deleted contact's row from the listbox as well.
        chosen = self.lstList1.curselection()[0]
        self.lstList1.delete(chosen)
    except IndexError:
        # Nothing selected -- leave the listbox untouched.
        pass
def onClear(self):
    """Blank out all four contact entry widgets."""
    for entry in (self.txt_fname, self.txt_lname, self.txt_phone, self.txt_email):
        entry.delete(0, END)
def onRefresh(self):
    """Rebuild the listbox from the col_fullname column of the database.

    Perf fix: the original re-ran the SELECT and fetchall() once per row
    (O(n^2) fetches); a single query now feeds the whole loop. Rows are
    still inserted at index 0, preserving the original (reversed)
    display order.
    """
    self.lstList1.delete(0, END)
    conn = sqlite3.connect('db_phonebook.db')
    with conn:
        cursor = conn.cursor()
        cursor.execute("""SELECT col_fullname FROM tbl_phonebook""")
        for row in cursor.fetchall():
            # Each row is a 1-tuple: (col_fullname,)
            self.lstList1.insert(0, str(row[0]))
    conn.close()
# function to update/ make changes to user info
def onUpdate(self):
    """Update phone/email for the contact selected in the listbox.

    Only phone and email may change; a name change requires deleting the
    record and re-adding it. The update is skipped when both new values
    already exist in the database.
    """
    try:
        var_select = self.lstList1.curselection()[0]  # index of the list selection
        var_value = self.lstList1.get(var_select)     # selected full-name text
    except Exception:
        # No selection (curselection() empty) -- tell the user and bail out.
        messagebox.showinfo("Missing selection","No name was selected from the list box. \nCancelling the Update request.")
        return
    # normalize the data to maintain database integrity
    var_phone = self.txt_phone.get().strip()
    var_email = self.txt_email.get().strip()
    if (len(var_phone) > 0) and (len(var_email) > 0):  # ensure data is present
        conn = sqlite3.connect('db_phonebook.db')
        with conn:
            cur = conn.cursor()
            # Security fix throughout: bound parameters instead of
            # string-formatted SQL (injection / quoting bugs).
            cur.execute("""SELECT COUNT(col_phone) FROM tbl_phonebook WHERE col_phone = ?""", (var_phone,))
            count = cur.fetchone()[0]
            print(count)
            cur.execute("""SELECT COUNT(col_email) FROM tbl_phonebook WHERE col_email = ?""", (var_email,))
            count2 = cur.fetchone()[0]
            print(count2)
            if count == 0 or count2 == 0:  # at least one value actually changed
                response = messagebox.askokcancel("Update Request","The following changes ({}) and ({}) will be implemented for ({}). \n\nProceed with the update request?".format(var_phone,var_email,var_value))
                print(response)
                # if user responds 'okay'
                if response:
                    cur.execute("""UPDATE tbl_phonebook SET col_phone = ?,col_email = ? WHERE col_fullname = ?""", (var_phone, var_email, var_value))
                    onClear(self)  # clears the text boxes
                    conn.commit()
                # if the user responds 'cancel'
                else:
                    messagebox.showinfo("Cancel request","No changes have been made to ({}).".format(var_value))
            else:
                messagebox.showinfo("No changes detected","Both ({}) and ({}) \nalready exist in the database for this name. \n\nYour update request has been cancelled.".format(var_phone, var_email))
                onClear(self)
        conn.close()
    else:
        messagebox.showerror("Missing information","Please select a name from the list. \nThen edit the phone or email information.")
        onClear(self)
if __name__ == "__main__":
    # Intentionally a no-op: this module only provides helpers and is
    # presumably driven by phonebook_main / phonebook_gui -- confirm.
    pass
|
import numpy as np
import pandas as pd
from sklearn.tree import DecisionTreeRegressor
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
from sklearn.tree import export_graphviz
import matplotlib.pyplot as plt
from talib import RSI, BBANDS, MACD
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import classification_report, confusion_matrix
# ---------------------------------------------------------------------------
# Predicts a stock's MACD value `future_days` ahead with linear regression
# and plots the prediction against the historical series.
# ---------------------------------------------------------------------------
plt.style.use('ggplot')
stockListPath = 'D:\\Personal\\share\\BasicData\\ALL-stocks.txt'
dataFolderPath = 'D:\\Personal\\share\\data'
resultFolderPath = 'D:\\Personal\\share\\results'
shareCode='COMB.N0000'
# Load one share's OHLC history; 'c' is assumed to be the close -- TODO confirm
df = pd.read_json(dataFolderPath+'\\' + shareCode + '.txt')
MACD_FAST = 10
MACD_SLOW = 21
MACD_SIGNAL = 8
macd, macdSignal, macdHist = MACD(df['c'], MACD_FAST, MACD_SLOW, MACD_SIGNAL)
df['macd'] = macd
# Keep only the MACD column and drop the warm-up NaNs produced by MACD.
df = df[['macd']]
df = df.dropna()
#df.shape
#Create a variable to predict 'x' days out into the future
future_days = 20
#Create a new column (the target or dependent variable) shifted 'x' units/days up
df['Prediction'] = df[['macd']].shift(-future_days)
#print the data
df.shape
# feature dataset
# NOTE(review): positional `axis` in df.drop(..., 1) is deprecated; use axis=1.
X = np.array(df.drop(['Prediction'], 1))[:-future_days]
print(X)
# target data set
y = np.array(df['Prediction'])[:-future_days]
print(y)
#X = StandardScaler().fit_transform(X)
# split dataset to train set and test set (25%)
x_train, x_test, y_train, y_test = train_test_split(X, y, test_size = 0.25)
# NOTE(review): the model is fit on *scaled* x_train below, but predictions
# are later made on *unscaled* x_future -- likely a bug; confirm intent.
x_train = StandardScaler().fit_transform(x_train)
#y_train = StandardScaler().fit_transform(y_train)
#Create the decision tree regressor model
# tree = DecisionTreeRegressor().fit(x_train, y_train)
#Create the linear regression model
lr = LinearRegression().fit(x_train, y_train)
#Get the feature data,
#AKA all the rows from the original data set except the last 'x' days
x_future = df.drop(['Prediction'], 1)[:-future_days]
#Get the last 'x' rows
x_future = x_future.tail(future_days)
#Convert the data set into a numpy array
x_future = np.array(x_future)
x_future
#Show the model linear regression prediction
lr_prediction = lr.predict(x_future)
print(lr_prediction)
#Visualize the data
predictions = lr_prediction
#Plot the data
# `valid` is the final future_days rows (those without a known target).
valid = df[X.shape[0]:]
# NOTE(review): assigning into a slice triggers SettingWithCopyWarning;
# consider valid = valid.copy() first.
valid['Predictions'] = predictions #Create a new column called 'Predictions' that will hold the predicted prices
plt.figure(figsize=(16,8))
plt.title('Regression Model')
plt.xlabel('Days',fontsize=18)
plt.ylabel('Close Price LKR (R)',fontsize=18)
plt.plot(df['macd'])
plt.plot(valid[['macd','Predictions']])
plt.legend(['Train', 'Val', 'Prediction' ], loc='lower right')
plt.show()
|
#-------------------------------------------------------------------------------------------#
# Autor: Arkangel AI
# Version: 2.0
# Year: 2021 - Mar
#-------------------------------------------------------------------------------------------#
import os
from funciones.funciones import *
from skimage import color, data, restoration
from skimage.filters import threshold_otsu, rank
from skimage.morphology import disk
import socket
import urllib
import matplotlib.pyplot as plt
import requests
from dotenv import load_dotenv
# Debug helper: display an image inline with matplotlib.
plot = lambda x: (plt.imshow(x), plt.show())
# Pull service configuration from the .env file.
load_dotenv()
URL_APP = os.getenv('URL_RETINOPATIAS')
def get_image(uri):
    '''
    Download the image at `uri` and return it as an RGB numpy array.
    Eg:
    uri = "https://raw.githubusercontent.com/DanielLopez-1805/Imagenes/master/imagen1.JPG"
    RGBimage = get_image(uri)
    '''
    # Bug fix: the module only does `import urllib`, which does not make
    # the `urllib.request` submodule available; import it explicitly here.
    import urllib.request
    # Abort slow downloads after `timeout` seconds.
    timeout = 100
    socket.setdefaulttimeout(timeout)
    req = urllib.request.Request(str(uri))
    response = urllib.request.urlopen(req)
    a = response.read()
    # Decode the raw bytes into an OpenCV image (any supported format).
    image = np.asarray(bytearray(a), dtype=np.uint8)
    image = cv2.imdecode(image, -1)
    try:
        image = cv2.cvtColor(image, cv2.COLOR_BGRA2RGB)
    except Exception:
        # Not BGRA (no alpha channel): treat it as plain BGR.
        image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    print("Get image Done")
    return image
def Detection(uri):
    global plot, TESTING #TESTING: variable inside funciones
    '''
    This function detect multiple diseases in eye fundus color images
    Eg:
    uri = "url_to_image"
    Prediccion, error = EyeFundusDiseasesDetection(uri)
    Output:
    Prediccion = [1,0,1,0,0,0,0,0] # One/Multiple of this:['Normal','Diabetes','Glaucoma','Catarata','Age','Hipertension','Miopia','Otros']
    error = 0
    '''
    def _run_pipeline(image_uri):
        # Shared download -> preprocess -> predict pipeline, deduplicated
        # from the original's two near-identical branches.
        im = get_image(image_uri)
        im_Normalized, error = preprocessingAfter(im)
        out = prediccion(im_Normalized).reshape(-1)
        out_th = int(out > 0.60)  # binary decision at a 0.60 threshold
        return out, out_th, error

    error = 0
    if not TESTING:
        try:
            return _run_pipeline(uri)
        except Exception:
            # Narrowed from a bare `except:` so Ctrl-C still works.
            # NOTE(review): on failure callers receive a 2-tuple while the
            # success path returns a 3-tuple -- kept for compatibility, but
            # callers must handle both shapes.
            return ([], error)
    else:
        out, out_th, error = _run_pipeline(uri)
        print("Pred: ", out)
        return (out, out_th, error)
|
from django.db import models
from wagtail.core.models import Page
from wagtail.core.fields import StreamField
from wagtail.core import blocks
from wagtail.admin.edit_handlers import FieldPanel, StreamFieldPanel
from wagtail.images.blocks import ImageChooserBlock
from .blocks import PortifolioBlock, ServiceBlock
class Index(Page):
    """Wagtail home page with two optional StreamField sections.

    `portifolio` and `service` each hold a stream of their respective
    block type; both may be left blank in the admin.
    """
    portifolio = StreamField(blocks.StreamBlock([('portifolio', PortifolioBlock())],
                                                required=False),
                             blank=True)
    service = StreamField(blocks.StreamBlock([('service', ServiceBlock())],
                                             required=False),
                          blank=True)
    # Expose both streams in the page editor.
    content_panels = Page.content_panels + [
        StreamFieldPanel('portifolio'),
        StreamFieldPanel('service'),
    ]
|
from controllers import base
from google.appengine.api import users
class AdminHandler(base.BaseHandler):
    """Admin dashboard page: only served to signed-in Google users."""
    def get(self):
        # Guard clause: anonymous visitors are bounced to the login page.
        user = users.get_current_user()
        if not user:
            self.redirect(users.create_login_url(self.request.uri))
            return
        self.render('dash.html')
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jun 26 12:33:46 2021
@author: Usuario
"""
# Repeatedly reads a batch of numbers from the user and bubble-sorts them,
# until the user answers anything other than "seguir" ("continue").
condicion="seguir"
while condicion == "seguir":
    entrada=[]
    # reset the list for this round
    limit = int(input("Cuantos valores desea ingresar?:"))
    # number of values to read
    for i in range(0,limit,1 ):
        value= float(input("ingrese un numero "))
        entrada.append(value)
    # load the input values
    for i in range(0,limit,1):
        for j in range(0,limit-1,1):
            if entrada[j] > entrada[j+1]:
                # bubble sort: swap adjacent out-of-order values
                entrada[j],entrada[j+1]=entrada[j+1],entrada[j]
    print(entrada)
    # print the sorted list
    condicion=(input("desea seguir ordenando valores? responda con -seguir-: "))
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Jul 22 19:35:22 2018
@author: michal
"""
import sys
import os
# Make this plugin's own directory importable so fetchDialog can be found.
path = os.path.dirname(__file__)
if not path in sys.path:
    sys.path.append(path)
from fetchDialog import fetchdialog
try:
from pymol import plugins
except:
pass
def __init_plugin__(self=None):
    """PyMOL plugin entry point: register the menu item for the dialog."""
    plugins.addmenuitem('Supramolecular analyser', fetchdialog)
if __name__ == "__main__":
    # Standalone mode for development/testing outside PyMOL.
    fetchdialog(True)
#!/usr/bin/python
##############################################>
##############################################>
##
## Free BSD license 3-clause
##
## Copyright (c)<2011>, <Martin de Bruyn>
## All rights reserved.
##
## Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the <organization> nor the
## names of its contributors may be used to endorse or promote products
## derived from this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
## ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
## WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
## DISCLAIMED. IN NO EVENT SHALL <MARTIN DE BRUYN> BE LIABLE FOR ANY
## DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
## (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
## LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
## ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
## SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
##
#########################################################################################>
#########################################################################################>
# Developers.
# MJ-meo-dmt.
####> IMPORTS <####
# System Imports
import sys, os, random, math
# Sql import.
from sqlite3 import dbapi2 as sqlite
# Config (game)
from config import *
# Panda Imports
####> CODE <####
# db MainClass.
# db MainClass.
class db:
    """Thin sqlite access layer for planet and player data.

    Each method opens its own connection to DB_PATH (from config) and
    closes it before returning.
    """
    # INIT.
    def __init__(self):
        pass

    def getPlanetDS(self, planetData):
        """
        This returns the variable 'planetData' holding a tuple, that contain 'planetDis',
        and 'planetScale' from the whole 'planetData' Table!.

        Usage:
            DB = db()
            planetData = DB.getPlanetDS([])
            sunData = planetData[0]
        """
        # Set the connection to the db and get a cursor.
        self.conn = sqlite.connect(DB_PATH)
        self.cur = self.conn.cursor()
        # Distance and scale for every planet, ordered by id.
        self.cur.execute("select planetDis, planetScale from planetData order by planetID")
        planetData = self.cur.fetchall()
        # Close the Cursor and Connection.
        self.cur.close()
        self.conn.close()
        # Ready for slicing...
        return planetData

    def savePlayer(self, playerID, playerName=None, playerPos=None):
        """
        Basic playerData save: replace the stored row for playerID.

        Fixes:
        - the DELETE previously embedded the literal text 'playerid' in the
          SQL instead of binding the playerID value;
        - the INSERT referenced undefined globals playerName/playerPos
          (NameError) -- they are now optional, backward-compatible
          parameters.
        """
        self.conn = sqlite.connect(DB_PATH)
        self.cur = self.conn.cursor()
        # Drop any previous row for this player, then insert the new data.
        self.cur.execute("delete from playerData where playerID=?", (playerID,))
        self.cur.execute("insert into playerData(playerName, playerPos) values (?, ?)", (playerName, playerPos))
        # Commit/save the data and release the connection.
        self.conn.commit()
        self.cur.close()
        self.conn.close()
|
import numpy as np
import OpenGL.GL as gl
import ctypes
# Map numpy dtypes to the matching OpenGL vertex-attribute type enum.
# float16 has no core GL equivalent, hence None.
vertex_type = {
    np.dtype(np.int8) : gl.GL_BYTE,
    np.dtype(np.uint8) : gl.GL_UNSIGNED_BYTE,
    np.dtype(np.int16) : gl.GL_SHORT,
    np.dtype(np.uint16) : gl.GL_UNSIGNED_SHORT,
    np.dtype(np.float16) : None,
    np.dtype(np.int32) : gl.GL_INT,
    np.dtype(np.uint32) : gl.GL_UNSIGNED_INT,
    np.dtype(np.float32): gl.GL_FLOAT,
    np.dtype(np.float64): gl.GL_DOUBLE
}
# =============================================================================
class Mesh(object):
    """ The mesh class.
    Author:
        - 2020-2021 Nicola Creati
        - 2020-2021 Roberto Vidmar
    Copyright:
        2020-2021 Nicola Creati <ncreati@inogs.it>
        2020-2021 Roberto Vidmar <rvidmar@inogs.it>
    License:
        MIT/X11 License (see
        :download:`license.txt <../../../license.txt>`)
    """
    def __init__(self, vertices, indices=None, mode=gl.GL_STATIC_DRAW):
        """ Create new instance.
        Args:
            vertices (:class:`numpy.ndarray`): mesh vertices (structured
                array, one field per vertex attribute)
            indices (:class:`numpy.ndarray`): mesh indices
            mode (int): specifies the expected usage pattern of the data
                store (https://www.khronos.org/registry/\
                OpenGL-Refpages/gl4/html/glBufferData.xhtml)
        """
        self.vertices = vertices
        self.indices = indices
        # Build the Vertex Array Object
        self._vao = gl.glGenVertexArrays(1)
        gl.glBindVertexArray(self._vao)
        if indices is not None:
            self._ibo = gl.glGenBuffers(1)
            gl.glBindBuffer(gl.GL_ELEMENT_ARRAY_BUFFER, self._ibo)
            gl.glBufferData(gl.GL_ELEMENT_ARRAY_BUFFER, self.indices, mode)
        self._vbo = gl.glGenBuffers(1)
        gl.glBindBuffer(gl.GL_ARRAY_BUFFER, self._vbo)
        gl.glBufferData(gl.GL_ARRAY_BUFFER, vertices, mode)
        self._declareAttributes()
        gl.glBindVertexArray(0)

    def _declareAttributes(self):
        """ Declare one vertex attribute per field of the structured vertex
        dtype, in field-offset order (factored out of __init__/rebuild,
        which previously duplicated this code).
        """
        # dtype.fields maps name -> (sub-dtype, byte offset); sort by offset.
        fields = sorted(self.vertices.dtype.fields.items(),
                key=lambda field: field[1][1])
        for index, field in enumerate(fields):
            gl.glEnableVertexAttribArray(index)
            size = field[1][0].shape[0]
            stride = ctypes.c_void_p(field[1][-1])
            glType = vertex_type[field[1][0].base]
            normalized = gl.GL_FALSE
            gl.glVertexAttribPointer(index, size, glType, normalized,
                    self.vertices.itemsize, stride)

    def rebuild(self, vertices, indices):
        """ Rebuild the mesh with new vertices and indices.
        Args:
            vertices (:class:`numpy.ndarray`): vertices
            indices (:class:`numpy.ndarray`): indices
        """
        if vertices is None and indices is None:
            return
        self.vertices = vertices
        self.indices = indices
        gl.glBindVertexArray(self._vao)
        gl.glBindBuffer(gl.GL_ARRAY_BUFFER, self._vbo)
        gl.glBufferData(gl.GL_ARRAY_BUFFER, vertices, gl.GL_STATIC_DRAW)
        if indices is not None:
            # Guard added: self._ibo only exists for meshes built with indices.
            gl.glBindBuffer(gl.GL_ELEMENT_ARRAY_BUFFER, self._ibo)
            gl.glBufferData(gl.GL_ELEMENT_ARRAY_BUFFER, self.indices,
                    gl.GL_STATIC_DRAW)
        self._declareAttributes()
        gl.glBindVertexArray(0)

    def update(self):
        """ Upload the (possibly modified) vertex array into the existing
        vertex buffer.
        """
        gl.glBindVertexArray(self._vao)
        gl.glBindBuffer(gl.GL_ARRAY_BUFFER, self._vbo)
        gl.glBufferSubData(gl.GL_ARRAY_BUFFER, 0, self.vertices)
        gl.glBindVertexArray(0)

    def draw(self, primitive=gl.GL_TRIANGLES):
        """ Draw the mesh (indexed when indices were supplied).
        """
        gl.glBindVertexArray(self._vao)
        gl.glBindBuffer(gl.GL_ARRAY_BUFFER, self._vbo)
        if self.indices is not None:
            gl.glBindBuffer(gl.GL_ELEMENT_ARRAY_BUFFER, self._ibo)
            gl.glDrawElements(primitive, self.indices.size,
                    gl.GL_UNSIGNED_INT, None)
        else:
            # Bug fix: gl.glDrawArray does not exist in PyOpenGL; the
            # non-indexed path must call glDrawArrays. Also avoid touching
            # self._ibo, which is only created for indexed meshes.
            gl.glDrawArrays(primitive, 0, self.vertices.size)
        gl.glBindVertexArray(0)
|
# coding=latin-1
from flask import request, g
from flask import render_template
from flask import flash
from flask import jsonify
from flask import session
from flask import abort, redirect, url_for
from flask_login import login_user , logout_user , current_user , login_required
from cnddh.decoder import killgremlins
from config import PROD, DEBUG
import os
import md5
import json
import datetime
import sqlalchemy as sa
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy import asc, desc, exc, and_, or_, func
from cnddh import app
from cnddh import login_manager, logger
from database import db
from cnddh.models import Denuncia, Vitima, Suspeito, Usuario, Cidade, TipoVitima, TipoSuspeito, TipoViolacao, Violacao, Homicidio, TipoMeioUtilizado, HomicidioMeioUtilizado, TipoFonte, Anexo, Status, Acao, Historico, Usuario, Permissao, Perfil, PermissaoPerfil, PermissaoUsuario, PerfilUsuario, TipoLocal, Encaminhamento, TipoEncaminhamento, Orgao, Oficio, Telefonema, Reuniao, Email, Generico, Retorno, TipoRetorno, RetornoGenerico, RetornoPessoasassistidas, RetornoInquerito, RetornoProcesso, RetornoBO, RetornoRCO, RetornoREDS, RetornoPoliticaPSR
from cnddh.forms import DenunciaForm, VitimaForm, SuspeitoForm, RelacionarForm, FinalizarForm, PesquisarForm, WorkflowForm, GraficoViolacaoForm, GraficoViolSuspForm, UsuarioForm, LoginForm, PermissaoForm, PesquisaUsuarioForm, EncaminhamentoForm, OficioForm, TelefonemaForm, ReuniaoForm, EmailForm, GenericoForm, RetornoForm, HomicidioForm, MotivoForm
from cnddh.uploadsets import anexos_upload
from flask_uploads import UploadNotAllowed
from cnddh.utils import Paginator, flash_errors, TlsSMTPHandler, shutdown_server, datestring_to_date, envia_email
from decorators import checa_permissao
from mapeamentos import estados_choices, tipoassistencia_choices, politicas_choices
from sqlalchemy.exc import IntegrityError, OperationalError
from sqlalchemy.orm import joinedload_all, with_polymorphic
import logging
import logging.handlers
from email.mime.text import MIMEText
from email.header import Header
from email.mime.application import MIMEApplication
from email.mime.multipart import MIMEMultipart
@app.route('/')
def index():
    """ Render the application landing page. """
    return render_template('home.html')
@app.errorhandler(404)
def pagina_nao_encontrada(e):
    """ Custom handler: render the 404 page with the proper status. """
    corpo = render_template('404.html')
    return corpo, 404
@app.errorhandler(401)
def nao_autorizado(e):
    """ Custom handler: render the 401 page with the proper status. """
    corpo = render_template('401.html')
    return corpo, 401
@login_manager.user_loader
def load_user(id):
    """ Flask-Login callback: fetch a Usuario by its primary key. """
    usuario_pk = int(id)
    return Usuario.query.get(usuario_pk)
@app.route('/recuperasenha', methods=['GET', 'POST'])
def recupera_senha():
    """ Password "recovery" stub.

    NOTE(review): on every request this resets the password hash of the
    user with id 1 using a hard-coded suffix ('012288'), with no
    authentication or confirmation whatsoever — it reads like a
    temporary debug backdoor and should be removed or replaced with a
    real recovery flow before production use.
    """
    u = db.session.query(Usuario).get(1)
    # Hash scheme used throughout the app: md5(login + password)
    u.senhahash = md5.new(u.login + '012288').hexdigest()
    db.session.commit()
    return "Ok. Por enquanto."
@app.route('/login', methods=['GET', 'POST'])
def login():
    """ Authenticate a user.

    POST: validate the form, look the user up by login and compare the
    stored hash with md5(login + password). On success the session is
    established and the user is redirected to ``next`` (or the index);
    on failure an error is flashed and the form is shown again.
    GET: render the empty login form.

    Cleanup: the redundant ``val`` flag chain and the duplicated
    ``if request.method == 'GET'`` return (dead branch — the same
    render followed unconditionally) were removed; behavior unchanged.
    """
    if request.method == 'POST':
        f = LoginForm(request.form)
        if f.validate():
            u = None
            try:
                u = db.session.query(Usuario).filter(Usuario.login == f.login.data).one()
            except NoResultFound:
                u = None
            # NOTE(review): unsalted md5(login + password) is a weak
            # password scheme, and == is not a constant-time compare.
            if u and u.senhahash == md5.new(f.login.data + f.senha.data).hexdigest():
                login_user(u)
                # NOTE(review): redirecting to an unvalidated 'next'
                # parameter is an open redirect — consider restricting
                # it to relative URLs.
                return redirect(request.args.get('next') or url_for('index'))
            flash(u"Senha e/ou usuário inválidos", u"error")
            return redirect(url_for('login'))
    else:
        f = LoginForm()
    return render_template('login.html',f=f)
@app.route('/logout', methods=['GET', 'POST'])
def logout():
    """ End the current session and return to the home page. """
    logout_user()
    return redirect(url_for('index'))
@app.before_request
def before_request():
    """ Expose the logged-in user as ``g.user`` for views/templates. """
    g.user = current_user
@app.route('/usuario/<int:usuario_id>/editar', methods=['GET', 'POST'])
@app.route('/usuario/novo', methods=['GET', 'POST'])
@login_required
def usuario_edit(usuario_id=None):
    """ Create a new user (no usuario_id) or edit an existing one.

    GET renders the form, pre-filled in edit mode; POST validates and
    persists. Editing somebody else's account requires the
    'altera-usuario' permission; creating accounts requires
    'cria-usuario'.
    """
    u=None
    if usuario_id:
        u = db.session.query(Usuario).get(usuario_id)
        if u:
            if not g.user.checa_permissao('altera-usuario'): # a user may only edit his own data unless he has the special permission
                if g.user.id != u.id:
                    abort(401)
        else:
            abort(404)
    else:
        if g.user.checa_permissao('cria-usuario'):
            pass
        else:
            abort(401)
    if request.method == 'POST':
        f = UsuarioForm(request.form)
        if f.senha.data == f.confirmasenha.data:
            if usuario_id:
                if f.validate():
                    if u:
                        # The current password (or the admin permission) is
                        # required to change account data.
                        # NOTE(review): hash scheme is unsalted md5(login + password).
                        if md5.new(f.login.data + f.senhaatual.data).hexdigest() == u.senhahash or g.user.checa_permissao('altera-usuario'):
                            u.nome = f.nome.data
                            u.senhahash = md5.new(f.login.data + f.senha.data).hexdigest()
                            u.email = f.email.data
                            u.ddd = f.telefone.ddd.data
                            u.telefone = f.telefone.numero.data
                            u.dtultlogin = datetime.datetime.today()
                            db.session.commit()
                            flash(u"Usuário alterado com sucesso.", u"success")
                            return redirect(url_for('usuario_edit',usuario_id=u.id))
                        else:
                            flash(u"Você deve digitar a senha atual corretamente para alterar dos dados.", u"error")
                            return redirect(url_for('usuario_edit',usuario_id=u.id))
                else:
                    flash(u"Verificar erros no formulários.", u"error")
            else:
                if f.validate():
                    if g.user.checa_permissao('cria-usuario'):
                        # Refuse duplicated login or email.
                        us = db.session.query(Usuario).filter(or_(Usuario.login == f.login.data, Usuario.email == f.email.data)).all()
                        if us:
                            flash(u"O usuário já existe e este email também. Não é possível cadastrar novamente.", u"error")
                        else:
                            u = Usuario(f.login.data, f.nome.data, f.telefone.ddd.data, f.telefone.numero.data, md5.new(f.login.data + f.senha.data).hexdigest(),f.email.data)
                            db.session.add(u)
                            db.session.commit()
                            flash(u"Usuário criado com sucesso", u"success")
                            return redirect(url_for('login'))
                    else:
                        flash(u"Você não tem permissão para criar um novo usuário", u"error")
                        abort(401)
                else:
                    flash(u"Verificar erros no formulários.", u"error")
        else:
            flash(u"A senha e a confirmação devem ser iguais.", u"error")
    else:
        # GET: pre-fill the form with the stored data (edit mode only).
        f = UsuarioForm()
        if u:
            f.login.data=u.login
            f.nome.data = u.nome
            f.telefone.ddd.data = u.ddd
            f.telefone.numero.data = u.telefone
            f.email.data = u.email
    if request.method == 'GET':
        return render_template('usuario.html',f=f, usuario_id=usuario_id)
    return render_template('usuario.html',f=f, usuario_id=usuario_id)
@app.route('/denuncia/<int:denuncia_id>/editar', methods=['GET', 'POST'])
@app.route('/denuncia/novo', methods=['GET', 'POST'])
@login_required
@checa_permissao('edita-denuncia')
def denuncia_edit(denuncia_id=None):
    """ Create a new complaint (denúncia) or edit an existing one.

    GET renders the form (pre-filled in edit mode); POST validates and
    persists. Creating additionally requires the 'cria-denuncia'
    permission. On success the flow continues to the victims page.
    """
    d = None
    novo = True
    if denuncia_id:
        d = db.session.query(Denuncia).get(denuncia_id)
        novo = False
    else:
        if not g.user.checa_permissao('cria-denuncia'):
            abort(401)
    # Select-field options for source type and place type.
    q = db.session.query(TipoFonte.id, TipoFonte.tipofonte).order_by(TipoFonte.tipofonte.asc())
    option_tpfonte = ([(str(x), y) for x,y in q.all()])
    q = db.session.query(TipoLocal.id, TipoLocal.local).order_by(TipoLocal.local.asc())
    option_tplocal = ([(str(x), y) for x,y in q.all()])
    if request.method == 'POST':
        f = DenunciaForm(request.form, obj=d)
        # City choices depend on the state submitted with the form.
        q = db.session.query(Cidade.cidade, Cidade.cidade)
        q = q.filter(Cidade.estado==f.endereco.estado.data)
        option_cidade = [(u"", u"")]
        option_cidade.extend([(x,y) for x,y in q.all()])
        f.endereco.cidade.choices = option_cidade
        f.tipofonte_id.choices = option_tpfonte
        f.endereco.tipolocal.choices = option_tplocal
        if f.validate():
            if novo:
                d = Denuncia(f.numero.data)
            d.numero = f.numero.data
            d.dtdenuncia = f.dtdenuncia.data
            d.resumo = f.resumo.data
            d.descricao = f.descricao.data
            d.observacao = f.observacao.data
            d.tipofonte_id = f.tipofonte_id.data
            d.fonte = f.fonte.data
            if f.protocolo.data:
                d.protocolo = f.protocolo.data
            else:
                d.protocolo = 0
            # Move to status 1 only if that is a forward transition.
            if _controle_status(d,1):
                d.status_id = 1
            d.tipolocal = f.endereco.tipolocal.data
            d.endereco = f.endereco.endereco.data
            d.num = f.endereco.num.data
            d.complemento = f.endereco.complemento.data
            d.referencia = f.endereco.referencia.data
            d.bairro = f.endereco.bairro.data
            d.cidade = f.endereco.cidade.data
            d.cep = f.endereco.cep.data
            d.estado = f.endereco.estado.data
            d.pais = "Brasil"
            if novo:
                try:
                    if g.user.checa_permissao('cria-denuncia'):
                        if novo:
                            db.session.add(d)
                            db.session.commit()
                            return redirect(url_for('vitima_edit',denuncia_id=d.id))
                    else:
                        flash(u"Você não tem permissão para criar uma denúncia nova", u"error")
                        abort(401)
                except IntegrityError:
                    # Denuncia.numero must be unique.
                    db.session.rollback()
                    flash(u"Já existe uma denúncia com este número, altere o número e tente novamente.", u"error")
            else:
                db.session.commit()
                acao = request.form.get("acao")
                if acao==u"Continuar":
                    return redirect(url_for('vitima_edit',denuncia_id=d.id))
                flash(u'Denúncia %s atualizada '%d.id, u'success')
        else:
            flash(u"Verifique os erros no formulário abaixo.", u"error")
    else:
        f = DenunciaForm(obj=d)
        f.tipofonte_id.choices = option_tpfonte
        f.endereco.tipolocal.choices = option_tplocal
        if d:
            q = db.session.query(Cidade.cidade, Cidade.cidade)
            q = q.filter(Cidade.estado==d.estado)
            option_cidade = [(u"", u"")]
            option_cidade.extend([(x, y) for x,y in q.all()])
            f.endereco.cidade.choices = option_cidade
        if novo == False and d:
            # Address fields live on the Denuncia row itself, so they are
            # copied into the sub-form by hand.
            f.endereco.tipolocal.data = d.tipolocal
            f.endereco.endereco.data = d.endereco
            f.endereco.num.data = d.num
            f.endereco.complemento.data = d.complemento
            f.endereco.referencia.data = d.referencia
            f.endereco.bairro.data = d.bairro
            f.endereco.cidade.data = d.cidade
            f.endereco.cep.data = d.cep
            f.endereco.estado.data = d.estado
            f.endereco.pais.data = d.pais
    return render_template('denuncia.html', form=f, novo=novo, denuncia=d)
@app.route('/denuncia/<int:denuncia_id>/vitima/editar', methods=['GET', 'POST'])
@login_required
@checa_permissao('edita-vitima')
def vitima_edit(denuncia_id=None, vitima_id=None):
    """ Manage the victims of one complaint.

    The POST "acao" field multiplexes the operation: "Editar_<id>"
    pre-fills the form, "Deletar_<id>" removes a victim,
    "Adicionar"/"Alterar" create or update one, and "Continuar" moves
    on to the suspects page.
    """
    editar = None
    q = db.session.query(TipoVitima.id, TipoVitima.tipo)
    option_tipovitima = []
    option_tipovitima.extend([(str(x), y) for x,y in q.all()])
    if denuncia_id:
        d = db.session.query(Denuncia).get(denuncia_id)
    if request.method == 'POST':
        f = VitimaForm(request.form)
        if f:
            f.tipovitima.choices = option_tipovitima
        acao = request.form.get("acao")
        if u"Editar" in acao:
            # The button value encodes the victim id: "Editar_<id>".
            vitima_id = int(acao.split('_')[1]);
            if vitima_id:
                vitima = db.session.query(Vitima).get(vitima_id)
                if vitima:
                    f = VitimaForm()
                    f.denuncia_id.data = d.id
                    f.vitima_id.data = vitima.id
                    f.tipovitima.data = str(vitima.tipovitima_id)
                    f.qtdevitimas.data = vitima.qtdevitimas
                    # qtdevitimas == 0 encodes "amount not specified".
                    if vitima.qtdevitimas > 0:
                        f.qtdenaoespecificado.data = False
                    else:
                        f.qtdenaoespecificado.data = True
                    f.nome.data = vitima.nome
                    if vitima.nome == u'Não identificado':
                        f.nomenaoidentificado.data = True
                    else:
                        f.nomenaoidentificado.data = False
                    f.idade.data = vitima.idade
                    f.sexo.data = vitima.sexo
                    f.cor.data = vitima.cor
                    editar=True
        if u"Deletar" in acao:
            vitima_id = int(acao.split('_')[1]);
            if vitima_id:
                vitima = db.session.query(Vitima).get(vitima_id)
                if vitima:
                    try:
                        db.session.delete(vitima)
                        db.session.commit()
                        flash(u"Vítima deletada: %s" % vitima.nome, u"success")
                    except OperationalError:
                        # Deletion is blocked while violations still
                        # reference this victim; report which ones.
                        db.session.rollback()
                        msg = ''
                        for vio in vitima.violacoes:
                            msg = msg + ' ' + vio.tipoviolacao.microcategoria
                        if msg:
                            flash(u"A vítima %s não pode ser deletada, pois primeiro é necessário desfazer a relação de violação %s com a vítima-suspieto." % (vitima.nome,msg), u"error")
        if acao==u"Adicionar" or acao==u"Alterar":
            if d:
                if f.validate():
                    if acao==u"Alterar":
                        v = db.session.query(Vitima).get(f.vitima_id.data)
                        v.tipovitima_id = f.tipovitima.data
                    else:
                        v = Vitima(f.tipovitima.data)
                    if v:
                        v.nomenaoidentificado = f.nomenaoidentificado.data
                        if f.nomenaoidentificado.data:
                            v.nome = u'Não identificado'
                        else:
                            v.nome = f.nome.data
                        if f.idade.data:
                            v.idade = f.idade.data
                        else:
                            v.idade = 0
                        v.sexo = f.sexo.data
                        v.cor = f.cor.data
                        if f.qtdevitimas.data:
                            v.qtdevitimas = f.qtdevitimas.data
                        else:
                            v.qtdevitimas = 1
                        if f.qtdenaoespecificado.data:
                            # 0 encodes "amount not specified".
                            v.qtdevitimas = 0
                        if acao==u"Adicionar":
                            d.vitimas.append(v)
                        db.session.commit()
                        f = VitimaForm()
                        flash(u"Vítima alterada ou inserida: %s" % v.nome, u"success")
                else:
                    flash(u"Verifique os erros no formulário abaixo.", u"error")
        if acao==u"Continuar":
            return redirect(url_for('suspeito_edit',denuncia_id=denuncia_id))
    else:
        f = VitimaForm()
        f.denuncia_id.data = denuncia_id
        if f:
            f.tipovitima.choices = option_tipovitima
    # Enumerate the complaint's victims for the listing table.
    q = db.session.query(Vitima).filter(Vitima.denuncia_id==denuncia_id).order_by(Vitima.id)
    vitimas = q.all()
    i=1
    objs = []
    for v in vitimas:
        objs.append((i,v))
        i=i+1
    return render_template('vitimas.html', f=f, objs=objs, d=d, editar=editar)
@app.route('/denuncia/<int:denuncia_id>/suspeito/editar', methods=['GET', 'POST'])
@login_required
@checa_permissao('edita-suspeito')
def suspeito_edit(denuncia_id=None):
    """ Manage the suspects of one complaint.

    Mirrors vitima_edit: the POST "acao" field selects the operation
    ("Editar_<id>", "Deletar_<id>", "Adicionar"/"Alterar",
    "Continuar"). The tipo/instituicao/classificacao selects are
    dependent: each level filters the choices of the next.
    """
    editar = None
    q = db.session.query(TipoSuspeito.tipo,TipoSuspeito.tipo)
    q = q.group_by(TipoSuspeito.tipo)
    option_tipo = [(u"", u"")]
    option_tipo.extend([(x, y) for x,y in q.all()])
    if denuncia_id:
        d = db.session.query(Denuncia).get(denuncia_id)
    if request.method == 'POST':
        f = SuspeitoForm(request.form)
        acao = request.form.get("acao")
        if u"Editar" in acao:
            # The button value encodes the suspect id: "Editar_<id>".
            suspeito_id = int(acao.split('_')[1]);
            if suspeito_id:
                suspeito = db.session.query(Suspeito).get(suspeito_id)
                if suspeito:
                    f = SuspeitoForm()
                    if suspeito.tiposuspeito.instituicao:
                        # NOTE(review): this assigns the whole choices list
                        # to .data; the real value is set a few lines below.
                        f.tiposuspeito.data = option_tipo
                        q = db.session.query(TipoSuspeito.instituicao,TipoSuspeito.instituicao)
                        q = q.filter(TipoSuspeito.tipo == suspeito.tiposuspeito.tipo)
                        q = q.group_by(TipoSuspeito.instituicao)
                        q = q.order_by(TipoSuspeito.instituicao)
                        option_instituicao = [(u"", u"")]
                        option_instituicao.extend([(x, y) for x,y in q.all()])
                        f.instituicao.choices = option_instituicao
                        q = db.session.query(TipoSuspeito.id,TipoSuspeito.classificacao)
                        q = q.filter(TipoSuspeito.tipo == suspeito.tiposuspeito.tipo)
                        q = q.filter(TipoSuspeito.instituicao == suspeito.tiposuspeito.instituicao)
                        q = q.order_by(TipoSuspeito.classificacao)
                        option_classificao = [(u"", u"")]
                        option_classificao.extend([(x, y) for x,y in q.all()])
                        f.classificacao.choices = option_classificao
                    f.denuncia_id.data = d.id
                    f.suspeito_id.data = suspeito.id
                    f.tiposuspeito.data = suspeito.tiposuspeito.tipo
                    f.instituicao.data = suspeito.tiposuspeito.instituicao
                    f.nomeinstituicao.data = suspeito.nomeinstituicao
                    f.classificacao.data = str(suspeito.tiposuspeito_id)
                    # qtdesuspeitos == 0 encodes "amount not specified".
                    if suspeito.qtdesuspeitos > 0:
                        f.qtdenaoespecificado.data = False
                    else:
                        f.qtdenaoespecificado.data = True
                    f.qtdesuspeitos.data = str(suspeito.qtdesuspeitos)
                    f.nome.data = suspeito.nome
                    if suspeito.nome == u'Não identificado':
                        f.nomenaoidentificado.data = True
                    else:
                        f.nomenaoidentificado.data = False
                    f.idade.data = suspeito.idade
                    f.sexo.data = suspeito.sexo
                    f.cor.data = suspeito.cor
                    editar=True
        if u"Deletar" in acao:
            suspeito_id = int(acao.split('_')[1]);
            if suspeito_id:
                suspeito = db.session.query(Suspeito).get(suspeito_id)
                if suspeito:
                    try:
                        db.session.delete(suspeito)
                        db.session.commit()
                        flash(u"Suspeito deletado: %s" % suspeito.nome, u"success")
                    except OperationalError:
                        # Deletion is blocked while violations still
                        # reference this suspect; report which ones.
                        db.session.rollback()
                        msg = ''
                        for vio in suspeito.violacoes:
                            msg = msg + ' ' + vio.tipoviolacao.microcategoria
                        if msg:
                            flash(u"O suspeito %s não pode ser deletado, pois primeiro é necessário desfazer a relação de violação %s com a vítima-suspeito." % (suspeito.nome,msg), u"error")
        if acao==u"Adicionar" or acao==u"Alterar":
            # Rebuild the dependent select choices from the submitted
            # values so that validation accepts them.
            option_instituicao = None
            option_classificacao = None
            if f.tiposuspeito.data:
                q = db.session.query(TipoSuspeito.instituicao,TipoSuspeito.instituicao)
                q = q.filter(TipoSuspeito.tipo == f.tiposuspeito.data)
                q = q.group_by(TipoSuspeito.instituicao)
                option_instituicao = [(u"", u"")]
                option_instituicao.extend([(x, y) for x,y in q.all()])
            if f.instituicao.data:
                q = db.session.query(TipoSuspeito.id,TipoSuspeito.classificacao)
                q = q.filter(TipoSuspeito.tipo == f.tiposuspeito.data)
                q = q.filter(TipoSuspeito.instituicao == f.instituicao.data)
                option_classificacao = [(u"", u"")]
                option_classificacao.extend([(str(x), y) for x,y in q.all()])
            if option_tipo:
                f.tiposuspeito.choices = option_tipo
            if option_instituicao:
                f.instituicao.choices = option_instituicao
            if option_classificacao:
                f.classificacao.choices = option_classificacao
            if f.validate():
                if acao==u"Alterar":
                    s = db.session.query(Suspeito).get(f.suspeito_id.data)
                else:
                    s = Suspeito(f.tiposuspeito.data)
                if s:
                    s.tiposuspeito_id = f.classificacao.data
                    s.nomeinstituicao = f.nomeinstituicao.data
                    s.nomenaoidentificado = f.nomenaoidentificado.data
                    if f.qtdesuspeitos.data:
                        s.qtdesuspeitos = f.qtdesuspeitos.data
                    else:
                        s.qtdesuspeitos = 1
                    if f.qtdenaoespecificado.data:
                        # 0 encodes "amount not specified".
                        s.qtdesuspeitos = 0
                    if f.nomenaoidentificado.data:
                        s.nome =u"Não identificado"
                    else:
                        s.nome = f.nome.data
                    s.sexo = f.sexo.data
                    s.cor = f.cor.data
                    s.idade = f.idade.data
                    d = db.session.query(Denuncia).get(denuncia_id)
                    if acao==u"Adicionar":
                        d.suspeitos.append(s)
                    db.session.commit()
                    f = SuspeitoForm()
                    flash(u"Suspeito inserido: %s" % s.nome, u"success")
            else:
                flash(u"Verifique os erros no formulário abaixo.", u"error")
        if acao==u"Continuar":
            return redirect(url_for('relacionar_vitima_suspeito',denuncia_id=denuncia_id))
        # Alternative removal path driven by the 'suspeito-remove' field.
        suspeito_id = request.form.get("suspeito-remove")
        if suspeito_id:
            s = db.session.query(Suspeito).get(suspeito_id)
            db.session.delete(s)
            db.session.commit()
            flash(u"Suspeito removido: %s" % s.nome, u"success")
    else:
        f = SuspeitoForm()
    if denuncia_id:
        q = db.session.query(Suspeito).filter(Suspeito.denuncia_id==denuncia_id).order_by(Suspeito.id)
        suspeitos = q.all()
    if f:
        f.tiposuspeito.choices = option_tipo
        f.denuncia_id.data = denuncia_id
    # Enumerate the complaint's suspects for the listing table.
    i=1
    objs = []
    for s in suspeitos:
        objs.append((i,s))
        i=i+1
    return render_template('suspeitos.html', form=f, objs=objs, d=d, editar=editar)
@app.route('/denuncia/<int:denuncia_id>/relacionar/editar', methods=['GET', 'POST'])
@login_required
@checa_permissao('edita-violacoes')
def relacionar_vitima_suspeito(denuncia_id=None):
    """ Relate victims to suspects through violation types.

    "Relacionar" creates one Violacao (or Homicidio, for micro-category
    id 2) per selected (micro-category, suspect, victim) triple;
    "Continuar" proceeds to the finalization page once at least one
    violation exists.
    """
    q = db.session.query(TipoViolacao.macrocategoria, TipoViolacao.macrocategoria)
    q = q.group_by(TipoViolacao.macrocategoria)
    q = q.order_by(TipoViolacao.macrocategoria)
    option_tpviolacao = ([(x, y) for x,y in q.all()])
    if denuncia_id:
        denuncia = db.session.query(Denuncia).get(denuncia_id)
        # Build the victim/suspect multi-select labels: "[n] name, ...".
        vitimas = db.session.query(Vitima).filter(Vitima.denuncia_id==denuncia_id).order_by(Vitima.id).all()
        option_vitima = []
        i=1
        for v in vitimas:
            option_vitima.append([str(v.id), '['+str(i)+'] ' + v.nome + ', ' + str(v.idade) + ' anos, ' + v.tipovitima.tipo])
            i=i+1
        suspeitos = db.session.query(Suspeito).filter(Suspeito.denuncia_id==denuncia_id).order_by(Suspeito.id).all()
        option_suspeito = []
        i=1
        for s in suspeitos:
            option_suspeito.append([str(s.id), '['+str(i)+'] ' + s.nome + ', ' + s.tiposuspeito.instituicao + ' , ' + s.tiposuspeito.classificacao])
            i=i+1
    if request.method == 'POST':
        f = RelacionarForm(request.form)
        acao = request.form.get("acao")
        if acao==u"Relacionar":
            f.macrocategoria.choices = option_tpviolacao
            if f.macrocategoria:
                # Micro-category choices depend on the chosen macro-category.
                q = db.session.query(TipoViolacao.id, TipoViolacao.microcategoria)
                q = q.filter(TipoViolacao.macrocategoria==f.macrocategoria.data)
                q = q.order_by(TipoViolacao.microcategoria)
                option_microcategoria = ([(str(x), y) for x,y in q.all()])
                if option_microcategoria:
                    f.microcategoria.choices = option_microcategoria
            if option_vitima:
                f.vitimas.choices = option_vitima
            if option_suspeito:
                f.suspeitos.choices = option_suspeito
            if f.validate():
                gravou = False
                # Cartesian product: one record per selected triple.
                for micro in f.microcategoria.data:
                    for suspeito in f.suspeitos.data:
                        for vitima in f.vitimas.data:
                            if int(micro) ==2: # homicide gets its own subclass
                                r = Homicidio(denuncia_id,micro,suspeito,vitima)
                            else:
                                r = Violacao(denuncia_id,micro,suspeito,vitima)
                            db.session.add(r)
                            gravou = True
                try:
                    db.session.commit()
                    if gravou:
                        flash(u"Relações gravadas", u"success")
                except IntegrityError:
                    # Unique constraint: one violation type per
                    # victim-suspect pair.
                    db.session.rollback()
                    flash(u"Não é permitida mais de uma mesma violação para o par suspeito-vítima", u"error")
        if acao==u"Continuar":
            q = db.session.query(Violacao).filter(Violacao.denuncia_id==denuncia_id)
            v = q.all()
            if v:
                return redirect(url_for('finalizar_denuncia',denuncia_id=denuncia_id))
            else:
                flash(u"É necessário relatar uma violação para continuar a denúncia.", u"error")
    else:
        f = RelacionarForm()
    # Group existing violations by "[vid]victim|[sid]suspect" for display.
    q = db.session.query(Violacao).filter(Violacao.denuncia_id==denuncia_id)
    q = q.options(joinedload_all(Violacao.vitima))
    q = q.options(joinedload_all(Violacao.suspeito))
    q = q.options(joinedload_all(Violacao.tipoviolacao))
    objs = q.all()
    vitsup = {}
    for v in objs:
        if ('['+ str(v.vitima.id) +']' + v.vitima.nome + '|' + '['+ str(v.suspeito.id) + ']' + v.suspeito.nome) in vitsup:
            vitsup[('['+ str(v.vitima.id) +']' + v.vitima.nome + '|' + '['+ str(v.suspeito.id) + ']' + v.suspeito.nome)].append(v)
        else:
            vitsup[('['+ str(v.vitima.id) +']' + v.vitima.nome + '|' + '['+ str(v.suspeito.id) + ']' + v.suspeito.nome)] = [v]
    f.denuncia_id.data = denuncia_id
    f.macrocategoria.choices = option_tpviolacao
    if option_vitima:
        f.vitimas.choices = option_vitima
    if option_suspeito:
        f.suspeitos.choices = option_suspeito
    return render_template('relacionarviolacoes.html', f=f, vitsup=vitsup, denuncia=denuncia)
@app.route('/denuncia/<int:denuncia_id>/finalizar', methods=['GET', 'POST'])
@login_required
@checa_permissao('edita-anexos')
def finalizar_denuncia(denuncia_id=None):
    """ Final step of complaint registration: manage attachments.

    "Salvar" stores a new Anexo plus its uploaded file; "Concluir" ends
    the flow and sends the user to the workflow listing.
    """
    objs = []
    if denuncia_id:
        d = db.session.query(Denuncia).get(denuncia_id)
    if request.method == u'POST':
        f = FinalizarForm(request.form)
        acao = request.form.get("acao")
        if acao==u"Concluir":
            flash(u'A denúncia foi cadastrada, agora deve ser aberta para receber os encaminhamentos.', u'success')
            return redirect(url_for('workflow'))
        if f.validate():
            if acao==u"Salvar":
                try:
                    a = Anexo(denuncia_id)
                    if f.descricaoanexo.data:
                        a.descricaoanexo = f.descricaoanexo.data
                    else:
                        a.descricaoanexo = u'Sem descrição'
                    db.session.add(a)
                    # Flush so a.id exists for the generated file name.
                    db.session.flush()
                    ff = request.files.get('arquivo', None)
                    if ff:
                        filename = anexos_upload.save(ff, name="anexo_%04d." % a.id)
                        a.arquivo = filename
                    db.session.commit()
                    flash(u'O arquivo foi enviado!', u'success')
                except UploadNotAllowed:
                    # File extension not in the upload whitelist.
                    db.session.rollback()
                    flash(u'O upload deste arquivo não é permitido devido questões de segurança, altere o tipo do arquivo para as extensões permitidas.', u'error')
    else:
        f = FinalizarForm()
    if denuncia_id:
        # Listing of existing attachments with their download URLs.
        q = db.session.query(Anexo).filter(Anexo.denuncia_id==denuncia_id)
        anexos = q.all()
        for a in anexos:
            if a.arquivo:
                url = anexos_upload.url(a.arquivo)
                objs.append([a,url])
    f.denuncia_id.data = denuncia_id
    return render_template('finalizar.html', f=f, objs=objs, d=d)
@app.route('/denuncia/<int:denuncia_id>/exclui/<int:anexo_id>', methods=['GET', 'POST'])
@login_required
@checa_permissao('edita-anexos')
def exclui_anexo(denuncia_id=None,anexo_id=None):
    """ Delete one attachment of a complaint, then go back to the
    finalization page of that complaint.
    """
    alvo = db.session.query(Anexo).get(anexo_id)
    if alvo is not None:
        db.session.delete(alvo)
        db.session.commit()
        flash(u"Anexo apagado com sucesso.", u"success")
    return redirect(url_for('finalizar_denuncia',denuncia_id=denuncia_id))
@app.route('/denuncia/<int:denuncia_id>/violacao/homicidio/<int:homicidio_id>', methods=['GET', 'POST'])
@login_required
@checa_permissao('edita-violacoes')
def homicidio_edit(denuncia_id=None,homicidio_id=None):
    """ Edit the extra data of one homicide violation.

    GET pre-fills the form from the Homicidio record; POST "Salvar"
    rewrites its means-used rows and scalar fields (plus an optional
    file upload); "Remover anexo" detaches the stored file.
    """
    f = None
    if denuncia_id:
        d = db.session.query(Denuncia).get(denuncia_id)
    q = db.session.query(TipoMeioUtilizado.id, TipoMeioUtilizado.meio)
    option_tpmeio = ([(str(x), y) for x,y in q.all()])
    acao = request.form.get("acao")
    if homicidio_id:
        h = db.session.query(Homicidio).get(homicidio_id)
        f = HomicidioForm()
        f.meioutilizado.choices = option_tpmeio
        if h:
            # Pre-select the means already linked to this homicide.
            aux = []
            for m in h.meiosutilizados:
                aux.append(str(m.tipomeioutilizado_id))
            if aux:
                f.meioutilizado.data = aux
            f.rco.data = h.rco
            f.bo.data= h.bo
            f.ip.data = h.ip
            f.situacao.data = h.situacao
            f.reds.data = h.reds
            f.prfato.data = h.prfato
            f.obs.data = h.obs
    if request.method == u'POST':
        f = HomicidioForm(request.form)
        f.meioutilizado.choices = option_tpmeio
        if f.validate():
            if acao==u"Salvar":
                if h:
                    # Replace the means-used rows wholesale.
                    db.session.query(HomicidioMeioUtilizado).filter_by(homicidio_id=h.id).delete()
                    for meio in f.meioutilizado.data:
                        mu = HomicidioMeioUtilizado(h.id,int(meio))
                        db.session.add(mu)
                    h.rco = f.rco.data
                    h.bo = f.bo.data
                    h.ip = f.ip.data
                    h.situacao = f.situacao.data
                    h.reds = f.reds.data
                    h.prfato = f.prfato.data
                    h.obs = f.obs.data
                    try:
                        ff = request.files.get('arquivo', None)
                        if ff:
                            filename = anexos_upload.save(ff, name="homicidio_%04d." % h.id)
                            h.arquivo = filename
                        db.session.commit()
                        flash(u'O arquivo foi enviado!', u'success')
                    except UploadNotAllowed:
                        # File extension not in the upload whitelist.
                        db.session.rollback()
                        flash(u'O upload deste arquivo não é permitido devido questões de segurança, altere o tipo do arquivo para as extensões permitidas.', u'error')
                    db.session.commit()
                    flash(u'Dados do homicídio atualizado com sucesso!', u'success')
                else:
                    flash(u'Houve um problema ao relatar esta violação de homicídio. Refaça o relato.', u'error')
            else:
                if acao==u"Remover anexo":
                    if h:
                        h.arquivo = None
                        db.session.commit()
        else:
            flash(u"Verifique os erros no formulário abaixo.", u"error")
    return render_template('homicidio.html', f=f, d=d, homicidio=h)
@app.route('/denuncia/pesquisar', methods=['GET', 'POST'])
@login_required
@checa_permissao('pesquisa-denuncia')
def pesquisar_denuncia():
    """ Search complaints by number, dates, place, keyword and status.

    POST applies every non-empty filter and renders a paginated result
    set (10 rows per page); GET just renders the empty search form.
    """
    p = None
    q = db.session.query(Status.id, Status.status)
    option_status = ([(str(x), y) for x,y in q.all()])
    if request.method == u'POST':
        f = PesquisarForm(request.form)
        # City choices depend on the submitted state.
        q = db.session.query(Cidade.cidade, Cidade.cidade)
        q = q.filter(Cidade.estado==f.estado.data)
        option_cidade = ([(x, y) for x,y in q.all()])
        f.cidade.choices = option_cidade
        f.status.choices = option_status
        # "0" acts as the wildcard entry for state and city.
        if ('0', 'Qualquer estado') not in f.estado.choices:
            f.estado.choices.insert(0, ('0', 'Qualquer estado'))
        if ('0', 'Qualquer cidade') not in f.cidade.choices:
            f.cidade.choices.insert(0, ('0', 'Qualquer cidade'))
        if f.validate():
            q = db.session.query(Denuncia)
            if f.numerodenuncia.data:
                q = q.filter(Denuncia.numero==f.numerodenuncia.data)
            # Date ranges: inclusive start, exclusive end (+1 day).
            if f.dtcriacaoinicio.data:
                q = q.filter(Denuncia.dtcriacao >= f.dtcriacaoinicio.data)
            if f.dtcriacaofim.data:
                q = q.filter(Denuncia.dtcriacao < (f.dtcriacaofim.data + datetime.timedelta(days=1)) )
            if f.dtocorinicio.data:
                q = q.filter(Denuncia.dtdenuncia >= f.dtocorinicio.data)
            if f.dtocorfim.data:
                q = q.filter(Denuncia.dtdenuncia < (f.dtocorfim.data + datetime.timedelta(days=1)) )
            if f.estado.data:
                if not f.estado.data == "0":
                    q = q.filter(Denuncia.estado==f.estado.data)
            if f.cidade.data:
                if not f.cidade.data == "0":
                    q = q.filter(Denuncia.cidade==f.cidade.data)
            if f.palavrachave.data:
                q = q.filter(or_(Denuncia.resumo.contains(f.palavrachave.data),Denuncia.descricao.contains(f.palavrachave.data)))
            if f.status.data:
                q = q.filter(Denuncia.status_id==f.status.data)
            try:
                page = int(request.form.get("pagina", 1))
            except ValueError:
                page = 1
            p = Paginator(q, cur_page=page, per_page=10)
        else:
            flash(u"Verifique os erros no formulário abaixo.", u"error")
    else:
        f = PesquisarForm()
        f.status.choices = option_status
        if ('0', 'Qualquer estado') not in f.estado.choices:
            f.estado.choices.insert(0, ('0', 'Qualquer estado'))
        if ('0', 'Qualquer cidade') not in f.cidade.choices:
            f.cidade.choices.insert(0, ('0', 'Qualquer cidade'))
    return render_template('pesquisar.html', f=f, paginator=p)
@app.route('/denuncia/workflow/', methods=['GET', 'POST'])
@app.route('/denuncia/workflow/<int:denuncia_id>/<string:acao>/', methods=['GET', 'POST'])
@login_required
@checa_permissao('gerencia-workflow')
def workflow(denuncia_id=0,acao=None):
    """ Workflow listing and state changes for complaints.

    POST filters the listing much like pesquisar_denuncia (plus
    open-referral deadline filters). The second route applies an action
    to one complaint: "abrir" opens it (status 2 + history entry),
    "reabrir" reopens a finished one (status 4 or 5).
    """
    p = None
    d = None
    q = db.session.query(Status.id, Status.status)
    option_status = ([(str(x), y) for x,y in q.all()])
    if request.method == 'POST':
        f = WorkflowForm(request.form)
        f.status.choices = option_status
        # City choices depend on the submitted state.
        q = db.session.query(Cidade.cidade, Cidade.cidade)
        q = q.filter(Cidade.estado==f.estado.data)
        option_cidade = ([(x, y) for x,y in q.all()])
        f.cidade.choices = option_cidade
        f.status.choices = option_status
        # "0" acts as the wildcard entry for state and city.
        if ('0', 'Qualquer estado') not in f.estado.choices:
            f.estado.choices.insert(0, ('0', 'Qualquer estado'))
        if ('0', 'Qualquer cidade') not in f.cidade.choices:
            f.cidade.choices.insert(0, ('0', 'Qualquer cidade'))
        q = db.session.query(Denuncia)
        if f.validate():
            if f.numerodenuncia.data:
                q = q.filter(Denuncia.numero==f.numerodenuncia.data)
            # Date ranges: inclusive start, exclusive end (+1 day).
            if f.dtcriacaoinicio.data:
                q = q.filter(Denuncia.dtcriacao >= f.dtcriacaoinicio.data)
            if f.dtcriacaofim.data:
                q = q.filter(Denuncia.dtcriacao < (f.dtcriacaofim.data + datetime.timedelta(days=1)) )
            if f.dtocorinicio.data:
                q = q.filter(Denuncia.dtdenuncia >= f.dtocorinicio.data)
            if f.dtocorfim.data:
                q = q.filter(Denuncia.dtdenuncia < (f.dtocorfim.data + datetime.timedelta(days=1)) )
            if f.estado.data:
                if not f.estado.data == "0":
                    q = q.filter(Denuncia.estado==f.estado.data)
            if f.cidade.data:
                if not f.cidade.data == "0":
                    q = q.filter(Denuncia.cidade==f.cidade.data)
            if f.palavrachave.data:
                q = q.filter(or_(Denuncia.resumo.contains(f.palavrachave.data),Denuncia.descricao.contains(f.palavrachave.data)))
            if f.status.data:
                q = q.filter(Denuncia.status_id.in_(f.status.data))
            if f.dtlimiteinicio.data or f.dtlimitefim.data:
                # Deadline filters only consider referrals still waiting
                # for an answer (dtretorno is NULL).
                q = q.options(joinedload_all(Denuncia.historico))
                if f.dtlimiteinicio.data:
                    q = q.filter(Encaminhamento.dtretorno == None)
                    q = q.filter(Encaminhamento.dtlimite >= f.dtlimiteinicio.data)
                if f.dtlimitefim.data:
                    q = q.filter(Encaminhamento.dtretorno == None)
                    q = q.filter(Encaminhamento.dtlimite < f.dtlimitefim.data + datetime.timedelta(days=1))
        else:
            flash(u"Verifique os erros no formulário abaixo.", u"error")
        #q = q.options(joinedload_all(Denuncia.historico))
        try:
            page = int(request.form.get("pagina", 1))
        except ValueError:
            page = 1
        p = Paginator(q, cur_page=page, per_page=10)
    else:
        f = WorkflowForm()
        f.status.choices = option_status
        if ('0', 'Qualquer estado') not in f.estado.choices:
            f.estado.choices.insert(0, ('0', 'Qualquer estado'))
        if ('0', 'Qualquer cidade') not in f.cidade.choices:
            f.cidade.choices.insert(0, ('0', 'Qualquer cidade'))
    if denuncia_id:
        d = db.session.query(Denuncia).get(denuncia_id)
        if d:
            if acao==u"abrir":
                # Show only the complaint being acted on.
                q = db.session.query(Denuncia)
                q = q.filter(Denuncia.id==d.id)
                p = Paginator(q, cur_page=1, per_page=30)
                if _controle_status(d,2):
                    d.status_id = 2 # Aberta (open)
                    h = Historico(d.id)
                    h.acao_id = 1 # action: open the complaint
                    db.session.add(h)
                    db.session.commit()
                else:
                    flash(u"A demanda %s já está aberta." %d.resumo, u"notice")
            else:
                if acao==u"reabrir":
                    # Only finished complaints (status 4/5) can be reopened.
                    if d.status_id ==4 or d.status_id ==5:
                        h = Historico(d.id)
                        h.acao_id = 6 # action: reopen the complaint
                        db.session.add(h)
                        d.status_id = 2
                        db.session.commit()
                        flash(u"A demanda %s reaberta." %d.resumo, u"notice")
                    else:
                        flash(u"Esta ação não é possível.", u"error")
                else:
                    flash(u"Esta ação não existe, favor tentar efetuar outra ação.", u"error")
    # Reference dates for deadline highlighting in the template.
    dthoje = datetime.datetime.today()
    dtlimite = dthoje + datetime.timedelta(days=30)
    dtvencido = dthoje - datetime.timedelta(days=1)
    return render_template('workflow.html', f=f, paginator=p, dthoje=dthoje, dtlimite=dtlimite, dtvencido=dtvencido)
def date_handler(obj):
    """ JSON serialization fallback: ISO-format date/datetime objects,
    pass everything else through unchanged.
    """
    if hasattr(obj, 'isoformat'):
        return obj.isoformat()
    return obj
@app.route('/denuncia/<int:denuncia_id>/encaminhamento/', methods=['GET', 'POST'])
@login_required
@checa_permissao('faz-encaminhamento')
def fazer_encaminhamento(denuncia_id=None):
    """ First step of creating a referral (encaminhamento).

    POST stores the common fields (organ, type, dates) in the session
    and dispatches to the type-specific editing page; GET renders the
    selection form.
    """
    d = None
    if denuncia_id:
        d = db.session.query(Denuncia).get(denuncia_id)
    q = db.session.query(TipoEncaminhamento.id, TipoEncaminhamento.tipo)
    option_tpencaminhamento = ([(str(x), y) for x,y in q.all()])
    q = db.session.query(Orgao.id, Orgao.orgao).order_by(Orgao.orgao)
    option_orgao = ([(str(x), y) for x,y in q.all()])
    if request.method == 'POST':
        f = EncaminhamentoForm(request.form)
        f.tipo.choices = option_tpencaminhamento
        f.orgao.choices = option_orgao
        if f.validate():
            # Stash the common referral data in the session; the next
            # page reads it back.
            encaminhamento = {
                'orgao_id': f.orgao.data,
                'tipo_id': f.tipo.data,
                'dtenvio': (f.dtenvio.data).strftime('%d/%m/%Y'),
                'dtlimite': (f.dtlimite.data).strftime('%d/%m/%Y')
            }
            session['encaminhamento'] = encaminhamento
            if int(f.tipo.data) == 1: # official letter (oficio)
                return redirect(url_for('encaminhamento_oficio',denuncia_id=d.id))
            elif int(f.tipo.data) == 2: # phone call (telefonema)
                return redirect(url_for('encaminhamento_telefonema',denuncia_id=d.id))
            elif int(f.tipo.data) == 3: # meeting (reuniao)
                return redirect(url_for('encaminhamento_reuniao',denuncia_id=d.id))
            elif int(f.tipo.data) == 4: # email
                return redirect(url_for('encaminhamento_email',denuncia_id=d.id))
            else: # any other type: generic referral
                return redirect(url_for('encaminhamento_generico',denuncia_id=d.id))
        else:
            flash(u"Verifique os erros no formulário abaixo.", u"error")
    else:
        f = EncaminhamentoForm()
        f.tipo.choices = option_tpencaminhamento
        f.orgao.choices = option_orgao
    return render_template('encaminhamento.html', f=f)
def _controle_status(denuncia=None, status_id=None):
    """Validate a status transition for a denuncia.

    Returns status_id when both arguments are truthy and the new status is
    strictly greater than the denuncia's current one (status ids only move
    forward); returns None in every other case.
    """
    if denuncia and status_id and status_id > denuncia.status_id:
        return status_id
    return None
def _altera_status(denuncia_id=None, status_id=None):
    """Advance a Denuncia to status_id when the transition is allowed.

    Returns True after committing the change; False when denuncia_id is
    falsy or _controle_status rejects the transition.
    """
    if not denuncia_id:
        return False
    denuncia = db.session.query(Denuncia).get(denuncia_id)
    if not _controle_status(denuncia, status_id):
        return False
    denuncia.status_id = status_id
    db.session.commit()
    return True
@app.route('/denuncia/<int:denuncia_id>/encaminhamento/oficio/<int:oficio_id>/editar', methods=['GET', 'POST'])
@app.route('/denuncia/<int:denuncia_id>/encaminhamento/oficio/novo', methods=['GET', 'POST'])
@login_required
@checa_permissao('faz-encaminhamento')
def encaminhamento_oficio(denuncia_id=None, oficio_id=None):
    """Create ("novo") or edit an oficio-type encaminhamento.

    Creation consumes the dict stored in session['encaminhamento'] by
    fazer_encaminhamento(); editing loads the existing Oficio by id.
    An optional file can be attached via the 'arquivo' upload field.
    """
    denuncia = None
    o = None
    editar = False
    f = OficioForm()
    q = db.session.query(Orgao.id, Orgao.orgao).order_by(Orgao.orgao)
    option_orgao = ([(str(x), y) for x,y in q.all()])
    if oficio_id:
        # Edit mode: pre-fill the form from the stored Oficio.
        o = db.session.query(Oficio).get(oficio_id)
        f.orgao.choices = option_orgao
        if o:
            editar = True
            f.orgao.data = str(o.orgao_id)
            f.dtenvio.data = o.dtenvio
            f.dtlimite.data = o.dtlimite
            f.numero.data = o.numero
            f.assunto.data = o.assunto
            f.obs.data = o.obs
    acao = request.form.get("acao")
    if request.method == 'POST':
        f = OficioForm(request.form)
        if oficio_id:
            f.orgao.choices = option_orgao
        else:
            # New records take orgao and dates from the session dict, so
            # those fields are removed from validation.
            del f.orgao
            del f.dtenvio
            del f.dtlimite
        if f.validate():
            if acao==u"Salvar":
                if o:
                    # Update an existing Oficio (optionally replacing the
                    # attachment).
                    try:
                        o.orgao_id = f.orgao.data
                        o.dtenvio = f.dtenvio.data
                        o.dtlimite = f.dtlimite.data
                        o.numero = f.numero.data
                        o.assunto = f.assunto.data
                        o.obs = f.obs.data
                        ff = request.files.get('arquivo', None)
                        if ff:
                            # Trailing dot: the uploads extension appends the
                            # original file extension to the name.
                            filename = anexos_upload.save(ff, name="oficio_%04d." % o.id)
                            o.arquivo = filename
                        db.session.commit()
                        flash(u"Ofício cadastrado com sucesso", u"success")
                        return redirect(url_for('timeline',denuncia_id=denuncia_id))
                    except UploadNotAllowed:
                        db.session.rollback()
                        flash(u'O upload deste arquivo não é permitido devido questões de segurança, altere o tipo do arquivo para as extensões permitidas.', u'error')
                else:
                    # Create mode: build the Historico entry + Oficio from the
                    # session dict prepared by fazer_encaminhamento().
                    # NOTE(review): raises KeyError if 'encaminhamento' is
                    # missing from the session (e.g. direct URL access).
                    if session['encaminhamento']:
                        dictenc = session.pop('encaminhamento')
                        if dictenc:
                            try:
                                h = Historico(denuncia_id)
                                h.acao_id = 2 # Fazer um encaminhamento
                                db.session.add(h)
                                db.session.flush()
                                o = Oficio(h.id)
                                o.orgao_id = dictenc['orgao_id']
                                o.tipo_id = dictenc['tipo_id']
                                o.dtenvio = datestring_to_date(dictenc['dtenvio'])
                                o.dtlimite = datestring_to_date(dictenc['dtlimite'])
                                o.numero = f.numero.data
                                o.assunto = f.assunto.data
                                o.obs = f.obs.data
                                o.dtcriacao = func.sysdate()
                                db.session.add(o)
                                db.session.flush()
                                ff = request.files.get('arquivo', None)
                                if ff:
                                    filename = anexos_upload.save(ff, name="oficio_%04d." % o.id)
                                    o.arquivo = filename
                                    flash(u'O arquivo foi enviado!', u'success')
                            except UploadNotAllowed:
                                db.session.rollback()
                                flash(u'O upload deste arquivo não é permitido devido questões de segurança, altere o tipo do arquivo para as extensões permitidas.', u'error')
                            # Note: commit/redirect run even after the
                            # UploadNotAllowed rollback above.
                            db.session.commit()
                            _altera_status(denuncia_id,3)
                            flash(u"Ofício cadastrado com sucesso", u"success")
                            return redirect(url_for('timeline',denuncia_id=denuncia_id))
                        else:
                            flash(u"Retorne ao histórico e tente novamente", u"success")
            if acao==u"Remover anexo":
                if o:
                    o.arquivo = None
                    db.session.commit()
                    flash(u"Arquivo deletado!", u"success")
        else:
            for field, errors in f.errors.items():
                for error in errors:
                    flash(u'Erro %s no campo %s' %(field,error), u"error")
    return render_template('encaminhamento_oficio.html', f=f, oficio=o, editar=editar)
@app.route('/denuncia/<int:denuncia_id>/encaminhamento/telefonema/<int:telefonema_id>/editar', methods=['GET', 'POST'])
@app.route('/denuncia/<int:denuncia_id>/encaminhamento/telefonema/novo', methods=['GET', 'POST'])
@login_required
@checa_permissao('faz-encaminhamento')
def encaminhamento_telefonema(denuncia_id=None, telefonema_id=None):
    """Create ("novo") or edit a telefonema-type encaminhamento.

    Creation consumes session['encaminhamento'] (set by
    fazer_encaminhamento()); editing loads the existing record by id.
    Unlike the oficio/reuniao views there is no file attachment here.
    """
    denuncia = None
    t = None
    editar = False
    f = TelefonemaForm()
    q = db.session.query(Orgao.id, Orgao.orgao).order_by(Orgao.orgao)
    option_orgao = ([(str(x), y) for x,y in q.all()])
    if telefonema_id:
        # Edit mode: pre-fill the form.
        # NOTE(review): queries the Encaminhamento base class here, while the
        # sibling views query their concrete subclass — presumably resolved
        # to a Telefonema via polymorphic loading; confirm.
        t = db.session.query(Encaminhamento).get(telefonema_id)
        f.orgao.choices = option_orgao
        if t:
            editar = True
            f.orgao.data = str(t.orgao_id)
            f.dtenvio.data = t.dtenvio
            f.dtlimite.data = t.dtlimite
            f.numero.data = t.numero
            f.destinatario.data = t.destinatario
            f.obs.data = t.obs
    acao = request.form.get("acao")
    if request.method == 'POST':
        f = TelefonemaForm(request.form)
        if telefonema_id:
            f.orgao.choices = option_orgao
        else:
            # New records take orgao and dates from the session dict.
            del f.orgao
            del f.dtenvio
            del f.dtlimite
        if f.validate():
            if acao==u"Salvar":
                if t:
                    # Update an existing Telefonema.
                    t.orgao_id = f.orgao.data
                    t.dtenvio = f.dtenvio.data
                    t.dtlimite = f.dtlimite.data
                    t.numero = f.numero.data
                    t.destinatario = f.destinatario.data
                    t.obs = f.obs.data
                    db.session.commit()
                    flash(u"Telefonema cadastrado com sucesso", u"success")
                    return redirect(url_for('timeline',denuncia_id=denuncia_id))
                else:
                    # Create mode: Historico entry + Telefonema from the
                    # session dict. KeyError if the session entry is absent.
                    if session['encaminhamento']:
                        dictenc = session.pop('encaminhamento')
                        if dictenc:
                            h = Historico(denuncia_id)
                            h.acao_id = 2 # Fazer um encaminhamento
                            db.session.add(h)
                            db.session.flush()
                            t = Telefonema(h.id)
                            t.orgao_id = dictenc['orgao_id']
                            t.tipo_id = dictenc['tipo_id']
                            t.dtenvio = datestring_to_date(dictenc['dtenvio'])
                            t.dtlimite = datestring_to_date(dictenc['dtlimite'])
                            t.numero = f.numero.data
                            t.destinatario = f.destinatario.data
                            t.obs = f.obs.data
                            t.dtcriacao = func.sysdate()
                            db.session.add(t)
                            db.session.commit()
                            _altera_status(denuncia_id,3)
                            flash(u"Telefonema cadastrado com sucesso", u"success")
                            return redirect(url_for('timeline',denuncia_id=denuncia_id))
                        else:
                            flash(u"Retorne ao histórico e tente novamente", u"success")
        else:
            for field, errors in f.errors.items():
                for error in errors:
                    flash(u'Erro %s no campo %s' %(field,error), u"error")
    return render_template('encaminhamento_telefonema.html', f=f, editar=editar)
@app.route('/denuncia/<int:denuncia_id>/encaminhamento/reuniao/<int:reuniao_id>/editar', methods=['GET', 'POST'])
@app.route('/denuncia/<int:denuncia_id>/encaminhamento/reuniao/novo', methods=['GET', 'POST'])
@login_required
@checa_permissao('faz-encaminhamento')
def encaminhamento_reuniao(denuncia_id=None, reuniao_id=None):
    """Create ("novo") or edit a reuniao-type encaminhamento.

    Creation consumes session['encaminhamento'] (set by
    fazer_encaminhamento()); editing loads the existing Reuniao by id.
    An optional file can be attached via the 'arquivo' upload field.
    """
    denuncia = None
    r = None
    editar = False
    f = ReuniaoForm()
    q = db.session.query(Orgao.id, Orgao.orgao).order_by(Orgao.orgao)
    option_orgao = ([(str(x), y) for x,y in q.all()])
    if reuniao_id:
        # Edit mode: pre-fill the form from the stored Reuniao.
        r = db.session.query(Reuniao).get(reuniao_id)
        f.orgao.choices = option_orgao
        if r:
            editar = True
            f.orgao.data = str(r.orgao_id)
            f.dtenvio.data = r.dtenvio
            f.dtlimite.data = r.dtlimite
            f.pauta.data = r.pauta
            f.participantes.data = r.participantes
            f.obs.data = r.obs
    acao = request.form.get("acao")
    if request.method == 'POST':
        f = ReuniaoForm(request.form)
        if reuniao_id:
            f.orgao.choices = option_orgao
        else:
            # New records take orgao and dates from the session dict.
            del f.orgao
            del f.dtenvio
            del f.dtlimite
        if f.validate():
            if acao==u"Salvar":
                if r:
                    # Update an existing Reuniao (optionally replacing the
                    # attachment).
                    try:
                        r.orgao_id = f.orgao.data
                        r.dtenvio = f.dtenvio.data
                        r.dtlimite = f.dtlimite.data
                        r.pauta = f.pauta.data
                        r.participantes = f.participantes.data
                        r.obs = f.obs.data
                        ff = request.files.get('arquivo', None)
                        if ff:
                            filename = anexos_upload.save(ff, name="reuniao_%04d." % r.id)
                            r.arquivo = filename
                        db.session.commit()
                        flash(u"Reunião cadastrada com sucesso", u"success")
                        return redirect(url_for('timeline',denuncia_id=denuncia_id))
                    except UploadNotAllowed:
                        db.session.rollback()
                        flash(u'O upload deste arquivo não é permitido devido questões de segurança, altere o tipo do arquivo para as extensões permitidas.', u'error')
                else:
                    # Create mode: Historico entry + Reuniao from the session
                    # dict. KeyError if the session entry is absent.
                    if session['encaminhamento']:
                        dictenc = session.pop('encaminhamento')
                        if dictenc:
                            try:
                                h = Historico(denuncia_id)
                                h.acao_id = 2 # Fazer um encaminhamento
                                db.session.add(h)
                                db.session.flush()
                                r = Reuniao(h.id)
                                r.orgao_id = dictenc['orgao_id']
                                r.tipo_id = dictenc['tipo_id']
                                r.dtenvio = datestring_to_date(dictenc['dtenvio'])
                                r.dtlimite = datestring_to_date(dictenc['dtlimite'])
                                r.pauta = f.pauta.data
                                r.participantes = f.participantes.data
                                r.obs = f.obs.data
                                r.dtcriacao = func.sysdate()
                                db.session.add(r)
                                db.session.flush()
                                ff = request.files.get('arquivo', None)
                                if ff:
                                    filename = anexos_upload.save(ff, name="reuniao_%04d." % r.id)
                                    r.arquivo = filename
                                    flash(u'O arquivo foi enviado!', u'success')
                                db.session.commit()
                                _altera_status(denuncia_id,3)
                                flash(u"Reunião cadastrada com sucesso", u"success")
                            except UploadNotAllowed:
                                db.session.rollback()
                                flash(u'O upload deste arquivo não é permitido devido questões de segurança, altere o tipo do arquivo para as extensões permitidas.', u'error')
                            # Note: this redirect runs on both the success and
                            # the UploadNotAllowed paths above.
                            return redirect(url_for('timeline',denuncia_id=denuncia_id))
                        else:
                            flash(u"Retorne ao histórico e tente novamente", u"success")
            if acao==u"Remover anexo":
                if r:
                    r.arquivo = None
                    db.session.commit()
                    flash(u"Arquivo deletado!", u"success")
        else:
            for field, errors in f.errors.items():
                for error in errors:
                    flash(u'Erro %s no campo %s' %(field,error), u"error")
    return render_template('encaminhamento_reuniao.html', f=f, reuniao=r, editar=editar)
@app.route('/denuncia/<int:denuncia_id>/encaminhamento/email/<int:email_id>/editar', methods=['GET', 'POST'])
@app.route('/denuncia/<int:denuncia_id>/encaminhamento/email/novo', methods=['GET', 'POST'])
@login_required
@checa_permissao('faz-encaminhamento')
def encaminhamento_email(denuncia_id=None, email_id=None):
    """Create ("novo") or edit/resend an email-type encaminhamento.

    Besides persisting the Email record this view actually sends the
    message via envia_email(), optionally attaching the uploaded file as a
    MIME part; the DB commit only happens when sending succeeds, otherwise
    the pending changes are rolled back.
    """
    denuncia = None
    e = None
    editar = False
    f = EmailForm()
    q = db.session.query(Orgao.id, Orgao.orgao).order_by(Orgao.orgao)
    option_orgao = ([(str(x), y) for x,y in q.all()])
    if denuncia_id:
        d = db.session.query(Denuncia).get(denuncia_id)
    f.orgao.choices = option_orgao
    if g.user:
        # Default sender: the logged-in user.
        f.de.data = g.user.email
    if email_id:
        # Edit mode: pre-fill the form from the stored Email.
        e = db.session.query(Email).get(email_id)
        if e:
            editar = True
            f.orgao.data = str(e.orgao_id)
            f.dtenvio.data = e.dtenvio
            f.dtlimite.data = e.dtlimite
            f.de.data = e.de
            f.para.data = e.para
            f.assunto.data = e.assunto
            f.texto.data = e.texto
    acao = request.form.get("acao")
    if request.method == 'POST':
        f = EmailForm(request.form)
        if email_id:
            f.orgao.choices = option_orgao
        else:
            # New records take orgao and dates from session['encaminhamento'].
            del f.orgao
            del f.dtenvio
            del f.dtlimite
        if f.validate():
            if acao==u"Enviar":
                if e:
                    # Resend path: update the existing Email and send again.
                    try:
                        e.orgao_id = f.orgao.data
                        e.dtenvio = f.dtenvio.data
                        e.dtlimite = f.dtlimite.data
                        e.de = f.de.data
                        e.para = f.para.data
                        e.assunto = f.assunto.data
                        e.texto = f.texto.data
                        ff = request.files.get('arquivo', None)
                        if ff:
                            filename = anexos_upload.save(ff, name="email%04d." % e.id)
                            e.arquivo = filename
                        t = render_template('encaminhamento-email.txt', denuncia=d, encaminhamento=e, usuario=g.user)
                        if e.arquivo:
                            # Multipart message: HTML body + file attachment.
                            # FIX: was anexos_upload.path(filename) — 'filename'
                            # is only bound when a NEW file was uploaded, so
                            # resending an email with a pre-existing attachment
                            # raised NameError. Use the stored e.arquivo.
                            filepath = anexos_upload.path(e.arquivo)
                            msg = MIMEMultipart()
                            part1 = MIMEText(t, 'html', 'utf-8')
                            msg.attach(part1)
                            part2 = MIMEApplication(open(filepath,"rb").read())
                            part2.add_header('Content-Disposition', 'attachment', filename="%s" %os.path.basename(filepath))
                            msg.attach(part2)
                        else:
                            msg = MIMEText(t, 'html', 'utf-8')
                        from_header = 'sistema@cnddh.org.br'
                        subject = e.assunto
                        email_to = [e.para,]
                        # Resends also copy the operator.
                        email_cc = [g.user.email,]
                        msg['From'] = from_header
                        msg['Subject'] = '[Encaminhamento] ' + subject
                        msg['To'] = ', '.join(email_to)
                        if email_cc:
                            msg['Cc'] = ', '.join(email_cc)
                        msg['Reply-To'] = g.user.email
                        enviado = envia_email(from_header,email_to,msg.as_string())
                        if enviado:
                            db.session.commit()
                            flash(u"Email reenviado com sucesso", u"success")
                        else:
                            db.session.rollback()
                            flash(u"Email não foi enviado", u"error")
                        return redirect(url_for('timeline',denuncia_id=denuncia_id))
                    except UploadNotAllowed:
                        db.session.rollback()
                        flash(u'O upload deste arquivo não é permitido devido questões de segurança, altere o tipo do arquivo para as extensões permitidas.', u'error')
                else:
                    # Create mode: Historico entry + Email from the session
                    # dict prepared by fazer_encaminhamento(). KeyError if the
                    # session entry is absent.
                    if session['encaminhamento']:
                        dictenc = session.pop('encaminhamento')
                        if dictenc:
                            try:
                                h = Historico(denuncia_id)
                                h.acao_id = 2 # Fazer um encaminhamento
                                db.session.add(h)
                                db.session.flush()
                                e = Email(h.id)
                                e.orgao_id = dictenc['orgao_id']
                                e.tipo_id = dictenc['tipo_id']
                                e.dtenvio = datestring_to_date(dictenc['dtenvio'])
                                e.dtlimite = datestring_to_date(dictenc['dtlimite'])
                                e.de = f.de.data
                                e.para = f.para.data
                                e.assunto = f.assunto.data
                                e.texto = f.texto.data
                                e.dtcriacao = func.sysdate()
                                db.session.add(e)
                                db.session.flush()
                                ff = request.files.get('arquivo', None)
                                if ff:
                                    filename = anexos_upload.save(ff, name="email%04d." % e.id)
                                    e.arquivo = filename
                                    flash(u'O arquivo foi enviado!', u'success')
                                db.session.commit()
                                _altera_status(denuncia_id,3)
                                t = render_template('encaminhamento-email.txt', denuncia=d, encaminhamento=e, usuario=g.user)
                                if e.arquivo:
                                    # Here e.arquivo was just set from the new
                                    # upload; equivalent to 'filename'.
                                    filepath = anexos_upload.path(e.arquivo)
                                    msg = MIMEMultipart()
                                    part1 = MIMEText(t, 'html', 'utf-8')
                                    msg.attach(part1)
                                    part2 = MIMEApplication(open(filepath,"rb").read())
                                    part2.add_header('Content-Disposition', 'attachment', filename="%s" %os.path.basename(filepath))
                                    msg.attach(part2)
                                else:
                                    msg = MIMEText(t, 'html', 'utf-8')
                                from_header = 'sistema@cnddh.org.br'
                                subject = e.assunto
                                email_to = [e.para,]
                                #email_cc = [g.user.email,]
                                email_cc = None
                                msg['From'] = from_header
                                msg['Subject'] = '[Encaminhamento] ' + subject
                                msg['To'] = ', '.join(email_to)
                                if email_cc:
                                    msg['Cc'] = ', '.join(email_cc)
                                msg['Reply-To'] = g.user.email
                                enviado = envia_email(from_header,email_to,msg.as_string())
                                if enviado:
                                    db.session.commit()
                                    flash(u"Email enviado com sucesso", u"success")
                                else:
                                    db.session.rollback()
                                    flash(u"Email não foi enviado", u"error")
                                return redirect(url_for('timeline',denuncia_id=denuncia_id))
                            except UploadNotAllowed:
                                db.session.rollback()
                                flash(u'O upload deste arquivo não é permitido devido questões de segurança, altere o tipo do arquivo para as extensões permitidas.', u'error')
                                return redirect(url_for('timeline',denuncia_id=denuncia_id))
                        else:
                            flash(u"Retorne ao histórico e tente novamente", u"success")
        else:
            for field, errors in f.errors.items():
                for error in errors:
                    flash(u'Erro %s no campo %s' %(field,error), u"error")
        if acao==u"Remover anexo":
            if e:
                e.arquivo = None
                db.session.commit()
                flash(u"Arquivo deletado!", u"success")
    return render_template('encaminhamento_email.html', f=f,email=e, editar=editar)
@app.route('/denuncia/<int:denuncia_id>/encaminhamento/generico/<int:generico_id>/editar', methods=['GET', 'POST'])
@app.route('/denuncia/<int:denuncia_id>/encaminhamento/generico/novo', methods=['GET', 'POST'])
@login_required
@checa_permissao('faz-encaminhamento')
def encaminhamento_generico(denuncia_id=None, generico_id=None):
    """Create ("novo") or edit a generic (catch-all) encaminhamento.

    Creation consumes session['encaminhamento'] (set by
    fazer_encaminhamento()); editing loads the existing Generico by id.
    An optional file can be attached via the 'arquivo' upload field.
    """
    denuncia = None
    # NOTE(review): this local shadows Flask's request-global "g". Harmless
    # here because the view never uses flask.g, but rename if that changes.
    g = None
    editar = False
    f = GenericoForm()
    q = db.session.query(Orgao.id, Orgao.orgao).order_by(Orgao.orgao)
    option_orgao = ([(str(x), y) for x,y in q.all()])
    if generico_id:
        # Edit mode: pre-fill the form from the stored Generico.
        g = db.session.query(Generico).get(generico_id)
        f.orgao.choices = option_orgao
        if g:
            editar = True
            f.orgao.data = str(g.orgao_id)
            f.dtenvio.data = g.dtenvio
            f.dtlimite.data = g.dtlimite
            f.obs.data = g.obs
    acao = request.form.get("acao")
    if request.method == 'POST':
        f = GenericoForm(request.form)
        if generico_id:
            f.orgao.choices = option_orgao
        else:
            # New records take orgao and dates from the session dict.
            del f.orgao
            del f.dtenvio
            del f.dtlimite
        if f.validate():
            if acao==u"Salvar":
                if g:
                    # Update an existing Generico (optionally replacing the
                    # attachment).
                    try:
                        g.orgao_id = f.orgao.data
                        g.dtenvio = f.dtenvio.data
                        g.dtlimite = f.dtlimite.data
                        g.obs = f.obs.data
                        ff = request.files.get('arquivo', None)
                        if ff:
                            filename = anexos_upload.save(ff, name="generico_%04d." % g.id)
                            g.arquivo = filename
                        db.session.commit()
                        flash(u"Encaminhamento genérico cadastrada com sucesso", u"success")
                    except UploadNotAllowed:
                        db.session.rollback()
                        flash(u'O upload deste arquivo não é permitido devido questões de segurança, altere o tipo do arquivo para as extensões permitidas.', u'error')
                    # Note: redirects on both the success and rollback paths.
                    return redirect(url_for('timeline',denuncia_id=denuncia_id))
                else:
                    # Create mode: Historico entry + Generico from the session
                    # dict. KeyError if the session entry is absent.
                    if session['encaminhamento']:
                        dictenc = session.pop('encaminhamento')
                        if dictenc:
                            try:
                                h = Historico(denuncia_id)
                                h.acao_id = 2 # Fazer um encaminhamento
                                db.session.add(h)
                                db.session.flush()
                                g = Generico(h.id)
                                g.orgao_id = dictenc['orgao_id']
                                g.tipo_id = dictenc['tipo_id']
                                g.dtenvio = datestring_to_date(dictenc['dtenvio'])
                                g.dtlimite = datestring_to_date(dictenc['dtlimite'])
                                g.obs = f.obs.data
                                g.dtcriacao = func.sysdate()
                                db.session.add(g)
                                db.session.flush()
                                ff = request.files.get('arquivo', None)
                                if ff:
                                    filename = anexos_upload.save(ff, name="generico_%04d." % g.id)
                                    g.arquivo = filename
                                    flash(u'O arquivo foi enviado!', u'success')
                                db.session.commit()
                                _altera_status(denuncia_id,3)
                                flash(u"Encaminhamento genérico cadastrada com sucesso", u"success")
                                return redirect(url_for('timeline',denuncia_id=denuncia_id))
                            except UploadNotAllowed:
                                db.session.rollback()
                                flash(u'O upload deste arquivo não é permitido devido questões de segurança, altere o tipo do arquivo para as extensões permitidas.', u'error')
                                return redirect(url_for('timeline',denuncia_id=denuncia_id))
                        else:
                            flash(u"Retorne ao histórico e tente novamente", u"success")
            if acao==u"Remover anexo":
                if g:
                    g.arquivo = None
                    db.session.commit()
                    flash(u"Arquivo deletado!", u"success")
        else:
            for field, errors in f.errors.items():
                for error in errors:
                    flash(u'Erro %s no campo %s' %(field,error), u"error")
    return render_template('encaminhamento_generico.html', f=f, generico=g, editar=editar)
# Polymorphic selectable spanning Encaminhamento and all of its concrete
# subtypes, so a single query can load any kind of encaminhamento.
todos_encaminhamentos = with_polymorphic(Encaminhamento, [Oficio, Telefonema, Reuniao, Email, Generico])
@app.route('/denuncia/<int:denuncia_id>/cancelar/', methods=['GET', 'POST'])
@login_required
@checa_permissao('cancela-denuncia')
def cancelar_denuncia(denuncia_id=None):
    """Cancel a denuncia: records a Historico entry (acao 4) with the
    user-supplied motivo and moves the status to 4 (Cancelada)."""
    d = None
    if denuncia_id:
        d = db.session.query(Denuncia).get(denuncia_id)
    if request.method == 'POST':
        f = MotivoForm(request.form)
        if f.validate():
            # Only allowed when 4 is a valid forward status transition.
            if _controle_status(d,4):
                h = Historico(denuncia_id)
                h.acao_id = 4 #Cancelar denuncia
                h.motivo = f.motivo.data
                d.status_id = 4 #Cancelada
                db.session.add(h)
                db.session.commit()
                flash(u"Denúncia cancelada.", u"success")
                return redirect(url_for('timeline',denuncia_id=d.id))
            else:
                flash(u"Denúncia não pode ser cancelada. A mudança de status não prevista, pode que esta denúncia já tenho sido cancelada.", u"error")
        else:
            flash(u"Verifique os erros no formulário abaixo.", u"error")
    else:
        f = MotivoForm()
    return render_template('cancelar.html', f=f, d=d)
@app.route('/denuncia/<int:denuncia_id>/fechar/', methods=['GET', 'POST'])
@login_required
@checa_permissao('fecha-denuncia')
def fechar_denuncia(denuncia_id=None):
    """Close a denuncia: records a Historico entry (acao 5) with the
    user-supplied motivo and moves the status to 5 (Fechada), after which
    no more encaminhamentos/retornos are expected."""
    d = None
    if denuncia_id:
        d = db.session.query(Denuncia).get(denuncia_id)
    if request.method == 'POST':
        f = MotivoForm(request.form)
        if f.validate():
            # Only allowed when 5 is a valid forward status transition.
            if _controle_status(d,5):
                h = Historico(denuncia_id)
                h.acao_id = 5 #Fechar denuncia
                h.motivo = f.motivo.data
                d.status_id = 5 #Fechada
                db.session.add(h)
                db.session.commit()
                flash(u"Denúncia fechada e não poderá mais fazer encaminhamentos e receber retornos.", u"success")
                return redirect(url_for('timeline',denuncia_id=d.id))
            else:
                flash(u"Denúncia não pode ser fechada. Esta mudança de status não está prevista, pode que esta denúncia já tenho sido fechada.", u"error")
        else:
            flash(u"Verifique os erros no formulário abaixo.", u"error")
    else:
        f = MotivoForm()
    return render_template('fechar.html', f=f, d=d)
@app.route('/denuncia/<int:denuncia_id>/timeline', methods=['GET', 'POST'])
@app.route('/denuncia/<int:denuncia_id>/timeline/historico/<int:historico_id>/deletar', methods=['GET', 'POST'])
@app.route('/denuncia/<int:denuncia_id>/timeline/historico/<int:historico_id>/retorno/<int:retorno_id>/deletar', methods=['GET', 'POST'])
@login_required
@checa_permissao('visualiza-timeline')
def timeline(denuncia_id=None,historico_id=None,retorno_id=None):
    """Show a denuncia's history timeline; the /deletar routes remove a
    single Retorno or a whole Historico entry and redirect back."""
    denuncia = None
    if denuncia_id:
        q = db.session.query(Denuncia).filter(Denuncia.id==denuncia_id)
        # Eager-load the history entries shown on the page.
        q = q.options(joinedload_all(Denuncia.historico))
        # one() raises if the id does not match exactly one row.
        denuncia = q.one()
    dtatual = datetime.datetime.today()
    if historico_id and retorno_id:
        # Delete one retorno of a history entry.
        r = db.session.query(Retorno).get(retorno_id)
        db.session.delete(r)
        db.session.commit()
        flash(u"Retorno deletado!", u"success")
        return redirect(url_for('timeline',denuncia_id=denuncia_id))
    elif historico_id and not retorno_id:
        # Delete the whole history entry.
        h = db.session.query(Historico).get(historico_id)
        db.session.delete(h)
        db.session.commit()
        flash(u"Encaminhamento deletado!", u"success")
        return redirect(url_for('timeline',denuncia_id=denuncia_id))
    return render_template('timeline.html', denuncia=denuncia, dtatual=dtatual)
@app.route('/denuncia/<int:denuncia_id>/encaminhamento/<int:encaminhamento_id>/retorno/<int:retorno_id>/editar', methods=['GET', 'POST'])
@app.route('/denuncia/<int:denuncia_id>/encaminhamento/<int:encaminhamento_id>/retorno/novo', methods=['GET', 'POST'])
@login_required
@checa_permissao('recebe-retorno')
def receber_retorno(denuncia_id=None, encaminhamento_id=None, retorno_id=None):
    """Record ("novo") or edit a retorno (reply) for an encaminhamento.

    The concrete Retorno subclass is chosen from the selected TipoRetorno;
    each subtype persists its own extra fields. Saving also stamps
    e.dtretorno so the encaminhamento stops counting as pending.
    """
    denuncia = None
    d = None  # defensive: the template receives d even when denuncia_id is falsy
    e = None
    r = None
    if denuncia_id:
        d = db.session.query(Denuncia).get(denuncia_id)
    q = db.session.query(TipoRetorno.id, TipoRetorno.nome).order_by(TipoRetorno.nome)
    option_retorno = []
    option_retorno.extend([(str(x), y) for x,y in q.all()])
    f = RetornoForm()
    f.retorno.choices = option_retorno
    if retorno_id:
        # Edit mode: pre-fill the form from the stored Retorno subtype.
        r = db.session.query(Retorno).get(retorno_id)
        if r:
            f.descricao.data = r.descricao
            f.retorno.data = str(r.tiporetorno_id)
            f.dtretorno.data = r.dtretorno
            if r.tiporetorno.tipo =='retornopessoasassistidas':
                lista = r.tipoassistencia
                # FIX: iterate over a snapshot — removing from the list while
                # iterating it skipped adjacent empty entries (left behind by
                # the leading-comma join used on save).
                for i in list(lista):
                    if not i:
                        lista.remove(i)
                f.tipoassistencia.data = lista
            elif r.tiporetorno.tipo =='retornoinquerito':
                f.ip.data = r.ip
                f.situacaoip.data = r.situacao
                f.motivo.data = r.motivo
            elif r.tiporetorno.tipo =='retornoprocesso':
                f.np.data = r.np
                f.situacaop.data = r.situacao
            elif r.tiporetorno.tipo =='retornobo':
                f.bo.data = r.bo
            elif r.tiporetorno.tipo =='retornorco':
                f.rco.data = r.rco
            elif r.tiporetorno.tipo =='retornoreds':
                f.reds.data = r.reds
            elif r.tiporetorno.tipo =='retornopoliticapsr':
                lista = r.tipopolitica
                # FIX: same snapshot fix as tipoassistencia above.
                for i in list(lista):
                    if not i:
                        lista.remove(i)
                f.tipopolitica.data = lista
            else:
                f.observacao.data = r.observacao
    if denuncia_id and encaminhamento_id:
        e = db.session.query(Encaminhamento).get(encaminhamento_id)
    acao = request.form.get("acao")
    if request.method == 'POST':
        f = RetornoForm(request.form)
        f.retorno.choices = option_retorno
        f.tipoassistencia.choices = tipoassistencia_choices
        # FIX: this was "f.tipopolitica.poices" (typo), so the field's choices
        # were never assigned and its validation could not work.
        f.tipopolitica.choices = politicas_choices
        if f.validate():
            if acao==u"Salvar":
                if e:
                    try:
                        tr = db.session.query(TipoRetorno).get(f.retorno.data)
                        if tr:
                            # On create, instantiate the subtype matching the
                            # chosen TipoRetorno; on edit, reuse the Retorno
                            # loaded above.
                            if tr.tipo =='retornopessoasassistidas':
                                if not retorno_id:
                                    r = RetornoPessoasassistidas(e.id)
                                # Keeps the historical leading-comma storage
                                # format for compatibility with existing rows.
                                aux = ''
                                for ta in f.tipoassistencia.data:
                                    aux = aux + ',' + ta
                                r.tipoassistencia = aux
                            elif tr.tipo =='retornoinquerito':
                                if not retorno_id:
                                    r = RetornoInquerito(e.id)
                                r.ip = f.ip.data
                                r.situacao = f.situacaoip.data
                                r.motivo = f.motivo.data
                            elif tr.tipo =='retornoprocesso':
                                if not retorno_id:
                                    r = RetornoProcesso(e.id)
                                r.np = f.np.data
                                r.situacao = f.situacaop.data
                            elif tr.tipo =='retornobo':
                                if not retorno_id:
                                    r = RetornoBO(e.id)
                                r.bo = f.bo.data
                            elif tr.tipo =='retornorco':
                                if not retorno_id:
                                    r = RetornoRCO(e.id)
                                r.rco = f.rco.data
                            elif tr.tipo =='retornoreds':
                                if not retorno_id:
                                    r = RetornoREDS(e.id)
                                # FIX: was "e.reds = f.reds.data" — the value
                                # landed on the Encaminhamento instead of the
                                # Retorno, unlike every sibling branch here.
                                r.reds = f.reds.data
                            elif tr.tipo =='retornopoliticapsr':
                                if not retorno_id:
                                    r = RetornoPoliticaPSR(e.id)
                                aux = ''
                                for p in f.tipopolitica.data:
                                    aux = aux + ',' + p
                                r.tipopolitica = aux
                            else:
                                if not retorno_id:
                                    r = RetornoGenerico(e.id)
                                r.observacao = f.observacao.data
                        if r:
                            # Common columns + mark the encaminhamento answered.
                            r.descricao = f.descricao.data
                            r.tiporetorno_id = f.retorno.data
                            r.dtretorno = f.dtretorno.data
                            e.dtretorno = r.dtretorno
                            db.session.add(r)
                            db.session.flush()
                            ff = request.files.get('arquivo', None)
                            if ff:
                                filename = anexos_upload.save(ff, name="retorno_%04d." % r.id)
                                r.arquivo = filename
                    except UploadNotAllowed:
                        db.session.rollback()
                        flash(u'O upload deste arquivo não é permitido devido questões de segurança, altere o tipo do arquivo para as extensões permitidas.', u'error')
                    # Note: commit/flash/redirect run on both the success and
                    # the UploadNotAllowed paths, matching the sibling views.
                    db.session.commit()
                    flash(u"Retorno cadastrado com sucesso", u"success")
                    return redirect(url_for('timeline',denuncia_id=denuncia_id))
            if acao==u"Remover anexo":
                if r:
                    r.arquivo = None
                    db.session.commit()
                    flash(u"Arquivo deletado!", u"success")
        else:
            for field, errors in f.errors.items():
                for error in errors:
                    flash(u'Erro %s no campo %s' %(field,error), u"error")
    return render_template('retorno.html', f=f, d=d, retorno=r)
@app.route('/denuncia/relatorio/graficos/violacaoporestado', methods=['GET', 'POST'])
@login_required
def grafico_violacaoporestado():
    """Render the "violations by state" chart page with the
    macro-category filter form."""
    f = GraficoViolacaoForm()
    # Distinct macro-categories double as both option value and label.
    q = db.session.query(TipoViolacao.macrocategoria, TipoViolacao.macrocategoria)
    q = q.group_by(TipoViolacao.macrocategoria)
    option_tpviolacao = ([(x, y) for x,y in q.all()])
    f.macrocategoria.choices = option_tpviolacao
    # NOTE(review): f.macrocategoria is a Field object and therefore always
    # truthy, and the query built below is never executed or used — dead code.
    if f.macrocategoria:
        q = db.session.query(TipoViolacao.id, TipoViolacao.microcategoria)
        q = q.filter(TipoViolacao.macrocategoria==f.macrocategoria.data)
    return render_template('grafico_violacaoporestado.html', f=f)
@app.route('/denuncia/relatorio/graficos/violacaoporcidade', methods=['GET', 'POST'])
@login_required
def grafico_violacaoporcidade():
    """Render the "violations by city" chart page with the
    macro-category filter form."""
    f = GraficoViolacaoForm()
    # Distinct macro-categories double as both option value and label.
    q = db.session.query(TipoViolacao.macrocategoria, TipoViolacao.macrocategoria)
    q = q.group_by(TipoViolacao.macrocategoria)
    option_tpviolacao = ([(x, y) for x,y in q.all()])
    f.macrocategoria.choices = option_tpviolacao
    # NOTE(review): always-truthy Field test; the query below is never used.
    if f.macrocategoria:
        q = db.session.query(TipoViolacao.id, TipoViolacao.microcategoria)
        q = q.filter(TipoViolacao.macrocategoria==f.macrocategoria.data)
    return render_template('grafico_violacaoporcidade.html', f=f)
@app.route('/denuncia/relatorio/graficos/violacaopormacro', methods=['GET', 'POST'])
@login_required
def grafico_violacaopormacro():
    """Render the "violations by macro-category" chart page."""
    f = GraficoViolacaoForm()
    # Distinct macro-categories double as both option value and label.
    q = db.session.query(TipoViolacao.macrocategoria, TipoViolacao.macrocategoria)
    q = q.group_by(TipoViolacao.macrocategoria)
    option_tpviolacao = ([(x, y) for x,y in q.all()])
    f.macrocategoria.choices = option_tpviolacao
    return render_template('grafico_violacaopormacro.html', f=f)
@app.route('/denuncia/relatorio/graficos/violacaoportempo', methods=['GET', 'POST'])
@login_required
def grafico_violacaoportempo():
    """Render the "violations over time" chart page with the
    macro-category filter form."""
    f = GraficoViolacaoForm()
    # Distinct macro-categories double as both option value and label.
    q = db.session.query(TipoViolacao.macrocategoria, TipoViolacao.macrocategoria)
    q = q.group_by(TipoViolacao.macrocategoria)
    option_tpviolacao = ([(x, y) for x,y in q.all()])
    f.macrocategoria.choices = option_tpviolacao
    # NOTE(review): always-truthy Field test; the query below is never used.
    if f.macrocategoria:
        q = db.session.query(TipoViolacao.id, TipoViolacao.microcategoria)
        q = q.filter(TipoViolacao.macrocategoria==f.macrocategoria.data)
    return render_template('grafico_violacaoportempo.html', f=f)
@app.route('/denuncia/relatorio/graficos/violacaoporsupeito', methods=['GET', 'POST'])
@login_required
def grafico_violacaoporsuspeito():
    """Render the "violations by suspect type" chart page with both the
    macro-category and suspect-type filter forms.

    NOTE(review): the URL segment 'violacaoporsupeito' is missing an 's'
    (kept as-is — changing it would break existing links).
    """
    f = GraficoViolSuspForm()
    # Distinct macro-categories double as both option value and label.
    q = db.session.query(TipoViolacao.macrocategoria, TipoViolacao.macrocategoria)
    q = q.group_by(TipoViolacao.macrocategoria)
    option_tpviolacao = ([(x, y) for x,y in q.all()])
    f.macrocategoria.choices = option_tpviolacao
    # NOTE(review): always-truthy Field test; the query built here is
    # immediately overwritten by the suspect-type query below.
    if f.macrocategoria:
        q = db.session.query(TipoViolacao.id, TipoViolacao.microcategoria)
        q = q.filter(TipoViolacao.macrocategoria==f.macrocategoria.data)
    q = db.session.query(TipoSuspeito.tipo,TipoSuspeito.tipo)
    q = q.group_by(TipoSuspeito.tipo)
    option_tipo = [(u"", u"")]
    option_tipo.extend([(x, y) for x,y in q.all()])
    f.tiposuspeito.choices = option_tipo
    return render_template('grafico_violacaoporsuspeito.html', f=f)
@app.route('/permissoes/permissoes/', methods=['GET', 'POST'])
@app.route('/permissoes/permissoes/<int:permissao>', methods=['GET', 'POST'])
@login_required
@checa_permissao('cria-permissoes')
def permissoes_permissoes(permissao=None):
    """List every Permissao and, on a valid POST, create a new one.

    The commit may fail on a duplicate name; in that case the transaction
    is rolled back and the user is told to check uniqueness. The optional
    'permissao' route argument is currently unused.
    """
    if request.method == 'POST':
        f = PermissaoForm(request.form)
        if f.validate():
            p = Permissao()
            f.populate_obj(p)
            db.session.add(p)
            try:
                db.session.commit()
                flash(u'Permissão criada com sucesso.', u'success')
            except Exception:
                # FIX: was a bare "except:", which would also swallow
                # SystemExit/KeyboardInterrupt; narrowed to Exception while
                # keeping the rollback + user feedback.
                db.session.rollback()
                flash(u'Ocorreu um erro - verifique se o nome é unico.', u'error')
            # Present a fresh form after a successful validation round.
            f = PermissaoForm()
    else:
        f = PermissaoForm()
    q = db.session.query(Permissao).order_by(Permissao.id)
    permissoes = q.all()
    return render_template('permissoes_permissoes.html', permissoes=permissoes, f=f)
@app.route('/permissoes/perfis/', methods=['GET', 'POST'])
@login_required
@checa_permissao('altera-perfil')
def permissoes_perfis():
    """List every Perfil and, on a valid POST, create a new one.

    Creating (as opposed to viewing) additionally requires the
    'cria-perfil' permission; without it the request is aborted with 401.
    """
    if request.method == 'POST':
        f = PermissaoForm(request.form)
        if f.validate():
            if g.user.checa_permissao('cria-perfil'):
                p = Perfil()
                f.populate_obj(p)
                db.session.add(p)
                try:
                    db.session.commit()
                    flash(u'Perfil criado com sucesso.', u'success')
                except Exception:
                    # FIX: was a bare "except:", which would also swallow
                    # SystemExit/KeyboardInterrupt; narrowed to Exception
                    # while keeping the rollback + user feedback.
                    db.session.rollback()
                    flash(u'Ocorreu um erro.', u'error')
            else:
                abort(401)
            # Present a fresh form after a successful validation round.
            f = PermissaoForm()
    else:
        f = PermissaoForm()
    q = db.session.query(Perfil)
    perfis = q.all()
    return render_template('permissoes_perfis.html', perfis=perfis, f=f)
@app.route('/permissoes/perfis/<int:perfil>', methods=['GET', 'POST'])
@login_required
@checa_permissao('altera-perfil')
def permissoes_perfis_edit(perfil):
    """Edit a Perfil's name/description and sync its permission set.

    The checkbox list posted as 'perms' is diffed against the stored
    PermissaoPerfil rows: already-present permissions are kept, unchecked
    ones are deleted, and the remainder are inserted.
    """
    q = db.session.query(Perfil)
    q = q.filter(Perfil.id == perfil)
    p = q.first()
    if not p:
        abort(404)
    if request.method == 'POST':
        f = PermissaoForm(request.form, obj=p)
        if f.validate():
            if f.nome.data != p.nome:
                p.nome = f.nome.data
            if f.descricao.data != p.descricao:
                p.descricao = f.descricao.data
            _perms = request.form.getlist('perms')
            perms = [int(x) for x in _perms]
            for pt in p.permissoesperfis:
                if pt.permissao_id in perms:
                    # Permission already on the profile: drop it from the
                    # to-add list and keep the row.
                    perms.remove(pt.permissao_id)
                else:
                    # Permission was unchecked: remove it from the profile.
                    db.session.delete(pt)
            # Whatever is left in perms is new — insert it.
            for p_id in perms:
                np = PermissaoPerfil()
                np.permissao_id = p_id
                np.perfil_id = perfil
                np.tipo = 0
                db.session.add(np)
            # NOTE(review): the success flash fires before the commit, so a
            # failing commit would still show "alteradas".
            flash(u'Permissões alteradas.', u'success')
            db.session.commit()
    else:
        f = PermissaoForm(obj = p)
    # Outer join so every Permissao appears, with its PermissaoPerfil row
    # (or None) indicating whether this profile has it.
    q = db.session.query(Permissao, PermissaoPerfil)
    q = q.outerjoin(PermissaoPerfil, and_(PermissaoPerfil.permissao_id == Permissao.id, PermissaoPerfil.perfil_id == perfil))
    q = q.order_by(Permissao.nome)
    permissoes = q.all()
    return render_template('permissoes_perfis_edit.html', f=f, permissoes=permissoes, p=p)
@app.route('/permissoes/usuarios/', methods=['GET', 'POST'])
@login_required
@checa_permissao('altera-permissoes')
def permissoes_usuarios():
    """Search users by exact login and/or partial name for permission
    management; renders the search form plus any results."""
    usuarios = None
    if request.method == 'POST':
        form = PesquisaUsuarioForm(request.form)
        if form.validate():
            filtros = []
            login = (form.login.data or '').strip()
            if login:
                filtros.append(Usuario.login == login)
            nome = (form.nome.data or '').strip()
            if nome:
                filtros.append(Usuario.nome.contains(nome))
            consulta = db.session.query(Usuario)
            for condicao in filtros:
                consulta = consulta.filter(condicao)
            usuarios = consulta.all()
        else:
            flash(u'Nenhum campo de busca foi preenchido. Busca não realizada', u'notice')
    else:
        form = PesquisaUsuarioForm()
    return render_template('permissoes_usuarios.html', form=form, usuarios=usuarios)
@app.route('/permissoes/usuarios/<int:usuario>', methods=['GET', 'POST'])
@login_required
@checa_permissao('altera-permissoes')
def permissoes_usuarios_permissoes_edit(usuario):
    """Sync a user's direct permissions with the posted 'perms' checkboxes.

    Same diff strategy as permissoes_perfis_edit: keep permissions already
    stored, delete unchecked ones, insert the rest.
    """
    if request.method == 'POST':
        q = db.session.query(PermissaoUsuario)
        q = q.filter(PermissaoUsuario.usuario_id == usuario)
        permissoesusuarios = q.all()
        _perms = request.form.getlist('perms')
        perms = [int(x) for x in _perms]
        for pt in permissoesusuarios:
            if pt.permissao_id in perms:
                # Permission already granted: keep the row, drop from the
                # to-add list.
                perms.remove(pt.permissao_id)
            else:
                # Permission was unchecked: revoke it.
                db.session.delete(pt)
        # Whatever is left in perms is new — insert it.
        for p_id in perms:
            np = PermissaoUsuario()
            np.permissao_id = p_id
            np.usuario_id = usuario
            np.tipo = 0
            db.session.add(np)
        # NOTE(review): success flash fires before the commit.
        flash(u'Permissões alteradas.', u'success')
        db.session.commit()
    # Outer join so every Permissao appears, with its PermissaoUsuario row
    # (or None) indicating whether this user has it directly.
    q = db.session.query(Permissao, PermissaoUsuario)
    q = q.outerjoin(PermissaoUsuario, and_(PermissaoUsuario.permissao_id == Permissao.id, PermissaoUsuario.usuario_id == usuario))
    q = q.order_by(Permissao.nome)
    permissoes = q.all()
    # NOTE(review): 'u' is unbound if usuario is falsy (e.g. 0); the int
    # route converter makes that unlikely but not impossible.
    if usuario:
        u = db.session.query(Usuario).get(usuario)
    return render_template('permissoes_usuarios_permissoes_edit.html', permissoes=permissoes, u=u)
@app.route('/permissoes/usuarios/<int:usuario>/perfis', methods=['GET', 'POST'])
@login_required
@checa_permissao('altera-perfil')
def permissoes_usuarios_perfis_edit(usuario):
    """Edit the profiles (Perfil) assigned to one user.

    Same sync pattern as the permission editor: checked profiles are kept,
    unchecked links deleted, newly checked profiles inserted.
    """
    if request.method == 'POST':
        q = db.session.query(PerfilUsuario)
        q = q.filter(PerfilUsuario.usuario_id == usuario)
        perfisusuarios = q.all()
        perfs = [int(x) for x in request.form.getlist('perfs')]
        for pt in perfisusuarios:
            if pt.perfil_id in perfs:
                # still assigned: keep the link
                perfs.remove(pt.perfil_id)
            else:
                # unchecked: drop the link
                db.session.delete(pt)
        # remaining ids are newly assigned profiles
        for p_id in perfs:
            np = PerfilUsuario()
            np.perfil_id = p_id
            np.usuario_id = usuario
            db.session.add(np)
        db.session.commit()
        # flash only after the commit succeeded (the original flashed first)
        flash(u'Perfis alterados.', u'success')
    q = db.session.query(Perfil, PerfilUsuario)
    q = q.outerjoin(PerfilUsuario, and_(PerfilUsuario.perfil_id == Perfil.id, PerfilUsuario.usuario_id == usuario))
    q = q.order_by(Perfil.nome)
    perfis = q.all()
    # Always load the user: the original left `u` undefined for usuario == 0.
    u = db.session.query(Usuario).get(usuario)
    return render_template('permissoes_usuarios_perfis_edit.html', perfis=perfis, u=u)
@app.route('/permissoes')
@login_required
@checa_permissao('altera-permissoes')
def permissoes():
    """Debug page: dump the logged-in user's permissions as raw HTML text."""
    if g.user:
        # NOTE(review): '</br>' is not a valid HTML tag ('<br/>' was likely
        # intended); left untouched since it is a runtime string.
        t = u"Usuário: %d" % (g.user.id) + "</br>"
        t += u"Nome: %s" % (g.user.nome) + "</br>"
        t += u"Permissões no usuário: </br>"
        # permissions attached directly to the user
        for p in g.user.permissoes:
            t += str(p.permissao.nome) + "</br>"
        t += u"Permissões no perfil: </br>"
        # permissions inherited through each of the user's profiles
        for pf in g.user.perfis:
            for pp in pf.perfil.permissoesperfis:
                t += str(pp.permissao.nome) + "</br>"
        t += u"Checa permissões: %s" % g.user.checa_permissao('test') + "</br>"
        t += u"User agent: %s" % request.headers.get('User-Agent')
        return t
    else:
        return "Não tem usuário"
from cnddh.decoder import killgremlins
@app.route('/ajax/cidades', methods=['GET', 'POST'])
@login_required
def ajax_cidades():
    """Return (cidade, cidade) pairs for the given state, ordered by name."""
    estado = request.values.get('estado', '').strip()
    rows = (db.session.query(Cidade.cidade, Cidade.cidade)
            .filter(Cidade.estado == estado)
            .order_by(Cidade.cidade)
            .all())
    return jsonify(result=[(nome_a, nome_b) for nome_a, nome_b in rows])
@app.route('/ajax/denuncia/<int:denuncia_id>/vitima/<int:vitima_id>/delete', methods=['GET', 'POST'])
@login_required
@checa_permissao('edita-vitima')
def ajax_vitima_delete(denuncia_id=None, vitima_id=None):
    """Delete one victim row; returns [id, nome] of the deleted victim
    (or an empty list when ids are missing or the row does not exist)."""
    a = []
    if denuncia_id and vitima_id:
        v = db.session.query(Vitima).get(vitima_id)
        # Guard against a stale id: the original assumed the row exists and
        # raised AttributeError on None.
        if v is not None:
            # capture the values before the delete expires the instance
            a = [v.id, v.nome]
            db.session.delete(v)
            db.session.commit()
    return jsonify(result=a)
@app.route('/ajax/instituicao', methods=['GET', 'POST'])
@login_required
def ajax_instituicao():
    """List the distinct institutions registered for one suspect type."""
    tipo = request.values.get('tipo', '').strip()
    rows = (db.session.query(TipoSuspeito.instituicao, TipoSuspeito.instituicao)
            .filter(TipoSuspeito.tipo == tipo)
            .group_by(TipoSuspeito.instituicao)
            .order_by(TipoSuspeito.instituicao)
            .all())
    return jsonify(result=rows)
@app.route('/ajax/classificacao', methods=['GET', 'POST'])
@login_required
def ajax_classificacao():
    """List (id, classificacao) pairs for one suspect type + institution."""
    tipo = request.values.get('tipo', '').strip()
    instituicao = request.values.get('instituicao', '').strip()
    rows = (db.session.query(TipoSuspeito.id, TipoSuspeito.classificacao)
            .filter(TipoSuspeito.tipo == tipo)
            .filter(TipoSuspeito.instituicao == instituicao)
            .order_by(TipoSuspeito.classificacao)
            .all())
    return jsonify(result=rows)
@app.route('/ajax/microcategoria', methods=['GET', 'POST'])
@login_required
def ajax_microcategoria():
    """List (id, microcategoria) pairs under one macrocategory."""
    macrocategoria = request.values.get('macrocategoria', '').strip()
    rows = (db.session.query(TipoViolacao.id, TipoViolacao.microcategoria)
            .filter(TipoViolacao.macrocategoria == macrocategoria)
            .order_by(TipoViolacao.microcategoria)
            .all())
    return jsonify(result=rows)
@app.route('/ajax/violacao/delete', methods=['GET', 'POST'])
@login_required
@checa_permissao('edita-violacoes')
def ajax_violacao_delete():
    """Delete one violation, then re-render the victim/suspect grouping
    table of the complaint it belonged to."""
    vitsup = {}
    # The original never initialised `denuncia` when no violation was found,
    # which made the render_template call below raise NameError.
    denuncia = None
    violacao_id = request.values.get('violacao_id', '').strip()
    if violacao_id:
        v = db.session.query(Violacao).get(violacao_id)
        if v:
            # Capture the FK *before* delete/commit: after the commit the
            # instance is expired+deleted and reading v.denuncia_id raises.
            denuncia_id = v.denuncia_id
            db.session.delete(v)
            db.session.commit()
            q = db.session.query(Violacao).filter(Violacao.denuncia_id == denuncia_id)
            q = q.options(joinedload_all(Violacao.vitima))
            q = q.options(joinedload_all(Violacao.suspeito))
            q = q.options(joinedload_all(Violacao.tipoviolacao))
            denuncia = db.session.query(Denuncia).get(denuncia_id)
            # group the remaining violations by "[vitima_id]nome|[suspeito_id]nome"
            # (the original rebuilt this key three times and shadowed `v`)
            for viol in q.all():
                key = ('[' + str(viol.vitima.id) + ']' + viol.vitima.nome + '|' +
                       '[' + str(viol.suspeito.id) + ']' + viol.suspeito.nome)
                vitsup.setdefault(key, []).append(viol)
    return render_template('ajax_relacao_violacao.html', vitsup=vitsup, denuncia=denuncia)
@app.route('/ajax/exemplofonte', methods=['GET', 'POST'])
@login_required
def ajax_exemplofonte():
    """Return the example text registered for one source type (TipoFonte)."""
    a = []
    tipofonte_id = request.values.get('tipofonte', '').strip()
    if tipofonte_id:
        tf = db.session.query(TipoFonte).get(tipofonte_id)
        # guard against an unknown id (the original raised AttributeError)
        if tf is not None:
            a = [tf.exemplo]
    return jsonify(result=a)
@app.route('/ajax/grafico/violacaoporestado', methods=['GET', 'POST'])
@login_required
def ajax_violacaoporestado():
    """Chart data: count of one violation microcategory grouped by state."""
    microcategoria = request.values.get('microcategoria', '').strip()
    dtocorinicio = request.values.get('dtocorinicio', '').strip()
    dtocorfim = request.values.get('dtocorfim', '').strip()
    tv = db.session.query(TipoViolacao).get(microcategoria)
    q = db.session.query(Denuncia.estado, func.count(Denuncia.estado))
    if dtocorinicio:
        q = q.filter(Denuncia.dtdenuncia >= datestring_to_date(dtocorinicio))
    if dtocorfim:
        # exclusive upper bound one day later so the end date itself counts
        q = q.filter(Denuncia.dtdenuncia < (datestring_to_date(dtocorfim) + datetime.timedelta(days=1)))
    q = q.join(Denuncia.violacoes)
    q = q.filter(Violacao.tipoviolacoes_id == microcategoria)
    q = q.group_by(Denuncia.estado)
    q = q.order_by(func.count(Denuncia.estado))
    rows = q.all()  # execute once (the original ran the query twice)
    a = []
    if rows:
        a.append(['Estado', tv.microcategoria])  # chart header row
        for est, quant in rows:
            a.append([est, quant])
    return jsonify(result=a)
@app.route('/ajax/grafico/violacaoporcidade', methods=['GET', 'POST'])
@login_required
def ajax_violacaoporcidade():
    """Chart data: count of one violation microcategory grouped by city."""
    microcategoria = request.values.get('microcategoria', '').strip()
    dtocorinicio = request.values.get('dtocorinicio', '').strip()
    dtocorfim = request.values.get('dtocorfim', '').strip()
    tv = db.session.query(TipoViolacao).get(microcategoria)
    q = db.session.query(Denuncia.cidade, func.count(Denuncia.cidade))
    if dtocorinicio:
        q = q.filter(Denuncia.dtdenuncia >= datestring_to_date(dtocorinicio))
    if dtocorfim:
        # exclusive upper bound one day later so the end date itself counts
        q = q.filter(Denuncia.dtdenuncia < (datestring_to_date(dtocorfim) + datetime.timedelta(days=1)))
    q = q.join(Denuncia.violacoes)
    q = q.filter(Violacao.tipoviolacoes_id == microcategoria)
    q = q.group_by(Denuncia.cidade)
    q = q.order_by(func.count(Denuncia.cidade))
    rows = q.all()  # execute once (the original ran the query twice)
    a = []
    if rows:
        a.append(['Cidade', tv.microcategoria])  # chart header row
        for cid, quant in rows:
            a.append([cid, quant])
    return jsonify(result=a)
@app.route('/ajax/grafico/violacaopormacro', methods=['GET', 'POST'])
@login_required
def ajax_violacaopormacro():
    """Chart data: per-microcategory counts within one macrocategory."""
    macrocategoria = request.values.get('macrocategoria', '').strip()
    dtocorinicio = request.values.get('dtocorinicio', '').strip()
    dtocorfim = request.values.get('dtocorfim', '').strip()
    q = db.session.query(TipoViolacao.microcategoria, func.count(TipoViolacao.microcategoria))
    if dtocorinicio:
        q = q.filter(Denuncia.dtdenuncia >= datestring_to_date(dtocorinicio))
    if dtocorfim:
        # exclusive upper bound one day later so the end date itself counts
        q = q.filter(Denuncia.dtdenuncia < (datestring_to_date(dtocorfim) + datetime.timedelta(days=1)))
    q = q.join(TipoViolacao.violacoes)
    q = q.filter(TipoViolacao.macrocategoria == macrocategoria)
    q = q.group_by(TipoViolacao.microcategoria)
    q = q.order_by(func.count(TipoViolacao.microcategoria))
    rows = q.all()  # execute once (the original ran the query twice)
    a = []
    if rows:
        a.append(['Microcategoria', macrocategoria])  # chart header row
        for mic, quant in rows:
            a.append([mic, quant])
    return jsonify(result=a)
@app.route('/ajax/grafico/violacaoportempo', methods=['GET', 'POST'])
@login_required
def ajax_violacaoportempo():
    """Chart data: monthly counts of one violation microcategory."""
    microcategoria = request.values.get('microcategoria', '').strip()
    dtocorinicio = request.values.get('dtocorinicio', '').strip()
    dtocorfim = request.values.get('dtocorfim', '').strip()
    tv = db.session.query(TipoViolacao).get(microcategoria)
    q = db.session.query(sa.func.month(Denuncia.dtdenuncia), func.count(sa.func.month(Denuncia.dtdenuncia)))
    if dtocorinicio:
        q = q.filter(Denuncia.dtdenuncia >= datestring_to_date(dtocorinicio))
    if dtocorfim:
        # exclusive upper bound one day later so the end date itself counts
        q = q.filter(Denuncia.dtdenuncia < (datestring_to_date(dtocorfim) + datetime.timedelta(days=1)))
    q = q.join(Denuncia.violacoes)
    q = q.filter(Violacao.tipoviolacoes_id == microcategoria)
    q = q.group_by(sa.func.month(Denuncia.dtdenuncia))
    q = q.order_by(sa.func.month(Denuncia.dtdenuncia))
    rows = q.all()  # execute once (the original ran the query three times)
    a = []
    if rows:
        a.append(['Meses', tv.microcategoria])  # chart header row
        # month number -> Portuguese abbreviation
        meses = {1: 'JAN', 2: 'FEV', 3: 'MAR', 4: 'ABR', 5: 'MAI', 6: 'JUN',
                 7: 'JUL', 8: 'AGO', 9: 'SET', 10: 'OUT', 11: 'NOV', 12: 'DEZ'}
        for m, quant in rows:
            a.append([meses[m], quant])
    return jsonify(result=a)
@app.route('/ajax/grafico/violacaoporsuspeito', methods=['GET', 'POST'])
@login_required
def ajax_violacaoporsuspeito():
    """Chart data: monthly counts of one microcategory for one suspect type."""
    microcategoria = request.values.get('microcategoria', '').strip()
    tiposuspeito = request.values.get('tiposuspeito', '').strip()
    tv = db.session.query(TipoViolacao).get(microcategoria)
    # selects monthname() but groups/orders by month(): same granularity,
    # so each row is one month labelled with its name
    q = db.session.query(sa.func.monthname(Denuncia.dtdenuncia), func.count(sa.func.month(Denuncia.dtdenuncia)))
    q = q.join(Denuncia.violacoes)
    q = q.join(Denuncia.suspeitos)
    q = q.join(Suspeito.tiposuspeito)
    q = q.filter(Violacao.tipoviolacoes_id == microcategoria)
    q = q.filter(TipoSuspeito.tipo == tiposuspeito)
    q = q.group_by(sa.func.month(Denuncia.dtdenuncia))
    q = q.order_by(sa.func.month(Denuncia.dtdenuncia))
    rows = q.all()  # execute once (the original ran the query twice)
    a = []
    if rows:
        a.append(['Meses', tv.microcategoria])  # chart header row
        for m, quant in rows:
            a.append([m, quant])
    return jsonify(result=a)
if not DEBUG:
    # Production-only 500 handler: e-mail the traceback tagged with a unique
    # id and show a friendly error page carrying that same id.
    @app.errorhandler(500)
    def internalerror(e):
        import sys, traceback, time
        from utils import envia_email_excecao
        # unique-enough incident id: timestamp + exception object identity
        eid = "%d-%d" % (time.time(), id(e))
        # the failed request may have left the session dirty
        db.session.rollback()
        exc_info = traceback.format_exc()
        envia_email_excecao(str(eid), exc_info)
        return render_template('500.html', eid=eid), 500
@app.route('/shutdown', methods=['GET', 'POST'])
@login_required
def shutdown():
    """Stop the server remotely (presumably via werkzeug's shutdown hook
    inside shutdown_server() — confirm).

    NOTE(review): any logged-in user can hit this; consider requiring a
    specific permission.
    """
    shutdown_server()
    logger.info(u'Server shutting down...')
    return 'Server shutting down...'
def log(acao):
    """Log an action, prefixed with the current user's login when available."""
    try:
        logger.info(g.user.login + ' ' + acao)
    except Exception:
        # Outside a request context (or with no user logged in) g.user is
        # unusable; fall back to the bare action.  The original used a bare
        # `except:` which would also swallow KeyboardInterrupt/SystemExit.
        logger.info(acao)
@app.route('/teste', methods=['GET', 'POST'])
def teste_email():
    """Ad-hoc SMTP smoke test: render a fixed template and mail it.

    NOTE(review): no @login_required and a hard-coded recipient — this route
    should be removed or protected before deployment.
    """
    from_header = 'sistema@cnddh.org.br'
    subject = 'Teste de email'
    email_to = ['dedeco@gmail.com',]
    email_cc = None
    t = render_template('encaminhamento-teste.txt')
    # body is sent as latin-1 encoded HTML
    msg = MIMEText(t, 'html', 'latin-1')
    msg['From'] = from_header
    msg['Subject'] = subject
    msg['To'] = ', '.join(email_to)
    msg['Reply-To'] = 'dedeco@gmail.com'
    if email_cc:
        msg['Cc'] = ', '.join(email_cc)
    #if email_bcc:
    #    msg['Bcc'] = ', '.join(email_bcc)
    enviado = envia_email(from_header, email_to, msg.as_string())
    if enviado:
        return 'Email enviado'
    else:
        return 'Nops'
import argparse
import torch.utils.data
import random
import time
import numpy as np
from pydoc import locate
import scipy.misc
import csv
import os
import option
import models
import datasets
import utils
from point_cloud import Depth2BEV
from tqdm import tqdm
# --- detector evaluation setup ---------------------------------------------
opt = option.make(argparse.ArgumentParser())
#d = datasets.IntPhys(opt, 'paths_test')
d = datasets.IntPhys(opt, 'paths_val')
# evaluate a hand-picked subset of the validation set
indices = [4246]
#indices = list(range(1, 5000))
#indices = list(range(1200,1300))
valLoader = torch.utils.data.DataLoader(
    d,
    1,  # batch size 1: one scene at a time
    num_workers=opt.nThreads,
    sampler=datasets.SubsetSampler(indices)
)
opt.nbatch_val = len(valLoader)
print(opt)
# seed every RNG for reproducibility
np.random.seed(opt.manualSeed)
random.seed(opt.manualSeed)
torch.manual_seed(opt.manualSeed)
if opt.gpu:
    torch.cuda.manual_seed_all(opt.manualSeed)
model = locate('models.%s' %opt.model)(opt)
if opt.load:
    model.load(opt.load, 'test')
if opt.gpu:
    model.gpu()
print('n parameters: %d' %sum([m.numel() for m in model.parameters()]))
# detection-count accumulators, reset after each grid point of the sweep
false_positives = 0
false_negatives = 0
true_positives = 0
# precision = true_positives / (true_positives + false_positives)
# recall = true_positives / (true_positives + false_negatives)
# hyper-parameter grid to sweep (currently a single point each)
overlap_ratios = [1.0]
#conf_threshs = [0.5, 0.6, 0.7, 0.8, 0.9, 0.95, 0.98]
conf_threshs = [0.9]
ball_radii = [opt.ball_radius]
# NOTE(review): csv_writer/outfile below are commented out, but the sweep
# loop further down still calls csv_writer.writerow(...) and outfile.close()
# — those will raise NameError; restore these lines or remove the uses.
#outfile = open(os.path.join(opt.results, 'detector_eval_results.csv'), 'w+')
#fieldnames = ['conf thresh', 'overlap ratio', 'ball radius', 'precision', 'recall']
#csv_writer = csv.DictWriter(outfile, fieldnames=fieldnames)
#csv_writer.writeheader()
if opt.save_detections:
    #detections_file = open(os.path.join('/data/pemami/intphys/val_detections.csv'), 'w+')
    detections_file = open(os.path.join(opt.results, 'val_detections.csv'), 'w+')
    #fieldnames = ['height','depth','width','p(i-1;j-1)', 'p(i;j-1)', 'p(i+1;j-1)', 'p(i-1;j)', 'p(i;j)', 'p(i+1;j)', 'p(i-1;j+1)', 'p(i;j+1)', 'p(i+1;j+1)']
    fieldnames = ['idx', 'height', 'depth', 'width', 'p']
    det_csv_writer = csv.DictWriter(detections_file, fieldnames=fieldnames)
    det_csv_writer.writeheader()
viz = utils.Viz(opt)
model.eval()
# running scene index used in the CSV rows and image names
i = d.indices[0]
# Sweep the (ball radius x overlap ratio x confidence threshold) grid and
# accumulate detection counts over the selected validation scenes.
for radius in ball_radii:
    opt.ball_radius = radius
    model.bev_pixor.ball_radius = radius
    model.fv_pixor.ball_radius = radius
    for overlap_ratio in overlap_ratios:
        for conf_th in conf_threshs:
            opt.conf_thresh = conf_th
            model.bev_pixor.conf_thresh = conf_th
            model.fv_pixor.conf_thresh = conf_th
            for data in tqdm(valLoader):
                #pc = data[0]['point_cloud'].squeeze()
                # numpy array [N,4]
                #detections, bev_scores, fv_scores, labeled_bev_scores, labeled_fv_scores = model.predict(data, d.depth2bev)
                detections, dets_px, bev_scores, _ = model.predict(data, d.depth2bev)
                print(detections)
                objects = data[1]['objects']  # ground-truth objects for this scene
                found_objs = 0
                # greedy matching: each detection claims at most one object
                for det, dp in zip(detections, dets_px):
                    j = -1; found = False
                    for obj in objects:
                        obj = obj.numpy()[0]
                        j += 1
                        #dist_bev = np.linalg.norm(obj[0:2] - det[0:2])
                        #dist_fv = abs(obj[2] - det[2])
                        #if dist_bev/(overlap_ratio * 2 * opt.ball_radius) <= 1. and \
                        #    dist_fv/(overlap_ratio * 4 * opt.ball_radius) <= 1.:
                        dist = np.linalg.norm(obj - det)
                        # match when the detection lies within the scaled diameter
                        if dist/(overlap_ratio * 2 * opt.ball_radius) <= 1.:
                            found = True
                            true_positives += 1
                            found_objs += 1
                            print(det)
                            print(obj)
                            # down-scale pixel coords by 4 to index the score map
                            x, y, z=dp[0], dp[1], dp[2]
                            x = int(round(x/4))
                            y = int(round(y/4))
                            z = int(round(z/4))
                            print(bev_scores[x,y])
                            if opt.save_detections:
                                x, y, z=dp[0], dp[1], dp[2]
                                x = int(round(x/4))
                                y = int(round(y/4))
                                z = int(round(z/4))
                                det_csv_writer.writerow({'idx': i, 'height': round(det[2],3), 'depth': round(det[0],3), 'width': round(det[1],3), 'p': bev_scores[x,y]})
                            break
                    if found:
                        # remove the matched object so it can't be claimed twice
                        del objects[j]
                # this should be 0 if all objects were accounted for
                false_negatives += len(objects)
                false_positives += len(detections) - found_objs
                # store bev scores for false negatives
                if opt.save_detections:
                    for obj in objects:
                        y,x = d.depth2bev.point_2_grid_cell(obj[0], scale=4)
                        det_csv_writer.writerow({'idx': i, 'height': round(obj[0,2].item(),3), 'depth': round(obj[0,0].item(),3), 'width': round(obj[0,1].item(),3), 'p': bev_scores[x,y]})
                if opt.image_save or opt.visdom:
                    # NOTE(review): `pc` is only assigned in the commented-out
                    # line at the top of this loop, so this branch raises
                    # NameError as written.
                    start = time.time()
                    Depth2BEV.display_point_cloud(pc, '3d', detections, opt.ball_radius, True, name='eval_imgs/{}.png'.format(i))
                    diff = time.time() - start
                    print("display pc time: {}".format(diff))
                    """
                    bev_scores = scipy.misc.imresize(bev_scores * 255, 200)
                    scipy.misc.imsave('eval_imgs/bev_scores_{}.png'.format(i), bev_scores)
                    fv_scores = scipy.misc.imresize(fv_scores * 255, 200)
                    scipy.misc.imsave('eval_imgs/fv_scores_{}.png'.format(i), fv_scores)
                    labeled_bev_scores = scipy.misc.imresize(labeled_bev_scores, 200)
                    scipy.misc.imsave('eval_imgs/labeled_bev_scores_{}.png'.format(i), labeled_bev_scores)
                    labeled_fv_scores = scipy.misc.imresize(labeled_fv_scores, 200)
                    scipy.misc.imsave('eval_imgs/labeled_fv_scores_{}.png'.format(i), labeled_fv_scores)
                    tmp = data[0]['FV'][0][0] * 255
                    for j in range(1,data[0]['FV'][0].shape[0]):
                        tmp |= data[0]['FV'][0][j,:,:] * 255
                    scipy.misc.imsave('eval_imgs/fv_{}.png'.format(i), tmp)
                    """
                i += 1
            # precision / recall for this grid point (guarded against /0)
            prec = 0
            tp_fp = true_positives + false_positives
            if tp_fp > 0:
                prec = true_positives / tp_fp
            recall = 0
            tp_fn = true_positives + false_negatives
            if tp_fn > 0:
                recall = true_positives / tp_fn
            print("threshold: {}, ball radius: {}, overlap ratio: {}, precision: {}, recall: {}".format(
                opt.conf_thresh, opt.ball_radius, overlap_ratio, prec, recall))
            # NOTE(review): csv_writer is commented out in the setup section
            # above — this call raises NameError at runtime.
            csv_writer.writerow({'conf thresh': opt.conf_thresh, 'overlap ratio': overlap_ratio,
                'ball radius': opt.ball_radius, 'precision': prec, 'recall': recall})
            # reset counters for the next grid point
            false_positives = 0
            false_negatives = 0
            true_positives = 0
# NOTE(review): outfile is commented out in the setup section — NameError here.
outfile.close()
print('Done')
|
#!/home/jupyter/py-env/python2.7.13/bin/python2.7
import theano

# Minimal Theano smoke test: build the symbolic expression out = a + a,
# compile it, and evaluate it on a small vector.
a = theano.tensor.vector()            # symbolic input vector
out = a + a                           # symbolic expression (element-wise double)
f = theano.function([a], out)         # compile the graph into a callable
print(f([0, 1, 2]))                   # expected: [0. 2. 4.]
|
import pyrealsense2 as rs
import numpy as np
import time
def initialize_camera():
    """Start a RealSense pipeline streaming accelerometer and gyro frames.

    Returns the started pipeline so the caller can poll and stop it.
    """
    p = rs.pipeline()
    conf = rs.config()
    conf.enable_stream(rs.stream.accel)
    conf.enable_stream(rs.stream.gyro)
    # the returned profile was bound to an unused local in the original
    p.start(conf)
    return p
def gyro_data(gyro):
    """Pack one gyro sample's x/y/z components into a numpy vector."""
    components = [gyro.x, gyro.y, gyro.z]
    return np.asarray(components)
def accel_data(accel):
    """Pack one accelerometer sample's x/y/z components into a numpy vector."""
    components = [accel.x, accel.y, accel.z]
    return np.asarray(components)
p = initialize_camera()
try:
    # Poll motion frames forever: frame 0 is treated as accel and frame 1 as
    # gyro (presumably matching the enable_stream order above — confirm).
    while True:
        f = p.wait_for_frames()
        accel = accel_data(f[0].as_motion_frame().get_motion_data())
        gyro = gyro_data(f[1].as_motion_frame().get_motion_data())
        # print("accelerometer: ", accel)
        print("gyro: ", gyro)
        time.sleep(0.2)  # throttle the console output to ~5 Hz
finally:
    p.stop()  # always release the pipeline, even on Ctrl-C
# -*- coding: utf-8 -*-
"""
server side code
Created on Mon Nov 7 03:36:00 2016
@author: Shriharsh Ambhore
@author: Kandhasamy Rajasekaran
@author: Daniel Akbari
"""
import socket
import sys
# Bind a TCP server on localhost:8080 and accept one connection at a time.
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
    server_socket.bind(('localhost', 8080))
except socket.error as msg:
    # On Python 3, socket.error is OSError and is not subscriptable, so the
    # original `msg[1]` raised a TypeError; use the exception text instead.
    print('Bind to socket failed.' + ' Message ' + str(msg))
    sys.exit()
server_socket.listen(1)
# Accept one client at a time; each received message is treated as a URL and
# decomposed into protocol / domain / subdomains / port / path / query /
# fragment, printing each component as it is found.
while True:
    conn,address=server_socket.accept()
    dataFromClient=conn.recv(4096)
    url=dataFromClient.decode()
    print('Data from client:',dataFromClient) # data received from client
    ## at any point of time the characters :?#/[]@ are reserved as delimiters
    pos=url.find(":")
    subdomainlist = []
    if pos>0:
        tempvar=url[:pos]
        if url[pos:].find("//")> 0: # the url carries a scheme, e.g. https://www.google.com
            protocol=tempvar # everything before the first ':' is the scheme
            print("protocol---",protocol)
            url=url[pos+3:] ## drop ':' and '//'
            if ":" in url: ## a second ':' separates domain from port
                domain,url=url.split(":")
                print("domain---",domain)
                #subdomain= domain[:(domain.find(".",1))]
                subdomainlist.append(domain)
                gblsd = ""
                # peel the domain from the right so www.example.com yields
                # com, example.com, www.example.com
                while domain.find(".")!=-1:
                    remainingUrl,sd=domain.rsplit(".",1)
                    sd="."+sd+gblsd
                    subdomainlist.append(sd.lstrip(".")) # add valid string to the list
                    gblsd=sd
                    domain=remainingUrl
                print("subdomain---",subdomainlist)
            if "#" in url: ## '#' introduces the fragment
                url,anchor=url.split("#",1)
                print("fragment---",anchor)
            if "?" in url: ## '?' introduces the query string
                url,query=url.split("?",1)
                print("query---",query)
            if "/" in url: ## '/' separates the port (or domain) from the path
                port,path=url.split("/",1)
                if port.isdigit():
                    print("port---",port)
                else:
                    # no numeric port: the token before '/' is the domain
                    print("domain",port)
                    subdomain= port[:(port.rfind(".",2))]
                    print("subdomain---",subdomain)
                print("path---",path)
        else: # no scheme in the url, e.g. www.google.com:8080/...
            domain=tempvar
            print("domain:",domain)
            if ":" in url:
                domain,url=url.split(":")
                print("domain---",domain)
                #subdomain= domain[:(domain.rfind(".",1))]
                gblsd = ""
                # same right-to-left subdomain expansion as the scheme branch
                while domain.find(".")!=-1:
                    remainingUrl,sd=domain.rsplit(".",1)
                    sd="."+sd+gblsd
                    subdomainlist.append(sd.lstrip("."))
                    gblsd=sd
                    domain=remainingUrl
                print("subdomain---",subdomainlist)
            if "#" in url:
                url,anchor=url.split("#",1)
                print("fragment---",anchor)
            if "?" in url:
                url,query=url.split("?",1)
                print("query---",query)
            if "/" in url:
                port,path=url.split("/",1)
                print("port---",port)
                print("path---",path)
    conn.close()
|
class temperaturas():
    """Read integer temperatures from temperaturas.txt, print/convert them
    and compute their average."""

    def __init__(self):
        # Instance attributes.  The original declared these as shared
        # class-level lists and misspelled this method as __inint__, so the
        # constructor never actually ran.
        self.fecha = []
        self.celcius = []
        self.farenheit = []
        self.arreglo = []

    def leer(self):
        """Append one integer per line of temperaturas.txt to arreglo."""
        # `with` closes the file; the original leaked the handle
        with open("temperaturas.txt", "r") as archivo:
            for row in archivo:
                self.arreglo.append(int(row))

    def convertir(self):
        """For each stored row, prompt for a Celsius value and print it in
        Fahrenheit.  (Note: the typed value is used, not the stored row.)"""
        for row in self.arreglo:
            celcius = float(input("ingresa tus temperaturas en celcius: "))
            farenheit = (celcius * 1.80) + 32
            print(farenheit)

    def imprimir(self):
        """Print every stored reading."""
        for row in self.arreglo:
            print(row)

    def promedio(self):
        """Print the average of the stored readings."""
        cont = 0
        for row in self.arreglo:
            cont += row
        promedio_total = cont / len(self.arreglo)
        print(promedio_total)
repetir = "si"
objeto = temperaturas()
# Interactive driver loop.
# NOTE(review): the loop only runs while repetir == "si", so at the `if`
# below repetir is always "si" and promedio() is unreachable; answering
# "N"/"n" simply exits without printing the average — confirm intent.
while repetir == "si":
    objeto.leer()
    objeto.imprimir()
    objeto.convertir()
    if repetir == "N" or repetir == "n":
        objeto.promedio()
    repetir = input("desea repetir?" )
|
import numpy as np
import time
class NN():
def __init__(self, lambd = 0.01, alpha_0 = 1e-2, alpha_final = 1e-5, mu = 0.1,
hidden_nodes = 32, num_iter = 40):
self.lambd = lambd
self.alpha_0 = alpha_0
self.learning_const = np.log(1.0*alpha_0/alpha_final) / num_iter
self.mu = mu
self.hidden_nodes = hidden_nodes
self.num_iter = num_iter
def get_weights(self):
return self.w1, self.w2
# Returns a k-dimensional vector
def softmax(self, x):
#w is a n_classes by n_feats
exps = np.exp(x)
return exps / np.sum(exps, axis=1, keepdims = True)
# The derivative turns out to be extremely simple when combined with entropy loss
# def softmax_deriv(w, x):
def tanh_deriv(self, tanh):
return (1 - np.power(tanh, 2))
def sigmoid(self, x):
return 1.0 / (1 + np.exp(-x))
def sigmoid_deriv(self, sig):
return sig * (1 - sig)
def perc_correct(self, probs, y_inds):
# print probs.shape
preds = np.argmax(probs, axis = 1)
perc = np.mean([preds == y_inds])
return perc
def calculate_loss(self, probs, y_inds):
probs_correct = probs[range(self.n_data), y_inds]
# print 'probs:', probs[:10]
# print 'y_inds:', y_inds[:10]
# print 'probs_correct:', probs_correct
logprobs_correct = -np.log(probs_correct)
total_loss = np.sum(logprobs_correct)
total_loss += self.lambd/2 * (np.sum(np.square(self.w1)) + np.sum(np.square(self.w2)))
return 1./self.n_data * total_loss
def fit(self, X, y):
# y is some index of the correct category
classes = list(set(y))
self.classes_dict = {}
for i in xrange(len(classes)):
self.classes_dict[classes[i]] = i
print self.classes_dict
y_inds = map(lambda x: self.classes_dict[x], y)
self.n_classes = len(classes)
self.n_feats = X.shape[1]
self.n_data = X.shape[0]
self.hidden_nodes = 2*self.n_classes
np.random.seed(1)
#neural network has 2 hidden layers and X.shape[1] nodes in each layer
num_nodes = [self.n_feats, self.hidden_nodes, self.n_classes]
#initialize layer weights
self.w1 = np.random.randn(num_nodes[0], num_nodes[1]) / np.sqrt(num_nodes[0])
self.b1 = np.zeros((1, num_nodes[1]))
self.w2 = np.random.randn(num_nodes[1], num_nodes[2]) / np.sqrt(num_nodes[1])
self.b2 = np.zeros((1, num_nodes[2]))
print 'w1:', self.w1[:2,:10]
# print 'b1:', self.b1
# print 'w2:', self.w2
# print 'b2:', self.b2
begin = time.time()
for i in xrange(self.num_iter):
lambd = 0.01
alpha = self.alpha_0 * np.exp(-self.learning_const * i)
alpha = 1e-4
z1 = np.dot(X, self.w1) + self.b1 #n_data by n_hidden
a1 = np.tanh(z1)
z2 = np.dot(a1, self.w2) + self.b2 #n_data by n_classes
probs = self.softmax(z2)
l2_delta = np.array(probs)
l2_delta[range(self.n_data), y_inds] -= 1 #n_data by n_classes
l1_error = np.dot(l2_delta, self.w2.T)
l1_delta = l1_error * self.tanh_deriv(a1) #n_data by n_hidden
dW2 = np.dot(a1.T, l2_delta) #n_hidden by n_classes
db2 = np.sum(l2_delta, axis = 0, keepdims = True) #1 by n_classes
dW1 = np.dot(X.T, l1_delta)
db1 = np.sum(l1_delta, axis = 0, keepdims = True)
dW2 += self.lambd * self.w2
dW1 += self.lambd * self.w1
self.w1 += -alpha * dW1
self.b1 += -alpha * db1
self.w2 += -alpha * dW2
self.b2 += -alpha * db2
if i % (self.num_iter/10) == 0:
print 'alpha:', alpha
print 'w1:', self.w1[:2,:10]
# print 'b1:', self.b1
# print 'w2:', self.w2
# print 'b2:', self.b2
print 'Probs:', probs[:2,:10]
print 'Loss:', self.calculate_loss(probs, y_inds)
print 'Percent correct:', self.perc_correct(probs, y_inds)
print time.time() - begin
begin = time.time()
# Has momentum
# v2 = self.mu * v2 - alpha * np.dot(l1.T, l2_delta)
# v1 = self.mu * v1 - alpha * np.dot(X.T, l1_delta) #sum over all data points
# self.w2 += v2
# self.w1 += v1
print 'Loss:', self.calculate_loss(probs, y_inds)
print 'Percent correct:', self.perc_correct(probs, y_inds)
print time.time() - begin
begin = time.time()
def predict(self, X):
z1 = np.dot(X, self.w1) + self.b1
a1 = np.tanh(z1)
z2 = np.dot(a1, self.w2) + self.b2
probs = self.softmax(z2)
def score(self, X, y):
probs = self.predict(X)
y_inds = map(lambda x: self.classes_dict[x], y)
return self.perc_correct(probs, y_inds)
|
# Scan the simC4_synA X-chromosome VCF and keep SNPs that are fixed
# differences between the two samples (columns 10 and 11), writing:
#   chrom  pos  ref  alt  sim_allele  sech_allele
# NOTE(review): sample columns are assumed to hold genotype first, then two
# depth-like values, ':' / ',' separated — confirm against the VCF header.

def _sample_fields(col):
    """Split one VCF sample column into its atomic ':'/',' separated values."""
    return col.replace('\n', '').replace(':', '\t').replace(',', '\t').split('\t')

# `with` guarantees both files are closed (idiomatic replacement for the
# original readline/flag loop with manual close calls)
with open("/home/mshahandeh/BCsynA/sim_bcfs/simC4_synA.varX_SNPs-final.vcf", "r") as in_file_simC4_synA, \
     open("/home/mshahandeh/BCsynA/sim_bcfs/simC4_synA.X_SNPs.txt", "w") as out_file_simC4_synA:
    for line in in_file_simC4_synA:
        if line == '' or line[0] == '#':
            continue  # skip header lines
        fields = line.split('\t')
        chrom = fields[0]
        pos = fields[1]
        ref = fields[3]
        alt = fields[4]
        sim_SNP = _sample_fields(fields[9])
        sech_SNP = _sample_fields(fields[10])
        if sech_SNP[0] == '0' and sech_SNP[1] == '0' and int(sech_SNP[2]) >= 30 and sim_SNP[0] == '1' and int(sim_SNP[1]) >= 30 and sim_SNP[2] == '0':
            # sim carries the alternate allele, sech the reference
            out_file_simC4_synA.write(chrom + '\t' + pos + '\t' + ref + '\t' + alt + '\t' + '1' + '\t' + '0' + '\n')
        elif sim_SNP[0] == '0' and sim_SNP[1] == '0' and int(sim_SNP[2]) >= 30 and sech_SNP[0] == '1' and int(sech_SNP[1]) >= 30 and sech_SNP[2] == '0':
            # sech carries the alternate allele, sim the reference
            out_file_simC4_synA.write(chrom + '\t' + pos + '\t' + ref + '\t' + alt + '\t' + '0' + '\t' + '1' + '\n')
        # anything else (shared alleles, missing data, low depth) is skipped
import pyodbc
import connections as conn

# Seed the Department table.  A parameterized executemany replaces the six
# hand-written INSERT strings (same rows, one round of statement parsing).
departments = [
    ('Books',),
    ('Clothing',),
    ('Makeup',),
    ('Kitchen',),
    ('Movies',),
    ('Pets',),
]
cursor_new = conn.conn_new.cursor()
cursor_new.executemany("Insert Into Department Values(?)", departments)
conn.conn_new.commit()
# pyupbit module exercise — walk through the public market-data helpers.
import pyupbit

print(pyupbit.Upbit)  # the authenticated-client class itself (not instantiated)

# all tradable tickers, then only the KRW-quoted ones
tickers = pyupbit.get_tickers()
print(tickers)
tickers = pyupbit.get_tickers(fiat="KRW")
print(tickers)

# spot prices: one KRW pair, one BTC-quoted pair, then a batch request
price = pyupbit.get_current_price("KRW-ADA")
print(price)
price = pyupbit.get_current_price("BTC-ADA")
print(price)
price = pyupbit.get_current_price(["KRW-ADA", "KRW-ETH", "USDT-BTC"])
print(price)

# OHLCV candles for XRP: default interval, then 50 monthly candles
tickers = ["KRW-BTC", "KRW-ETH", "KRW-XRP", "KRW-ADA"]
df = pyupbit.get_ohlcv(tickers[2])
print(df)
df = pyupbit.get_ohlcv(tickers[2], interval="month", count=50)
print(df)

# order-book snapshot (fetched twice; second pass also shows the type)
orderbook = pyupbit.get_orderbook("KRW-ADA")
print(orderbook)
orderbook = pyupbit.get_orderbook("KRW-ADA")
print(orderbook)
print(type(orderbook))
# iterate the individual bid/ask levels
bids_asks = orderbook['orderbook_units']
for bid_ask in bids_asks:
    print(bid_ask)
|
# Copyright (c) 2012-2021, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from typing import Optional
from .aws import Action as BaseAction
from .aws import BaseARN
# IAM service prefix for Cost & Usage Report actions ("cur:...").
service_name = "AWS Cost and Usage Report"
prefix = "cur"
class Action(BaseAction):
    """An IAM policy action in the "cur" namespace."""

    def __init__(self, action: Optional[str] = None) -> None:
        super().__init__(prefix, action)
class ARN(BaseARN):
    """An ARN for a Cost and Usage Report resource."""

    def __init__(self, resource: str = "", region: str = "", account: str = "") -> None:
        super().__init__(
            service=prefix, resource=resource, region=region, account=account
        )
# One module-level constant per "cur" service action (appears alphabetical;
# this file follows the generated-code layout of the other action modules).
DeleteReportDefinition = Action("DeleteReportDefinition")
DescribeReportDefinitions = Action("DescribeReportDefinitions")
GetClassicReport = Action("GetClassicReport")
GetClassicReportPreferences = Action("GetClassicReportPreferences")
GetUsageReport = Action("GetUsageReport")
ModifyReportDefinition = Action("ModifyReportDefinition")
PutClassicReportPreferences = Action("PutClassicReportPreferences")
PutReportDefinition = Action("PutReportDefinition")
ValidateReportDestination = Action("ValidateReportDestination")
|
{
'sources': [
'../nodeif/jswrapbase.cc',
'../nodeif/fastJson.cc',
'../genes/genes.cc',
'../common/hacks.cc',
'../common/hacks_jsWrap.cc',
'../common/packetbuf.cc',
'../common/host_debug.cc',
'../common/jsonio.cc',
'../numerical/polyfit.cc',
'../numerical/haltonseq.cc'
]
}
|
from flask import Flask, render_template
from flask_mail import Mail
from flask_bootstrap import Bootstrap
from flask_moment import Moment
from flask_migrate import Migrate
from flask_login import LoginManager
from flask_pagedown import PageDown
from .models import db, User, Role
from .config import configs
def register_blueprints(app):
    """Attach the front, user and admin blueprints to *app*."""
    from .handler import front, user, admin
    app.register_blueprint(front)
    app.register_blueprint(user)
    app.register_blueprint(admin)
def register_extensions(app):
    """Initialise every Flask extension for *app*.

    NOTE: the original also called register_blueprints(app) here, on top of
    the call already made in create_app(); that registered every blueprint
    twice.  Blueprint registration now happens only in create_app().
    """
    # extensions that bind to the app directly (no object needed afterwards)
    Bootstrap(app)
    Moment(app)
    db.init_app(app)
    Migrate(app, db)
    Mail(app)
    pagedown = PageDown()
    pagedown.init_app(app)
    # authentication setup
    login_manager = LoginManager()
    login_manager.init_app(app)
    login_manager.login_view = 'front.login'
    login_manager.session_protection = 'strong'

    @login_manager.user_loader
    def user_loader(id):
        # flask-login callback: map a session id back to a User row
        return User.query.get(id)
def create_app(config):
    """Application factory: build a Flask app for the named *config* entry."""
    app = Flask(__name__)
    app.config.from_object(configs.get(config))
    # NOTE(review): register_extensions() also calls register_blueprints(),
    # so blueprints appear to be registered twice — confirm and drop one.
    register_blueprints(app)
    register_extensions(app)

    @app.errorhandler(404)
    def not_found(error):
        return render_template('404.html'), 404

    @app.errorhandler(500)
    def internal_server_error(error):
        return render_template('500.html'), 500

    return app
|
from django.shortcuts import render, get_object_or_404
from polls.models import Question
# A view function receives the request and renders a template into the
# response.
def index(request):
    """List every poll question, newest first."""
    # Thanks to the ORM, each row comes back as a Question object rather
    # than a raw tuple.
    my_list = Question.objects.all().order_by('-pub_date')
    context = {'question_list': my_list}
    return render(request, 'index.html', context)
def detail(request, aaa):
    """Show one question (*aaa* is the Question primary key) or 404."""
    tmp = get_object_or_404(Question, pk=aaa)
    context = {"question": tmp}
    return render(request, 'detail.html', context)
def vote(request, bbb):
    """Record one vote for the choice submitted from the detail form."""
    # ``bbb`` is the Question primary key captured from the URL.
    question = get_object_or_404(Question, pk=bbb)
    # The chosen option's pk arrives in the POST body under "choice".
    selected = question.choice_set.get(pk=request.POST["choice"])
    selected.votes += 1
    selected.save()
    # result.html shows the running totals for this question.
    return render(request, 'result.html', {'question': question})
# -*- coding: utf-8 -*-
'''
rename all the files in the filepath
'''
import os
def rename(path='path to file', prename="000000"):
    """Rename every regular file directly inside *path* to a zero-padded
    counter keeping the original extension (e.g. ``000001.jpg``).

    :param path: directory to process (replace the placeholder default
        with a real path when running as a script).
    :param prename: template of zeros; its length sets the padded width.
    """
    cnt = 1
    for entry in os.listdir(path):              # every entry in the folder
        old_path = os.path.join(path, entry)
        if os.path.isdir(old_path):             # skip sub-directories
            continue
        filetype = os.path.splitext(entry)[1]   # keep the extension
        # style 1: zero-padded counter, e.g. 000001.jpg
        new_path = os.path.join(path, str(cnt).zfill(len(prename)) + filetype)
        # style 2: images<N>.<ext>
        # new_path = os.path.join(path, 'images' + str(cnt) + filetype)
        os.rename(old_path, new_path)
        cnt += 1

if __name__ == '__main__':
    rename()
import math
import time
from pyglet.window import mouse
import devices
import network
NODE_SIZE = 10
class Simulation:
    """Top-level controller tying the World, its devices and the wire
    network together, and tracking UI selection / redraw state."""
    def __init__(self):
        # Interaction mode used by the UI layer; 2 is the startup default.
        self.current_mode = 2
        self.selected_element = None
        self.world = World()
        self.world.simulation = self
        self.net_manager = network.NetworkManager()
        self.fps = 0
        # Dirty flag: renderers only redraw when this is set.
        self.need_redraw = True
    def add_device_to_world(self, device):
        """Insert *device* into the world and create a net for each of its
        source nodes."""
        self.world.devices.append(device)
        device.world = self.world
        for n in device.nodes:
            if n.is_source:
                self.net_manager.create_net(n)
        self.need_redraw = True
    def remove_wire_connection(self, wire):
        """Drop *wire* from the world and sever both directions of the
        logical connection it represented."""
        self.world.wires.remove(wire)
        # Connections are stored directionally, so remove both ways.
        self.net_manager.remove_connection(wire.n1, wire.n2)
        self.net_manager.remove_connection(wire.n2, wire.n1)
        self.need_redraw = True
    def create_wire_connection(self, node1, node2):
        """Connect two distinct nodes with a new wire (no-op with a
        console message when both arguments are the same node)."""
        if node1 == node2:
            print('cant connect the same node')
            return
        self.world.wires.append(Wire(node1, node2))
        # Connections are stored directionally, so add both ways.
        self.net_manager.create_connection(node1, node2)
        self.net_manager.create_connection(node2, node1)
        self.need_redraw = True
    def update(self, dt):
        """Advance the network and every device by one tick; *dt* is the
        elapsed seconds since the previous frame (must be non-zero)."""
        self.net_manager.update()
        for dev in self.world.devices:
            dev.update()
        self.fps = 1./dt
    def delete_element_at(self,x,y):
        """Delete whatever sits at (x, y) — currently only wires are
        deletable; nodes/devices found there are left untouched."""
        element = self.get_element_at(x,y)
        if isinstance(element, Wire):
            self.remove_wire_connection(element)
            return
    def get_element_at(self,x,y):
        """Hit-test (x, y) with priority wire > node > device; None if empty."""
        #we check for wires
        wire = self.world.get_wire_at(x,y)
        if wire is not None:
            return wire
        node = self.world.get_closest_node(x,y)
        if node is not None:
            return node
        device = self.world.get_closest_device(x, y)
        if device is not None:
            return device
        return None
    def select_element_at(self, x,y):
        """Select the node or device at (x, y); devices also receive a
        synthesized left-click event. Wires are not selectable."""
        element = self.get_element_at(x,y)
        if isinstance(element, network.Node):
            self.selected_element = element
            return
        if isinstance(element, devices.Device):
            element.on_mouse_event(x, y, mouse.LEFT, True)
            self.selected_element = element
            return
class Wire:
    """A two-ended connection between two device nodes."""

    def __init__(self, node1, node2):
        # Endpoints are stored exactly as given; no ordering is implied.
        self.n1 = node1
        self.n2 = node2
class World:
    """Holds all devices and wires and answers spatial hit-test queries."""
    def __init__(self):
        self.wires = []         # list of Wire
        self.devices = []       # list of devices.Device
        self.simulation = None  # back-reference, set by Simulation
    def get_closest_device(self, x, y):
        """Return the first device whose bounding box strictly contains
        (x, y), else None (despite the name, no distance ranking is done)."""
        for d in self.devices:
            if d.pos[0] < x < d.pos[0] + d.size[0]:
                if d.pos[1] < y < d.pos[1] + d.size[1]:
                    return d
        return None
    def get_closest_node(self, x, y, radius = NODE_SIZE):
        """Return the first node within *radius* pixels of (x, y), else None."""
        for d in self.devices:
            for n in d.nodes:
                if (n.pos[0] - x) ** 2 + (n.pos[1] - y) ** 2 < radius ** 2:
                    return n
        return None
    def get_wire_at(self, x, y):
        """Return the first wire passing near (x, y), else None."""
        for w in self.wires:
            if self.is_near_wire(w, x, y):
                return w
        return None
    def is_near_wire(self, wire, x, y):
        """True when (x, y) lies within 5 px of the segment joining the
        wire's endpoints.

        Projects the point onto the wire axis, rejects points beyond
        either end, then tests the perpendicular distance.
        """
        x0 = wire.n1.pos[0]
        y0 = wire.n1.pos[1]
        x1 = wire.n2.pos[0]
        y1 = wire.n2.pos[1]
        v = ((x - x0), (y - y0))
        dist = math.sqrt((x1 - x0) ** 2 + (y1 - y0) ** 2)
        if dist == 0:
            # Degenerate wire (both endpoints at the same position): fall
            # back to a plain point-distance test instead of dividing by
            # zero below (the original raised ZeroDivisionError here).
            return (v[0] ** 2 + v[1] ** 2) < 5 ** 2
        u = ((x1 - x0) / dist, (y1 - y0) / dist)
        dot = sum(v[i] * u[i] for i in [0, 1])
        if dot < 0 or dot > dist:
            return False
        b0 = v[0] - dot * u[0]
        b1 = v[1] - dot * u[1]
        return (b0 ** 2 + b1 ** 2) < 5 ** 2
#import os
import sys
import re
#将目标文件读入,以列表形式返回,去除了符号
def ReadFile(path):
    """Read a C/C++ source file and return its lines with punctuation
    runs replaced by single spaces.

    The original opened the file without ever closing it; a ``with``
    block now guarantees the handle is released.

    :param path: path to the .c/.cpp file (relative paths allowed).
    :returns: list of cleaned lines (split on newline).
    """
    with open(path, 'r', encoding='utf-8') as fp:
        raw = fp.read()
    # Replace every run of punctuation (ASCII and full-width) with one space.
    punc = '~`!#$%^&*()_+-=|\';":/.,?><~·!@#¥%……&*()——+-=“:’;、。,?》《{}'
    file_content = re.sub(r"[%s]+" % punc, " ", raw)
    # Split the cleaned text back into lines.
    return file_content.split('\n')
def SplitTxtByBlank(file_content):
    """Flatten a list of lines into a flat list of non-empty,
    space-separated tokens."""
    return [word
            for line in file_content
            for word in line.split(' ')
            if word]
# Return the list of task-relevant keywords found in the text (these are
# the branch keywords of interest, not the full standard C/CPP keyword set).
def GetKey(file_content):
    """Collect the first matching branch keyword on each line.

    Keywords are tested in priority order ('else if' before 'if' so the
    compound form wins) and at most one keyword is recorded per line.
    """
    keywords = ('else if', 'if', 'else', 'switch', 'case',
                'break', 'default', 'return')
    list_dest = []
    for line in file_content:
        for kw in keywords:
            if kw in line:
                list_dest.append(kw)
                break
    return list_dest
def GetExtractKeyList(file_content):
    """Like GetKey but restricted to the selection keywords needed for
    the switch/case statistics; one keyword at most per line, tested in
    priority order ('else if' wins over plain 'if')."""
    keywords = ('else if', 'if', 'else', 'switch', 'case')
    list_dest = []
    for line in file_content:
        for kw in keywords:
            if kw in line:
                list_dest.append(kw)
                break
    return list_dest
def CountStdCKey(key_list):
    """Count how many tokens in *key_list* are standard C keywords
    (plus the compound 'else if')."""
    std_CKey = frozenset([
        'else if', 'char', 'double', 'enum', 'float', 'int', 'long',
        'short', 'signed', 'struct', 'union', 'unsigned',
        'void', 'for', 'do', 'while', 'break', 'continue',
        'if', 'else', 'goto', 'switch', 'case', 'default',
        'return', 'auto', 'extern', 'register', 'static',
        'const', 'sizeof', 'typedef', 'volatile',
    ])
    return sum(1 for token in key_list if token in std_CKey)
def CountCase(Extracted_Key_list):
    """For each 'switch' token, count the run of consecutive 'case'
    tokens immediately following it.

    :param Extracted_Key_list: keyword list from GetExtractKeyList.
    :returns: one count per 'switch', or ``[0]`` when the list contains
        no 'switch' at all.
    """
    if 'switch' not in Extracted_Key_list:
        return [0]
    num_case = []
    n = len(Extracted_Key_list)
    for i, token in enumerate(Extracted_Key_list):
        if token == 'switch':
            cnt = 0
            j = i + 1
            # Walk the consecutive 'case' run, stopping at the list end
            # as well (the original indexed past the end and raised
            # IndexError whenever a switch/case run closed the list).
            while j < n and Extracted_Key_list[j] == 'case':
                cnt += 1
                j += 1
            num_case.append(cnt)
    return num_case
# --- Script entry: analyse a sample C++ file and print keyword stats ---
# NOTE(review): runs on import; an ``if __name__ == '__main__'`` guard
# would make the helpers reusable without side effects.
file_content=ReadFile('../MyCode/CSample.cpp')
key_list=SplitTxtByBlank(file_content)
Extracted_Key_list=GetExtractKeyList(file_content)
print(key_list)
print(GetExtractKeyList(file_content))
print('total num: ',CountStdCKey(key_list))
# Re-derive the per-line keyword list for the switch/case statistics.
key_list=GetKey(file_content)
print('switch num: ',key_list.count('switch'))
print('case num: ',end='')
print(*CountCase(Extracted_Key_list),sep=' ')
|
from django.urls import path
from .views import *
# URL routes for this app: each entry maps a URL prefix to a view
# callable from .views and names the route for {% url %} reversing.
urlpatterns = [
    path('', Home,name='home-page'),
    path('services/', Services,name='services'),
    path('contact/', Contact,name='contact'),
]
|
# -*- coding: utf-8 -*-
def flesch_kincaid_grade(sentence_count, word_count, syllable_count):
    """Compute the Flesch-Kincaid grade level.

    Returns 0.0 when either the sentence or word count is zero, where
    the formula would otherwise divide by zero.
    """
    if word_count == 0 or sentence_count == 0:
        return 0.0
    words_per_sentence = float(word_count) / sentence_count
    syllables_per_word = float(syllable_count) / word_count
    return 0.39 * words_per_sentence + 11.8 * syllables_per_word - 15.59
|
import os
import runez
from mock import patch
from pickley import PickleyConfig
from pickley.env import PythonFromPath, std_python_name
def test_standardizing():
    """std_python_name normalises loose user input to canonical
    pythonX[.Y[.Z]] names, leaving unrecognised input untouched."""
    # Empty-ish input falls back to plain "python".
    assert std_python_name(None) == "python"
    assert std_python_name("") == "python"
    # Major-only forms.
    assert std_python_name("2") == "python2"
    assert std_python_name("3") == "python3"
    assert std_python_name("py3") == "python3"
    assert std_python_name("python3") == "python3"
    assert std_python_name("python 3") == "python3"
    # Major+minor forms, with or without a dot.
    assert std_python_name("37") == "python3.7"
    assert std_python_name("3.7") == "python3.7"
    assert std_python_name("py37") == "python3.7"
    assert std_python_name("python37") == "python3.7"
    assert std_python_name("python 37") == "python3.7"
    # Major+minor+patch forms.
    assert std_python_name("377") == "python3.7.7"
    assert std_python_name("3.7.7") == "python3.7.7"
    assert std_python_name("py377") == "python3.7.7"
    assert std_python_name("python 377") == "python3.7.7"
    # Unrecognised input is returned unchanged.
    assert std_python_name("foo") == "foo"
    assert std_python_name("py 37") == "py 37"
    assert std_python_name("3777") == "3777"
    assert std_python_name("pyth37") == "pyth37"
    assert std_python_name("/foo/python2.7") == "/foo/python2.7"
def mk_python(path, version, executable=True):
    """Write a fake python launcher at *path* that just echoes *version*;
    optionally mark it executable."""
    script = "#!/bin/bash\necho %s\n" % version
    runez.write(path, script)
    if executable:
        runez.make_executable(path)
def test_searching(temp_folder):
    """End-to-end python discovery: dummy installations on PATH and in a
    fake pyenv folder, plus caching and error reporting."""
    cfg = PickleyConfig()
    cfg.set_base(".")
    cfg.configs[0].values["pyenv"] = "pyenv-folder"
    # Simulate a few dummy python installations
    mk_python("p1/python", "2.5.0")
    mk_python("p2/python3", "2.9.1")  # picking an unlikely version, for testing
    mk_python("pyenv-folder/versions/2.9.2/bin/python", "2.9.2")
    # Non-executable installs must be ignored by the scan.
    mk_python("pyenv-folder/versions/2.9.3/bin/python", "2.9.3", executable=False)
    mk_python("dummy/python", "0.1.2")
    # A python reporting major version 0 is flagged as unusable.
    p = PythonFromPath("dummy/python")
    assert p.executable == "dummy/python"
    assert p.problem == "--version did not yield major version component"
    assert p.version == "0.1.2"
    # A python whose --version exits non-zero is flagged too.
    runez.write("dummy/python2", "#!/bin/bash\nexit 1\n")
    runez.make_executable("dummy/python2")
    p = PythonFromPath("dummy/python2")
    assert p.executable == "dummy/python2"
    assert p.problem == "does not respond to --version"
    assert not p.version
    p = PythonFromPath("p1/python", version="3.7.1")  # Simulate 3.7.1
    assert p.needs_virtualenv
    with patch.dict(os.environ, {"PATH": "p1:p2"}, clear=True):
        # None / "python" both resolve to the invoker python.
        invoker = cfg.find_python()
        assert cfg.find_python(None) is invoker
        assert cfg.find_python("python") is invoker
        assert invoker.is_invoker
        p1 = cfg.find_python("/usr/bin/python")
        p2 = cfg.find_python("/usr/bin/python")  # Python install references are cached
        assert p1 is p2
        assert cfg.find_python(p1) is p1
        assert str(p1) == "/usr/bin/python"
        assert p1.satisfies("/usr/bin/python")
        # The pyenv-managed 2.9.2 is found via its spec name.
        p = cfg.find_python("python2.9")
        assert not p.problem
        assert p.version == "2.9.2"
        # NOTE(review): the two satisfies() results below are discarded —
        # these look like missing asserts; confirm intent before changing.
        p.satisfies("py31")
        p.satisfies("py-2.9.2")
        assert not p.is_invoker
        assert cfg.find_python("python2.9") is p  # Now cached
        assert cfg.find_python("py29") is p  # Standard name is tried too
        # Unknown versions come back as problem installs and are cached too.
        p = cfg.find_python("python2.9.9")
        assert p.executable == "python2.9.9"
        assert p.problem == "not available"
        assert cfg.find_python("python2.9.9") is p  # Now cached, even if problematic
|
# import all necessary libraries
import pandas
from pandas.tools.plotting import scatter_matrix
from sklearn import cross_validation
from sklearn.metrics import matthews_corrcoef
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score
# load the dataset (local path)
url = "data.csv"
# feature names (Parkinson's voice measurements; last column is the label)
features = ["MDVP:Fo(Hz)","MDVP:Fhi(Hz)","MDVP:Flo(Hz)","MDVP:Jitter(%)","MDVP:Jitter(Abs)","MDVP:RAP","MDVP:PPQ","Jitter:DDP","MDVP:Shimmer","MDVP:Shimmer(dB)","Shimmer:APQ3","Shimmer:APQ5","MDVP:APQ","Shimmer:DDA","NHR","HNR","RPDE","DFA","spread1","spread2","D2","PPE","status"]
dataset = pandas.read_csv(url, names = features)
# store the dataset as an array for easier processing
array = dataset.values
# X stores feature values (first 22 columns)
X = array[:,0:22]
# Y stores the class label: the 'status' column (last of the 23)
Y = array[:,22]
validation_size = 0.3
# randomize which part of the data is training and which part is validation
seed = 7
# split dataset into training set (70%) and validation set (30%)
# NOTE(review): sklearn.cross_validation was removed in scikit-learn 0.20;
# modern code imports train_test_split from sklearn.model_selection.
X_train, X_validation, Y_train, Y_validation = cross_validation.train_test_split(X, Y, test_size = validation_size, random_state = seed)
# 10-fold cross validation to estimate accuracy (split data into 10 parts; use 9 parts to train and 1 for test)
num_folds = 10
num_instances = len(X_train)
seed = 7
# use the 'accuracy' metric to evaluate models (correct / total)
scoring = 'accuracy'
# Trivial baseline: predict class 1 for every validation sample, then
# report accuracy and Matthews correlation for that constant predictor.
predictions = []
for instance in X_validation:
    predictions.append(1)
print(accuracy_score(Y_validation, predictions)*100)
print(matthews_corrcoef(Y_validation, predictions))
|
"""Package init: re-export the public entry points and resolve the version."""
__all__ = []

from pysqa.queueadapter import QueueAdapter
from pysqa.executor.executor import Executor

# Version comes from versioneer; the previous placeholder assignment
# (``__version__ = "0.0.1"``) was dead code, immediately overwritten here.
from ._version import get_versions

__version__ = get_versions()["version"]
del get_versions
|
"""Summarise hazard data
Susceptibility levels
---------------------
5 - rất cao - very high
4 - cao - high
3 - trung bình - medium
2 - thấp - low
1 - rất thấp - very low
Filename abbreviations
----------------------
lsz_45_2025 - landslide susceptibility zones under RCP 4.5 in 2025
lsz_85_2025 - landslide susceptibility zones under RCP 8.5 in 2025
ffsz_rcp45_25 - flash flood susceptibility zones under RCP 4.5 in 2025
ffsz_rcp85_25 - flash flood susceptibility zones under RCP 8.5 in 2025
Format notes
------------
MapInfo files
- .TAB is main file, .DAT, .ID, .MAP typically go alongside (.WOR is a workspace)
- .TAB/.txt or .TAB/.tif pairs for raster data
ArcGrid files
- hdr.adf is main file
ESRI Shapefiles
- .shp is main file
"""
import json
import csv
import glob
import os
import fiona
import fiona.crs
import rasterio
def main():
    """Walk the hazard-map directory and write a CSV report summarising
    every vector/raster file found (format, CRS, bounds, counts).

    Paths come from ``config.json`` two levels above this script.
    """
    config_path = os.path.realpath(
        os.path.join(os.path.dirname(__file__), '..', '..', 'config.json')
    )
    with open(config_path, 'r') as config_fh:
        config = json.load(config_fh)
    incoming_data_path = config['paths']['incoming_data']
    hazard_path = os.path.join(incoming_data_path, 'Natural_Hazard_Maps', 'Maps')
    # Extensions routed to fiona (vector) vs rasterio (raster).
    vector_exts = ['.shp', '.TAB']
    raster_exts = ['.tif', '.txt']
    pattern = os.path.join(hazard_path, '**', '*.*')
    report_path = os.path.join(hazard_path, 'report.csv')
    with open(report_path, 'w', newline='') as output_fh:
        writer = csv.writer(output_fh)
        writer.writerow(('filename', 'path', 'format', 'type', 'crs', 'bounds', 'number_of_features', 'fields'))
        for file_path in glob.glob(pattern, recursive=True):
            ext = os.path.splitext(file_path)[1]
            filename = os.path.split(file_path)[1]
            # Path relative to the incoming-data root, for readable reports.
            file_path_detail = str(file_path).replace(
                str(incoming_data_path),
                ''
            )
            if ext in vector_exts:
                details = vector_details(file_path)
                # vector_details returns None for unreadable/non-vector files.
                if details:
                    fields, geometry_type, crs, bounds, number_of_features = details
                    row = (
                        filename,
                        file_path_detail,
                        ext,
                        'vector:{}'.format(geometry_type),
                        crs,
                        bounds,
                        number_of_features,
                        fields
                    )
                    writer.writerow(row)
            # ArcGrid rasters are identified by their 'hdr.adf' main file.
            # NOTE(review): unlike the vector branch there is no error
            # handling here — a .txt that is not a raster would raise.
            if ext in raster_exts or filename == 'hdr.adf':
                bands, crs, bounds, number_of_cells = raster_details(file_path)
                row = (
                    filename,
                    file_path_detail,
                    ext,
                    'raster',
                    crs,
                    bounds,
                    number_of_cells,
                    bands
                )
                writer.writerow(row)
def vector_details(file_path):
    """Open *file_path* with fiona and summarise its schema.

    :returns: ``(fields, geometry_type, crs, bounds, number_of_features)``
        or ``None`` when fiona cannot read the file (e.g. not vector data).
    """
    try:
        with fiona.open(file_path, 'r') as source:
            schema = source.schema
            fields = list(schema['properties'].items())
            return (
                fields,
                schema['geometry'],
                fiona.crs.to_string(source.crs),
                source.bounds,
                len(source),
            )
    except Exception as ex:
        print("INFO: fiona read failure (likely not a vector file):", ex)
        return None
def raster_details(file_path):
    """Summarise a raster via rasterio.

    :returns: ``(bands, crs, bounds, number_of_cells)`` where *bands* is a
        list of (band index, dtype) pairs and *bounds* is (left, bottom,
        right, top).
    """
    with rasterio.open(file_path) as dataset:
        bbox = dataset.bounds
        bounds = (bbox.left, bbox.bottom, bbox.right, bbox.top)
        number_of_cells = dataset.width * dataset.height
        crs = dataset.crs.to_string() if dataset.crs.is_valid else 'invalid/unknown'
        bands = list(zip(dataset.indexes, dataset.dtypes))
        return bands, crs, bounds, number_of_cells

if __name__ == '__main__':
    main()
|
#!/usr/bin/env python
from bottle import route, run, request, response, abort, default_app, get, post
import json
from create import create
from delete import delete
from retrieve import retrieve
from add_activities import add_activities
import os
import re
import sys
import os.path
import boto.dynamodb2
from boto.dynamodb2.fields import HashKey, RangeKey,KeysOnlyIndex, GlobalAllIndex
from boto.dynamodb2.table import Table
def createDynamoObject():
    # Create (or attach to) the DynamoDB 'data' table in us-west-2 and
    # seed it with one dummy record; returns the boto2 Table handle.
    try:
        users = Table.create('data', schema=[HashKey('id')],global_indexes=[GlobalAllIndex('EverythingIndex', parts=[HashKey('name')])],connection=boto.dynamodb2.connect_to_region('us-west-2'));
    except boto.exception.JSONResponseError:
        # Table already exists -- just attach to it.
        users = Table('data',connection=boto.dynamodb2.connect_to_region('us-west-2'))
        print "1) Table 'data' already created."
    #On first Run this wont insert data because of delay to create table on aws server side.
    try:
        users.put_item(data={
            'id': '3',
            'type': 'person',
            'name': 'dummy',
            'activities': ['activity one'],
        })
    # NOTE(review): bare except hides real failures (it even catches
    # KeyboardInterrupt); narrowing to the specific boto exception would
    # be safer.
    except:
        print "2) Dummy Data already added."
    return users
# NOTE(review): this rebinds the module-level name ``boto``, shadowing the
# imported boto package — any later module-level use of boto.* would break.
# It only works because createDynamoObject() runs before the rebinding.
boto = createDynamoObject()
@route('/create')
def parse_create():
    """Bottle handler: create a record from query-string parameters."""
    final_response = create.do_create(
        request.query.id, request.query.name, request.query.activities, boto)
    response.set_header('Content-Language', 'en')
    response.status = final_response['status']
    return json.dumps(final_response['json'])
@route('/retrieve')
def parse_retrieve():
    """Bottle handler: fetch records matching the query-string parameters."""
    final_response = retrieve.do_retrieve(
        request.query.id, request.query.name, request.query.activities, boto)
    response.set_header('Content-Language', 'en')
    response.status = final_response['status']
    return json.dumps(final_response['json'])
@route('/delete')
def parse_delete():
    """Bottle handler: delete the record named by the query-string params.

    Note: unlike the other handlers, do_delete does not take the table handle.
    """
    final_response = delete.do_delete(
        request.query.id, request.query.name, request.query.activities)
    response.set_header('Content-Language', 'en')
    response.status = final_response['status']
    return json.dumps(final_response['json'])
@route('/add_activities')
def parse_add_activities():
    """Bottle handler: append activities to an existing record."""
    final_response = add_activities.do_add_activities(
        request.query.id, request.query.activities, boto)
    response.status = final_response['status']
    return json.dumps(final_response['json'])

run(host='localhost', port=8080, debug=True)
# -*- coding: utf-8 -*-
# @Time : 2019/6/26 19:06
# @Author : chinablue
# @Email : dongjun@reconova.cn
# @File : exceptions.py
'''
Central registry of every exception this package raises.
'''
# Data / parameter type errors
class ParamTypeException(Exception):
    '''
    Raised when a parameter has the wrong data type.
    '''
    pass
# HTTP errors
class HttpException(Exception):
    '''
    Base class for HTTP-related errors.
    '''
    pass
class HttpRequestException(HttpException):
    '''
    HTTP error: the request itself failed.
    '''
    pass
class HttpResponseException(HttpException):
    '''
    HTTP error: the response was invalid.
    '''
    pass
class ListOptionsException(Exception):
    '''
    Raised when a value is outside an allowed option list,
    e.g. a field accepts only 1, 2 or 3 but 4 was supplied.
    '''
    pass
class ExtractJsonException(Exception):
    '''
    Raised when extracting information from a JSON document fails.
    '''
    pass
class FileException(Exception):
    '''
    File-related error.
    '''
    pass
class DictException(Exception):
    '''
    Dictionary-related error.
    '''
    pass
class MyBaseError(Exception):
    pass
class FileNotFound(Exception):
    pass
class FileContentEmpty(Exception):
    pass
class FileFormatError(Exception):
    pass
class ParamNotStrType(Exception):
    pass
class ParamNotIntType(Exception):
    pass
class ParamEmptyStr(Exception):
    pass
class VariableNotFound(Exception):
    pass
class ParamsError(Exception):
    pass
class FunctionNotFound(Exception):
    pass
class HttpRequestError(Exception):
    pass
class NotJsonFormat(Exception):
    pass
class HttpResponseBodyNotJsonFormat(NotJsonFormat):
    pass
class HttpResponseErrorUnauthorized(Exception):
    pass
class HttpUrlNotExist(Exception):
    pass
class HttpServerInnerError(Exception):
    pass
class HttpServerRunException(Exception):
    ############################################
    # If this is raised, check the following:  #
    # 1. Network connectivity (can you ping?)  #
    # 2. Is the remote service up and is the   #
    #    port reachable?                       #
    # 3. Is the remote service overloaded,     #
    #    causing intermittent failures?        #
    ############################################
    pass
class HttpUnknownError(Exception):
    pass
class DefinedBusinessException(Exception):
    pass
class UndefinedBusinessException(Exception):
    pass
class WaitAPITimeoutException(Exception):
    pass
class CsvContentException(Exception):
    pass
class ParameterizeFunctionException(Exception):
    pass
|
import dash
import dash_html_components as html
import dash_core_components as dcc
from dash.dependencies import Input, Output
import plotly.graph_objs as go
import plotly.express as px
import pandas as pd
import numpy as np
from urllib.request import urlopen
import json
import pathlib
from app import app
import data_loader
PATH = pathlib.Path(__file__).parent.parent
# Headline totals shown on the summary cards (pre-computed, display only).
total_amount_2020 = '9.12 Billion'
total_amount_2019 = '10.86 Billion'
# Load both CMS datasets once at import time; cached by year for reuse
# by all helpers below.
data = {}
data[2020] = data_loader.load_cms_data(2020)
data[2019] = data_loader.load_cms_data(2019)
def get_df_speciality(year):
    """Top-20 physician specialties by total payment for *year*, with the
    verbose CMS specialty labels shortened for display.

    Returns a frame indexed by (shortened) specialty name with a single
    Total_payment column.
    """
    cms_by_year = data[year].copy()
    #cms_by_year = data_loader.load_cms_data(year)
    #cms_by_year = pd.read_csv(PATH.joinpath(f'cms_new_{year}.csv'))
    # NOTE(review): this is a column selection, not a copy — the mutations
    # below may trigger pandas' SettingWithCopyWarning; verify behaviour.
    df_speciality_by_year = cms_by_year[['physician_specialty', 'Total_payment']]
    # NOTE(review): '|' is regex alternation when str.replace defaults to
    # regex=True (older pandas) — confirm the intended literal match.
    df_speciality_by_year['physician_specialty'] = df_speciality_by_year['physician_specialty'].str.replace('Allopathic & Osteopathic Physicians|','')
    df_speciality_by_year['physician_specialty'] = df_speciality_by_year['physician_specialty'].str.strip('|')
    df_speciality_by_year['physician_specialty'] = df_speciality_by_year['physician_specialty'].str.lstrip()
    df_speciality_grp = df_speciality_by_year.groupby(['physician_specialty']).sum()
    df_speciality_grp = df_speciality_grp.sort_values(by = ['Total_payment'],ascending=False )
    df_speciality_top_20= df_speciality_grp.iloc[:20,:].reset_index()
    # Year-specific label shortening (the top-20 sets differ per year).
    if year == 2019:
        speciality = {'Podiatric Medicine & Surgery Service Providers|Podiatrist|Foot & Ankle Surgery': 'Foot & Ankle Surgery','Internal Medicine|Hematology & Oncology': 'Hematology & Oncology', 'Internal Medicine|Cardiovascular Disease': 'Cardiovascular Disease', 'Orthopaedic Surgery|Orthopaedic Surgery of the Spine': 'Spine Surgery', 'Orthopaedic Surgery|Adult Reconstructive Orthopaedic Surgery' : 'Adult Reconstructive','Internal Medicine|Endocrinology, Diabetes & Metabolism': 'Endocrinology', 'Eye and Vision Services Providers|Optometrist': 'Optometrist', 'Orthopaedic Surgery|Orthopaedic Trauma': 'Orthopaedic Trauma' ,'Orthopaedic Surgery|Hand Surgery' : 'Hand Surgery', 'Internal Medicine|Pulmonary Disease': 'Pulmonary Disease', 'Orthopaedic Surgery|Sports Medicine': 'Sports Medicine'}
        df_speciality_top_20.replace({"physician_specialty": speciality}, inplace = True)
    else:
        speciality = {'Internal Medicine|Hematology & Oncology': 'Hematology & Oncology', 'Internal Medicine|Cardiovascular Disease': 'Cardiovascular Disease', 'Orthopaedic Surgery|Orthopaedic Surgery of the Spine': 'Spine Surgery', 'Orthopaedic Surgery|Adult Reconstructive Orthopaedic Surgery' : 'Adult Reconstructive','Internal Medicine|Endocrinology, Diabetes & Metabolism': 'Endocrinology', 'Internal Medicine|Gastroenterology': 'Gastroenterology', 'Internal Medicine|Rheumatology': 'Rheumatology' ,'Orthopaedic Surgery|Hand Surgery' : 'Hand Surgery', 'Internal Medicine|Pulmonary Disease': 'Pulmonary Disease', 'Radiology|Vascular & Interventional Radiology': 'Radiology'}
        df_speciality_top_20.replace({"physician_specialty": speciality}, inplace = True)
    df_speciality_top_20 = df_speciality_top_20.set_index('physician_specialty')
    return df_speciality_top_20
def get_df_month(year):
    """Sum Total_payment per calendar month for *year*, ordered Jan..Dec."""
    cms_by_year = data[year].copy()
    monthly = cms_by_year[['Payment_month', 'Total_payment']].groupby(['Payment_month']).sum()
    month_order = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
                   'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
    # Categorical index so sort_index yields calendar order, not alphabetical.
    monthly.index = pd.CategoricalIndex(monthly.index, categories=month_order, ordered=True)
    return monthly.sort_index()
def get_df_drug(year):
    """Top 10 drugs/devices by total payment for *year*."""
    cms_by_drug = data[year].copy()
    totals = (cms_by_drug[['DrugName', 'Total_payment']]
              .groupby(['DrugName'])
              .sum()
              .sort_values(by=['Total_payment'], ascending=False))
    return totals.iloc[:10, :]
def get_df_payment_type(year):
    """Two-column frame (payment nature, amount) for *year*, with the
    verbose CMS category names shortened for the pie-chart legend."""
    cms_by_year = data[year].copy()
    #cms_by_year = data_loader.load_cms_data(year)
    payment_type_year = cms_by_year[['nature_of_payment_or_transfer_of_value', 'Total_payment']]
    # Display-friendly aliases for the longest CMS category labels.
    payment_types = {'Compensation for services other than consulting, including serving as faculty or as a speaker at a venue other than a continuing education program': 'Compensation_Other', 'Compensation for serving as faculty or as a speaker for a non-accredited and noncertified continuing education program':'Faculty/speaker at NonAcc', 'Space rental or facility fees (teaching hospital only)' : 'Space Rental','Compensation for serving as faculty or as a speaker for an accredited or certified continuing education program': 'Faculty/speaker at Acc', 'Current or prospective ownership or investment interest' : 'Investment Interest'}
    # NOTE(review): in-place replace on a column selection can trigger
    # pandas' SettingWithCopyWarning; replacing on an explicit .copy()
    # would be safer.
    payment_type_year.replace({"nature_of_payment_or_transfer_of_value": payment_types}, inplace = True)
    return payment_type_year
def get_drug_device_entity_names(year):
    """Sorted array of unique, lower-cased paying-entity names for *year*."""
    df = data[year].copy()
    df['Paying_Entity'] = df['Paying_Entity'].str.lower()
    names = df.Paying_Entity.unique()
    names.sort()
    return names
# Dropdown options for the drilldown charts: one entry per paying entity,
# pre-computed per year at import time.
top_entity_names = {}
top_entity_names[2020] = get_drug_device_entity_names(2020)
top_entity_names[2019] = get_drug_device_entity_names(2019)
def get_fig_drug_device(year, entity_name):
    """Bar chart of total payments per drug/device for one paying entity.

    *entity_name* is matched against the lower-cased Paying_Entity column.
    """
    PayingEntity = data[year].copy()
    # Normalise specialty labels (drop the common prefix, trim separators).
    PayingEntity['physician_specialty'] = PayingEntity['physician_specialty'].str.replace('Allopathic & Osteopathic Physicians|','')
    PayingEntity['physician_specialty'] = PayingEntity['physician_specialty'].str.strip('|')
    PayingEntity['physician_specialty'] = PayingEntity['physician_specialty'].str.lstrip()
    PayingEntity['Paying_Entity'] = PayingEntity['Paying_Entity'].str.lower()
    grouped = PayingEntity.groupby(['Paying_Entity', 'DrugName']).sum().reset_index()
    payment = grouped[grouped['Paying_Entity'] == entity_name]
    fig = go.Figure()
    fig.add_trace(go.Bar(
        x=payment['DrugName'],
        y=payment['Total_payment'],
        visible=True,  # single trace for the selected entity, always shown
        text=payment["Total_payment"],
        texttemplate='%{text:.2s}',
        textposition='outside',
    ))
    fig.update_layout(height=800)
    fig.update_layout(title={
        'text': f"Drug/Device by entity in {year}",
        'y': 0.9,
        'x': 0.5,
        'xanchor': 'center',
        'yanchor': 'bottom'})
    return fig
# Static page layout: header, per-year summary cards, a row of
# radio-driven overview graphs, and a row of per-entity drilldown graphs
# (2020 always on the left, 2019 on the right).
layout = html.Div([
    html.Div([
        html.H4('Open Payments Overview')
    ], style={'textAlign':'center','marginTop':'-15px'},id='subheader', className='subheader'),
    html.Div([
        html.Div([
            html.H5('2020 Summary')
        ], style={'textAlign':'center'},id='smallheader1',className='six columns smallheader'),
        html.Div([
            html.H5('2019 Summary')
        ], style={'textAlign':'center'},id='smallheader2',className='six columns smallheader')
    ],className='row flex-display'),
    # row of cards (row 2)
    html.Div([
        # 2020 amount
        html.Div([
            html.H6(children='Total Amount',
                    style={'textAlign': 'center'}),
            html.P(f"{total_amount_2020}",
                   style={'textAlign': 'center', 'fontSize': 30}),
        ], className='card_container two columns'),  # 2020 amount ends
        # blank card (spacer to balance the grid)
        html.Div([
            html.H6(style={'textAlign': 'center'}),
        ], className='card_container two columns'),  # blank card ends
        # blank card
        html.Div([
            html.H6(style={'textAlign': 'center'}),
        ], className='card_container two columns'),  # blank card ends
        # 2019 amount
        html.Div([
            html.H6(children='Total Amount',
                    style={'textAlign': 'center'}),
            html.P(f"{total_amount_2019}",
                   style={'textAlign': 'center', 'fontSize': 30}),
        ], className='card_container two columns'),  # 2019 amount ends
        # blank card
        html.Div([
            html.H6(style={'textAlign': 'center'}),
        ], className='card_container two columns'),  # blank card ends
        # blank card
        html.Div([
            html.H6(style={'textAlign': 'center'}),
        ], className='card_container two columns'),  # blank card ends
    ], className='row flex display'),  # row 2 ends
    # figures: radio buttons select which overview chart each Graph shows
    html.Div([
        # 2020 plot
        html.Div([
            html.Div([
                html.Div([
                    dcc.RadioItems(
                        id='option_2020',
                        options=[
                            {'value':'1','label':'Speciality'},
                            {'value':'2','label':'Month'},
                            {'value':'3','label':'Drug'},
                            {'value':'4','label':'PaymentType'},
                        ],
                        value="1",
                        labelStyle={'display':'inline-block'}
                    ),
                ],className='eight columns'),
            ],className='row flex display'),
            dcc.Graph(id='plot_2020')
        ],className='card_container six columns'),
        # 2019 plot
        html.Div([
            html.Div([
                html.Div([
                    dcc.RadioItems(
                        id='option_2019',
                        options=[
                            {'value':'1','label':'Speciality'},
                            {'value':'2','label':'Month'},
                            {'value':'3','label':'Drug'},
                            {'value':'4','label':'PaymentType'},
                        ],
                        value="1",
                        labelStyle={'display':'inline-block'}
                    ),
                ],className='eight columns'),
            ],className='row flex display'),
            dcc.Graph(id='plot_2019')
        ],className='card_container six columns'),
    ], className='row flex display'),
    # Drug/device by entity: dropdown-driven drilldown charts
    html.Div([
        # 2020 plot
        html.Div([
            dcc.Dropdown(id="entity_names_2020",
                         options = [ { "value": entity, "label": entity } for entity in top_entity_names[2020] ],
                         multi=False,
                         value=top_entity_names[2020][0]
                         ),
            dcc.Graph(id='drug_device_2020', config={'displayModeBar':'hover'})
        ],className='card_container six columns'),
        # 2019 plot
        html.Div([
            dcc.Dropdown(id="entity_names_2019",
                         options = [ { "value": entity, "label": entity } for entity in top_entity_names[2019] ],
                         multi=False,
                         value=top_entity_names[2019][0]
                         ),
            dcc.Graph(id='drug_device_2019', config={'displayModeBar':'hover'})
        ],className='card_container six columns'),
    ], className='row flex display')
], id='paymentsContainer', style={'display': 'flex','flex-direction':'column'})  # last line don't touch
#plot_2020
@app.callback(
    Output('plot_2020', 'figure'),
    [Input("option_2020", "value")])
def display_plot_2020(option_2020):
    """Build the 2020 overview figure for the selected radio option:
    '1' speciality bar, '2' monthly line, '3' drug bar, '4' payment-type pie.

    NOTE(review): if an unknown option value ever arrived, ``fig`` would
    be unbound and this would raise UnboundLocalError; a final ``else``
    branch would be safer.
    """
    if option_2020 == '1':
        title = 'Payments in 2020 by Speciality'
        df_speciality = get_df_speciality(2020)
        fig = px.bar(df_speciality, x=df_speciality.index, y="Total_payment", text = "Total_payment",
                     height=650
                     #color='Total_payment', #barmode='group',
                     )
        fig.update_layout(title_text=title, title_x=0.5,margin={"r":0,"l":0,"b":0}, xaxis={'categoryorder':'total descending'})
        fig.update_traces(texttemplate='%{text:.2s}', textposition='outside', marker_color='lightsalmon')
        # Fixed y-range so 2020 and 2019 speciality charts are comparable.
        fig.update_yaxes(range=[0, 55000000])
    elif option_2020 == '2':
        title = 'Payments in 2020 by Month'
        df_month = get_df_month(2020)
        fig = px.line(df_month, x=df_month.index, y="Total_payment", text = "Total_payment",
                      #color='smoker', barmode='group',
                      height=600)
        fig.update_layout(title_text=title, title_x=0.5)
        fig.update_traces(texttemplate='%{text:.2s}', textposition='top center',marker_color='lightsalmon')
        fig.update_yaxes(range=[1000000, 18500000])
    elif option_2020 == '3':
        title = 'Payments in 2020 by Drug/Device'
        df_drug = get_df_drug(2020)
        fig = px.bar(df_drug, x=df_drug.index, y="Total_payment", text = 'Total_payment',
                     #color='smoker', barmode='group',
                     height=650)
        fig.update_layout(title_text=title, title_x=0.5,xaxis={'categoryorder':'total descending'})
        fig.update_traces(texttemplate='%{text:.2s}', textposition='outside',marker_color='lightsalmon')
        fig.update_yaxes(range=[0, 20000000])
    elif option_2020 == '4':
        title = 'Total Payments by Nature of Payments in 2020'
        df_payment_type = get_df_payment_type(2020)
        fig = px.pie(df_payment_type, values='Total_payment', names='nature_of_payment_or_transfer_of_value')
        fig.update_traces(textposition='inside')
        fig.update_layout(uniformtext_minsize=12, uniformtext_mode='hide', title = title, title_x=0.5)
        fig.update_traces(textposition='inside', textinfo='percent+label')
    return fig
# #plot_2019
@app.callback(
    Output('plot_2019', 'figure'),
    [Input("option_2019", "value")])
def display_plot_2019(option_2019):
    """Build the 2019 overview figure for the selected radio option:
    '1' speciality bar, '2' monthly line, '3' drug bar, '4' payment-type pie.

    NOTE(review): like the 2020 callback, an unknown option value would
    leave ``fig`` unbound (UnboundLocalError).
    """
    if option_2019 == '1':
        df_speciality = get_df_speciality(2019)
        title = 'Payments in 2019 by Speciality'
        fig = px.bar(df_speciality, x=df_speciality.index, y="Total_payment", text = "Total_payment",
                     height=600
                     #color='Total_payment', #barmode='group',
                     )
        fig.update_layout(title_text=title, title_x=0.5,margin={"r":0,"l":0,"b":0}, xaxis={'categoryorder':'total descending'})
        fig.update_traces(texttemplate='%{text:.2s}', textposition='outside')
        # Fixed y-range so 2020 and 2019 speciality charts are comparable.
        fig.update_yaxes(range=[0, 55000000])
    elif option_2019 == '2':
        title = 'Payments in 2019 by Month'
        df_month = get_df_month(2019)
        fig = px.line(df_month, x=df_month.index, y="Total_payment", text = 'Total_payment',
                      #color='smoker', barmode='group',
                      height=600)
        fig.update_layout(title_text=title, title_x=0.5)
        fig.update_traces(texttemplate='%{text:.2s}', textposition='top center',marker_color='lightsalmon')
    elif option_2019 == '3':
        title = 'Payments in 2019 by Drug/Device'
        df_drug = get_df_drug(2019)
        fig = px.bar(df_drug, x=df_drug.index, y="Total_payment", text = "Total_payment",
                     ##color='smoker', barmode='group',
                     height=600)
        fig.update_layout(title_text=title, title_x=0.5,xaxis={'categoryorder':'total descending'})
        fig.update_traces(texttemplate='%{text:.2s}', textposition='outside')
    elif option_2019 == '4':
        title = 'Total Payments by Nature of Payments in 2019'
        df_payment_type = get_df_payment_type(2019)
        fig = px.pie(df_payment_type, values='Total_payment', names='nature_of_payment_or_transfer_of_value')
        fig.update_traces(textposition='inside')
        fig.update_layout(uniformtext_minsize=12, uniformtext_mode='hide', title = title, title_x=0.5)
        fig.update_traces(textposition='inside', textinfo='percent+label')
    return fig
@app.callback(
    Output('drug_device_2020', 'figure'),
    [Input("entity_names_2020", "value")])
def display_drug_device_2020(entity_names_2020):
    """Render the 2020 drug/device figure for the selected entity names."""
    return get_fig_drug_device(2020, entity_names_2020)
@app.callback(
    Output('drug_device_2019', 'figure'),
    [Input("entity_names_2019", "value")])
def display_drug_device_2019(entity_names_2019):
    """Render the 2019 drug/device figure for the selected entity names."""
    return get_fig_drug_device(2019, entity_names_2019)
|
import csv
import numpy as np
from numpy import genfromtxt, savetxt
from sklearn.ensemble import RandomForestClassifier

# Train a random-forest classifier on train.csv (first column = label,
# header row skipped) and write predictions for test.csv.
# BUG FIX: the two input files were opened with open() and never closed;
# genfromtxt accepts a path directly, so no handles are leaked.
data = genfromtxt('../Data/train.csv', delimiter=',', dtype='f8')[1:]
labels = [x[0] for x in data]
train = [x[1:] for x in data]
test = genfromtxt('../Data/test.csv', delimiter=',', dtype='f8')[1:]
# Train data
rf = RandomForestClassifier(n_estimators=100)
rf.fit(train, labels)
# Classify
classified = rf.predict(test)
# Write predictions, one float per line.
savetxt('../Data/submission-rf-py.csv', classified, delimiter=',', fmt='%f')
from Planner import Planner

# Interactive command-line front end for the course Planner.
# NOTE: Python 2 code — uses print statements and raw_input().
if __name__ == "__main__":
    print 'Enter EID:'
    eid = raw_input()
    print 'Enter Password:'
    pwd = raw_input()
    # Log in with the supplied credentials.
    p = Planner(eid, pwd)
    # Simple REPL: 'add' registers a unique course number, 'print' shows the
    # schedule. NOTE(review): there is no quit command; the loop runs until
    # the process is interrupted.
    while True:
        print "Enter command:"
        cmd = raw_input()
        if cmd == "add":
            print "Enter unique number to add:"
            unique_number = raw_input()
            p.add_class(unique_number)
        if cmd == 'print':
            p.print_schedule()
|
import urllib, urllib2, logging
import openanything
from pylons import cache, config, request
import datetime
from demisaucepy import demisauce_ws_get
import pylons
from pylons.util import AttribSafeContextObj, ContextObj
from pylons.i18n import ugettext
from xmlnode import XMLNode
from demisaucepy import cfg
log = logging.getLogger(__name__)
def current_url():
    """Return the absolute http URL of the current request, query string included."""
    if 'QUERY_STRING' in request.environ:
        suffix = '?%s' % request.environ['QUERY_STRING']
    else:
        suffix = ''
    return 'http://%s/%s%s' % (request.host, request.path_info, suffix)
def route_url(includeaction=True):
    """Returns the url minus id, so controller/action typically"""
    routes = request.environ['pylons.routes_dict']
    parts = []
    if 'controller' in routes:
        parts.append('/%s' % routes['controller'])
    if includeaction and 'action' in routes:
        parts.append('/%s' % routes['action'])
    return ''.join(parts)
def help_url(includeaction=True):
    """Return the CMS help-content URL for the page you are on."""
    return '/api/script/cms/root/help%s' % route_url(includeaction)
def get_admin_permissions():
    """
    Returns a Boolean if current user has permission to add/edit cms items
    """
    # Only attempt the check when a pylons helper module is registered in config.
    if 'pylons.h' in config:
        try:
            # cfg.CFG mirrors the pylons configuration; the registered helper
            # module may expose an isdemisauce_admin() predicate.
            if 'pylons.h' in cfg.CFG:
                dsadmin = getattr(cfg.CFG['pylons.h'], 'isdemisauce_admin')
                if dsadmin:
                    return cfg.CFG['pylons.h'].isdemisauce_admin()
        except AttributeError:
            # Helper module without the attribute: treat as non-admin.
            pass
    return False
def demisauce_xmlnodes(**kwargs):
    """Placeholder for a declarative-mapper-style node interface; not implemented."""
    #TODO: implement a simpleinterface similar to declarative mapper
    #phphello = has_a(name='helloworld',app='phpdemo',lazy=True,local_key='id' )
    raise NotImplementedError
def wordpress_page(page_id):
    """Stub for wordpress content; currently always yields an empty string."""
    empty_content = ''
    return empty_content
def remote_html(resource_id='', routes_dict=None, append_path=False, **kwargs):
    """
    Accepts a key of which content is desired.
    Returns the remote HTML, an admin "Add Item" link for admins when the
    item is missing, or '' otherwise.
    """
    if append_path and routes_dict is not None:
        # Append the named routes_dict entry to the resource key.
        resource_id += request.environ['pylons.routes_dict'][routes_dict]
    elif append_path and routes_dict is None:
        # Default to /controller/action from the current route.
        resource_id += '/%s' % request.environ['pylons.routes_dict']['controller']
        resource_id += '/%s' % request.environ['pylons.routes_dict']['action']
    isadmin = get_admin_permissions()
    if isadmin:
        log.debug('isadmin user')
    # BUG FIX: the original did `item = ... or ''` and then read item.success,
    # which raised AttributeError whenever the service returned a falsy value.
    # Check for a real response object before touching .success.
    item = pylons_demisauce_ws_get('cms', resource_id, isadmin=isadmin, format='html')
    if item and getattr(item, 'success', False):
        return item.data
    elif isadmin:
        # show the admin links?
        return '<a href="%s/cms/add/%s?returnurl=%s">Add Item</a>' % (
            config['demisauce.url'], resource_id, urllib.quote_plus(current_url()))
    else:
        return ''
def email_template_get(resource_id='', **kwargs):
    """
    retrieves the xml web service template, and returns as XMLNode
    """
    return pylons_demisauce_ws_get('email', resource_id, format='xml')
def pylons_demisauce_ws_get(method, resource_id='', format='html', isadmin=False, cachetime=0,**kwargs):
    """
    Cached fetch of a demisauce web-service resource.

    method: service name (e.g. 'cms', 'email')
    resource_id (which piece of content)
    format: response format passed through to the service
    cachetime: cache TTL in seconds; 0 means "use the configured default"
    """
    # Closure invoked by the cache on a miss; performs the actual service call.
    def ws_get():
        return demisauce_ws_get(method,resource_id,format=format)
    mycache = cache.get_cache('demisauce.remotecontent')
    # A zero TTL falls back to the configured duration, when one exists.
    if cachetime == 0:
        if 'demisauce.cacheduration' in cfg.CFG:
            cachetime = int(cfg.CFG['demisauce.cacheduration'])
    # Get the value, this will create the cache copy the first time
    # and any time it expires (in seconds, so 3600 = one hour)
    myvalue = mycache.get_value('%s-%s-%s' % (method,resource_id,format), createfunc=ws_get,expiretime=cachetime)
    return myvalue
|
from django.core.management.base import BaseCommand
from django.core.management import call_command
class Command(BaseCommand):
    """Management command that rebuilds short-lived state (cache etc.)."""

    help = 'Clears and refills ephemeral things, like the cache.'

    def handle(self, *args, **kwargs):
        self.stdout.write(self.style.MIGRATE_LABEL('Refreshing ephemeral stores...'))
        # Run the refresh sub-commands in their dependency order.
        for command_name in ('clear_cache', 'setup_scheduler', 'build_version_cache'):
            call_command(command_name)
        self.stdout.write(self.style.MIGRATE_SUCCESS('All done :)'))
|
# Exercise script (Python 2): basic car/driver/passenger arithmetic.
cars = 100
space_in_car = 4
drivers = 30
passengers = 90
# One car per driver; the rest stay parked.
cars_driven = drivers
cars_not_driven = cars - drivers
carpool_capacity = cars_driven * space_in_car
# NOTE: Python 2 integer division — 90/30 == 3.
average_passengers_per_car = passengers/cars_driven
print "there are",cars,"cars in total"
print "there are only",passengers,"passengers in total"
print average_passengers_per_car
|
import os
import numpy as np
from kadai2 import GNN
def read_file(path):
    """Read a graph file: first line is the node count n, followed by an
    n x n integer adjacency matrix (space-separated rows).

    Returns (n, adjacency) where adjacency is a float ndarray of shape (n, n).
    NOTE: the chdir side effect (cwd becomes the script directory) is relied
    upon by get_data(), which saves and restores the caller's cwd.
    """
    os.chdir(os.path.dirname(os.path.abspath(__file__)))
    with open(path) as f:
        n = int(f.readline())
        adjust_matrix = np.zeros((n, n))
        for i in range(n):
            row = list(map(int, f.readline().split()))
            # Assign the whole row at once instead of element by element;
            # slice to n in case a line carries trailing extra tokens.
            adjust_matrix[i] = row[:n]
    return n, adjust_matrix
def get_data(batch, idx):
    """Return (n, adjacency, label) for training sample batch[idx]."""
    saved_cwd = os.getcwd()
    sample = str(batch[idx])
    graph_path = "../datasets/train/" + sample + "_graph.txt"
    n, r = read_file(graph_path)
    # read_file() left the cwd at the script directory, so this relative
    # path resolves against it.
    label_path = "../datasets/train/" + sample + "_label.txt"
    with open(label_path) as f:
        y = int(f.readline())
    # Restore the caller's working directory.
    os.chdir(saved_cwd)
    return n, r, y
# 2000 training samples (1600 for training, 400 for validation)
class Train:
    """Mini-batch training driver for the GNN defined in kadai2."""
    def __init__(self,W,a,b,minibatch=50):
        # Model parameters: weight matrix W, readout vector a, bias b,
        # and the mini-batch size used by SGD().
        self.minibatch=minibatch
        self.W=W
        self.a=a
        self.b=b
    def batch(self, all_data_number=2000, train=1600):
        """Record the dataset split sizes on self and return a shuffled index list."""
        self.all_data_number=all_data_number
        self.train=train
        l=list(range(all_data_number))
        np.random.shuffle(l)
        return l
    def SGD(self, batch_list, d=8, update_way="SGD"):
        """One pass over the training split, updating the parameters every
        mini-batch.

        batch_list: shuffled sample indices from batch()
        d: feature dimension of the initial node embeddings
        update_way: "SGD" for plain updates, "M-SGD" for momentum updates
        """
        SUM_delta_W=0
        SUM_delta_a=0
        SUM_delta_b=0
        for i in range(self.train):
            n,r,y=get_data(batch_list,i)
            # Initial node features: first row ones, remaining d-1 rows zeros.
            self.x=np.r_[np.ones((1,n)),np.zeros((d-1,n))]
            shoki=GNN(r)
            L=shoki.forward(self.W,self.a,self.b,self.x,y)
            delta_W,delta_a,delta_b=shoki.gradient(self.W,self.a,self.b)
            SUM_delta_W+=delta_W
            SUM_delta_a+=delta_a
            SUM_delta_b+=delta_b
            # NOTE(review): i % (minibatch-1) fires at 49, 98, 147, ... so
            # batches after the first hold minibatch-1 samples — confirm intent.
            if i!=0 and i%(self.minibatch-1)==0:
                # Average the accumulated gradients over the mini-batch.
                SUM_delta_W/=self.minibatch
                SUM_delta_a/=self.minibatch
                SUM_delta_b/=self.minibatch
                if update_way=="SGD":
                    self.W,self.a,self.b=shoki.update(SUM_delta_W,SUM_delta_a,SUM_delta_b)
                elif update_way=="M-SGD":
                    self.W,self.a,self.b=shoki.update2(SUM_delta_W,SUM_delta_a,SUM_delta_b)
                else:
                    print("None such a update way")
                SUM_delta_W=0
                SUM_delta_a=0
                SUM_delta_b=0
    def accuracy(self,epoch):
        """Compute train and validation accuracy at the given epoch.

        NOTE(review): this method is broken as written — get_data() takes
        (batch, idx) but is called with a single argument, and self.x is
        whatever SGD() last assigned (its width n may not match the current
        graph). It will raise a TypeError when invoked; confirm the intended
        call pattern before relying on it.
        """
        l=[]
        cnt=0
        for i in range(self.train):
            n,r,y=get_data(i)
            shoki=GNN(r)
            y_hat=shoki.yosokuti(self.W,self.a,self.b,self.x)
            if y_hat==y:
                cnt+=1
        l.append(cnt/self.train)
        cnt=0
        for i in range(self.train,self.all_data_number):
            n,r,y=get_data(i)
            shoki=GNN(r)
            y_hat=shoki.yosokuti(self.W,self.a,self.b,self.x)
            if y_hat==y:
                cnt+=1
        l.append(cnt/(self.all_data_number-self.train))
        return (epoch,":",l)
# Hyperparameters and parameter initialisation for the training run.
epoch = 100
np.random.seed(42)
d = 8
W = np.random.normal(0, 0.4, (d, d))
a = np.random.normal(0, 0.4, (1, d))
b = 0
t = Train(W, a, b)
batch_list = t.batch()
for i in range(epoch):
    t.SGD(batch_list)
    if i == 10:
        # BUG FIX: `t.accuracy` was referenced without calling it, so `result`
        # was a bound-method object; call it with the current epoch index.
        result = t.accuracy(i)
        print(result)
|
# http://www.geeksforgeeks.org/dynamic-programming-subset-sum-problem/
# https://www.youtube.com/watch?v=s6FhG--P7z0
def subset_sum(input_set, length, sum):
    """Recursive subset-sum: True when some subset of the first `length`
    elements of input_set adds up to `sum`."""
    # A zero target is always reachable via the empty subset.
    if sum == 0:
        return True
    # No elements left and a non-zero target remains: unreachable.
    if length == 0:
        return False
    last = input_set[length - 1]
    # Either exclude the last element, or include it and reduce the target.
    return (subset_sum(input_set, length - 1, sum)
            or subset_sum(input_set, length - 1, sum - last))
def subset_sum_dp(input_set, sum):
    """Bottom-up subset-sum: L[i][j] is True when some subset of the first j
    elements of input_set adds up to i."""
    length = len(input_set)
    L = [[False] * (length + 1) for _ in range(sum + 1)]
    # Target 0 is reachable with any prefix (empty subset).
    for j in range(length + 1):
        L[0][j] = True
    for i in range(1, sum + 1):
        for j in range(1, length + 1):
            # Without element j, then optionally including it.
            reachable = L[i][j - 1]
            if i >= input_set[j - 1]:
                reachable = reachable or L[i - input_set[j - 1]][j - 1]
            L[i][j] = reachable
    return L[sum][length]
def subset_sum_dp_2_tushar(input_set, sum):
    """Bottom-up subset-sum: L[i][j] is True when some subset of the first i
    elements of input_set adds up to j.

    Fixes vs. the original: the j == 0 base case is tested before i == 0 so
    the empty set correctly reaches target 0 (the original returned False for
    ([], 0)), and the leftover debug print of the whole DP table is removed.
    """
    length = len(input_set)
    L = [[None for j in range(sum + 1)] for i in range(length + 1)]
    for i in range(length + 1):
        for j in range(sum + 1):
            if j == 0:
                # The empty subset always sums to zero.
                L[i][j] = True
            elif i == 0:
                # No elements available and a positive target: impossible.
                L[i][j] = False
            elif j < input_set[i-1]:
                # Element i is too big for target j; must exclude it.
                L[i][j] = L[i-1][j]
            else:
                # Exclude element i, or include it and reduce the target.
                L[i][j] = L[i-1][j] or L[i-1][j-input_set[i-1]]
    return L[length][sum]
# BUG FIX: the original bound the list to `input`, shadowing the builtin.
numbers = [3, 34, 4, 12, 5, 2]
print(subset_sum(numbers, len(numbers), 17))
print(subset_sum_dp(numbers, 17))
print(subset_sum_dp_2_tushar(numbers, 17))
|
# Generated by Django 2.1.2 on 2018-10-24 13:54
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds a unique, indexed, nullable password_reset_token column to the
    # user model (auto-generated by Django 2.1.2).

    dependencies = [
        ('api', '0005_auto_20181018_1604'),
    ]

    operations = [
        migrations.AddField(
            model_name='user',
            name='password_reset_token',
            field=models.CharField(blank=True, db_index=True, default=None, max_length=40, null=True, unique=True),
        ),
    ]
|
import peewee
from vocabulary import settings
from vocabulary.settings import DB_PORT, DB_HOST, DB_USER, DB_NAME

# Module-level PostgreSQL connection shared by the vocabulary models.
# NOTE(review): no password is passed here — presumably supplied via the
# environment or pg_hba; confirm before deploying.
db = peewee.PostgresqlDatabase(
    DB_NAME,
    user=DB_USER,
    host=DB_HOST, port=DB_PORT)
|
# server endpoints
import json
import tornado.web
from database import db
# OUR MEMORY DB
# In-process fallback stores; the `db` collection (see database module) is
# presumably the primary store — confirm against the handlers below.
users = dict() # ie: {"uname": {"location": "brighton", "type": "driver"}}
notifs = dict() # ie: {"driver": {"dest": "brighton", "origin": "london"}}
class LoginHandler(tornado.web.RequestHandler):
    """Registers a user (POST), answers a health-check GET, and handles
    CORS preflight requests."""

    def set_default_headers(self):
        # Permissive CORS headers so the browser front end can call us.
        self.set_header('Access-Control-Allow-Origin', '*')
        self.set_header('Access-Control-Allow-Headers', '*')
        self.set_header('Access-Control-Max-Age', 1000)
        self.set_header('Content-type', 'application/json')
        self.set_header('Access-Control-Allow-Methods', 'POST, GET, OPTIONS')
        self.set_header('Access-Control-Allow-Headers',
                        'Content-Type, Access-Control-Allow-Origin, Access-Control-Allow-Headers, X-Requested-By, Access-Control-Allow-Methods')

    def options(self):
        # BUG FIX: Tornado dispatches to lowercase method names; the original
        # uppercase `OPTIONS` was never invoked, so CORS preflight requests
        # were rejected with 405.
        pass

    def get(self):
        self.write({"Hello": "FUBER!"})

    def post(self):
        try:
            body = json.loads(self.request.body)  # Try to load the body as a JSON object
        except ValueError:
            # BUG FIX: malformed JSON was previously swallowed by a bare
            # `except: pass`, ending the request as an empty 200.
            self.set_status(400)
            self.write({"FAIL": "request body is not valid JSON", "Status": "400"})
            return
        uname = body["uname"]
        location = body["location"]
        type_ = body["type"]
        doc_ref = db.collection("users").document(uname)
        if doc_ref.get().exists:
            self.set_status(500)
            self.write({"FAIL": "user already in DB", "Status": "500"})
        else:
            # Anything other than "passenger" is treated as a driver.
            isdriver = type_ != "passenger"
            doc_ref.set({
                u'uname': uname,
                u'location': location,
                u'isdriver': isdriver
            })
            self.write({"Success": "User saved to database", "Status": "200"})
class LocateHandler(tornado.web.RequestHandler):
    """Returns the usernames of all drivers located in the requested city."""

    def set_default_headers(self):
        self.set_header("Access-Control-Allow-Origin", "*")
        self.set_header("Access-Control-Allow-Headers", "x-requested-with")
        self.set_header('Access-Control-Allow-Methods', 'POST, GET, OPTIONS')

    def get(self):
        city = self.get_argument('city')
        # Filter the user collection down to drivers in the requested city.
        matches = [u.get("uname")
                   for u in db.collection("users").get()
                   if u.get("isdriver") and u.get("location") == city]
        self.set_header("Content-Type", "text/plain")
        self.finish(json.dumps(matches))
class BookHandler(tornado.web.RequestHandler):
    """Books a trip: records the request in the in-memory notifs map."""

    def set_default_headers(self):
        self.set_header("Access-Control-Allow-Origin", "*")
        self.set_header("Access-Control-Allow-Headers", "x-requested-with")
        self.set_header('Access-Control-Allow-Methods', 'POST, GET, OPTIONS')

    def post(self):
        try:
            body = json.loads(self.request.body)  # Try to load the body as a JSON object
        except ValueError:
            # BUG FIX: malformed JSON was previously swallowed by a bare
            # `except: pass`, ending the request as an empty 200.
            self.set_status(400)
            self.write({"FAIL": "request body is not valid JSON", "Status": "400"})
            return
        driver = body["driver"]
        origin = body["origin"]
        dest = body["dest"]
        notifs[driver] = {"dest": dest, "origin": origin}
        print(notifs)  # NOTE(review): debug output — consider the logging module
        self.write({"Success": "Notif written", "Status": "200"})
class NotifHandler(tornado.web.RequestHandler):
    """Returns any pending trip notifications for the requested driver."""

    def set_default_headers(self):
        self.set_header("Access-Control-Allow-Origin", "*")
        self.set_header("Access-Control-Allow-Headers", "x-requested-with")
        self.set_header('Access-Control-Allow-Methods', 'POST, GET, OPTIONS')

    def get(self):
        driver = self.get_argument('driver')
        # Collect every notification keyed by this driver's name.
        pending = [payload for name, payload in notifs.items() if name == driver]
        self.set_header("Content-Type", "text/plain")
        self.finish(json.dumps(pending))
# class RegisterHandler(tornado.web.RequestHandler):
# def set_default_headers(self):
# self.set_header("Access-Control-Allow-Origin", "*")
# self.set_header("Access-Control-Allow-Headers", "x-requested-with")
# self.set_header('Access-Control-Allow-Methods', 'POST, GET, OPTIONS')
# def get(self):
# self.write({"Hello": "FUBER!"})
# def post(self):
# try:
# body = json.loads(self.request.body) # Try to load the body as a JSON object
# except:
# pass
# else:
# uname = body["uname"]
# location = body["location"]
# type_ = body["type"]
# password = body["password"]
# doc_ref = db.collection("users").document(uname)
# if doc_ref.get().exists:
# self.set_status(500)
# self.write({"FAIL": "user already in DB", "Status": "500"})
# else:
# doc_ref.set({
# u'uname': uname,
# u'location': location,
# u'isdriver': isdriver
# u'password' : password
# })
# self.write({"Success": "User saved to database", "Status": "200"})
|
import random
# Terrain palette: maps each terrain type to its display colour (hex RGB).
Terrains = {
    'plain': {
        'color': '#71CD00'
    },
    'hill': {
        'color': '#505355'
    },
    'water': {
        'color': '#5D88F8'
    },
    'sand': {
        'color': '#F9CF29'
    },
    'forest': {
        'color': '#10A71E'
    },
    'city': {
        'color': '#A1A5AA'
    }
}
def choose_random_terrain():
random_terrain_seed = random.randint(0, 100)
terrain = 'plain'
if 0 < random_terrain_seed < 20:
terrain = 'forest'
elif 20 < random_terrain_seed < 25:
terrain = 'hill'
elif 50 < random_terrain_seed < 60:
terrain = 'water'
elif 70 < random_terrain_seed < 90:
terrain = 'sand'
elif 90 < random_terrain_seed < 100:
terrain = 'city'
return terrain
|
import csv
import numpy as np
# CSV header (Chinese): stock code, name, revenue growth, net-profit growth,
# operating-cash-flow growth, debt ratio, fixed assets, ROA, ROE, asset
# turnover, receivables turnover, P/E ratio, revenue per share.
columns_name = "代號,名稱,營收成長率,淨利成長率,營運現金流量成長率,負債比率,固定資產,資產報酬率,股東權益報酬率,資產周轉率,應收帳款周轉率,本益比,每股營收比"
def stock_indicator_reader():
    """Load the indicator CSV, z-score normalise each indicator column, and
    collapse the 11 indicators into 5 aggregate aspect columns.

    Returns (stock_ids, stock_names, aspects) where aspects is (n, 5).
    """
    data = []
    stock_ids = []
    stock_names = []
    # BUG FIX: the file was opened inline and never closed; a context
    # manager releases the handle deterministically.
    with open('indicator_predictor/data/indicator_data.csv', 'r') as f:
        for line in f.readlines()[1:]:  # skip the header row
            splitted = line.split(",")
            stock_ids.append(int(splitted[0]))
            stock_names.append(splitted[1])
            data.append([float(x) for x in splitted[2:]])
    # Normalize indicator columns to zero mean / unit variance.
    data = np.array(data)
    data -= data.mean(axis=0)
    data /= data.std(axis=0)
    # Average related indicators into five aggregate aspects.
    col1 = ((data[:, 0] + data[:, 1] + data[:, 2]) / 3)[:, np.newaxis]
    col2 = ((data[:, 3] + data[:, 4]) / 2)[:, np.newaxis]
    col3 = ((data[:, 5] + data[:, 6]) / 2)[:, np.newaxis]
    col4 = ((data[:, 7] + data[:, 8]) / 2)[:, np.newaxis]
    col5 = ((data[:, 9] + data[:, 10]) / 2)[:, np.newaxis]
    return stock_ids, stock_names, np.concatenate((col1, col2, col3, col4, col5), axis=1)
# Load the indicator table once at import time; downstream functions read
# these module-level globals.
stock_ids, stock_names, indicator_data = stock_indicator_reader()
def getStockRankNum(aspect):
    """Rank stocks along one aggregate aspect.

    Aspects 0, 2 and 3 rank best-first in descending value; the others
    ascend (lower is better for those indicators).
    """
    order = np.argsort(indicator_data[:, aspect], axis=-1, kind='quicksort', order=None)
    return order[::-1] if aspect in [0, 2, 3] else order
# Precompute the ranking order for each of the five aggregate aspects.
StockRankNum = [getStockRankNum(i) for i in range(5)]

if __name__ == "__main__":
    print(stock_ids)
    print(stock_names)
    print(indicator_data)
    print(indicator_data.shape)
    print(StockRankNum)
import random

# Build a random list, count "small" elements, and append the mean of the
# positive elements when that mean is positive.
b = random.randint(1, 20)
a = [random.randint(-10, 10) for i in range(0, b)]
print(a)
z = 0  # running sum of positive elements
k = 0  # count of elements with |a[j]| < 3
n = 0  # count of positive elements
for j in range(0, b):
    # NOTE(review): `a[j]//3 == 0 or -(a[j])//3 == 0` is true exactly for
    # values -2..2 (|a[j]| < 3), NOT for multiples of 3 — confirm the intent.
    if a[j] // 3 == 0 or -(a[j]) // 3 == 0:
        k += 1
    if a[j] > 0:
        z = z + a[j]
        n += 1
# BUG FIX: guard against ZeroDivisionError when the list has no positive
# elements (the original crashed on m = z//n with n == 0).
if n > 0:
    m = z // n
    if m > 0:
        a.append(m)
a.insert(0, k)
print(a)
|
######################################################################################
## ALGORTITMO RSA _ COMPUTACION CIENTIFICA _ Universidad de Medellin. ##
## Marzo - 2016 ##
## DlsnLz ##
## ##
######################################################################################
# +++++++++++++++++++++++ CODIGO RSA +++++++++++++++++++++++++++
# REALIZADO: Basando todo el proceso en las distintas funciones que se han visto en clase y muchas otras demas cosas con ayudas de la internet y con # mucho del ingenio logico personal.
# Ayudas textos y Explicaciones: logica para programacion de computadores _ Gabriel Vasquez L _ Biblioteca udem _ cap 4 - pag 62,96 ; Fundamentos de # programacion (algoritmos y estructura de datos)_ Luis joyanes Aguilar _ Biblioteca udem _ cap 4 y 5; http://es.slideshare.net/jpadillaa/criptografia- # asimetrica-rsa _ Diapositiva 22; http://serdis.dis.ulpgc.es/~ii-cript/RSA.pdf; http://www.lawebdelprogramador.com/codigo/Python/2413-Determinar-si-un- # numero-es-primo-o-no.html.
# Algoritmo RSA, generador de las claves publicas y privadas dados dos numeros primos, validando en medio de su proceso condiciones que de una u otra # manera van a ser dependientes consecutivos de cada una de las funciones y los arreglos tratados.
# la funcion myrsa, fue definida en este codigo como algoritmo_rsa; debido a que hubieron algunos problemas de confusion.
# Dados dos numeros, esta funcion se encargara de identificar cual es el menor, necesaria en el algoritmo para que cuando se ejecute el proceso de la # busqueda del maximo comun divisor los numeros ingresados sean ordenados de tal manera que el mas grande sea mayor que el menor (es obvio pero por # claridad,se menciona)
def menornumero (x,y):
    """Return the smaller of x and y (used to bound the gcd search)."""
    return x if x < y else y
# Funcion que identificara el maximo comun divisor, teniendo presente la funcion anterior, pues, al ser dados los numeros esta los ordenara de manera que # aunque se obvie el mayor sea mas grande que el menor.
def maxcomdiv (x,y):
    """Greatest common divisor by downward trial division from min(x, y)."""
    for candidate in range(menornumero(x, y), 0, -1):
        if x % candidate == 0 and y % candidate == 0:
            return candidate
# Esta funcion define la segunda condicion que debe cumplir el numero "e", me dice que 1 < e < funcioneuler; y ademas que el numero "e" debe ser coprimo # con el modulo (linea 41,42)
def coprimos (a,b):
    """Return 1 when a and b share no common factor greater than 1, else 0."""
    return 1 if maxcomdiv(a, b) == 1 else 0
# Funcion que identifica si al ingresar un numero cumple con las condiciones que pide, las cuales dicen literalmente que en una lista de numeros del # dos al x-1 (pues esta dentro del ciclo for), divida a x entre todos los numeros antes del numero en cuestion y busque los numeros que cumplan que # x mod i == 1, estos seran primos; de ese modo, si, al ingresar un numero cumple la condicion entonces este sera primo.
def fprim (t):
    """Return 1 if t is prime, 0 otherwise (trial division).

    BUG FIX: 0, 1 and negative numbers previously came back as "prime"
    because the trial-division loop body never executed for them.
    """
    if t < 2:
        return 0
    for i in range(2, t):
        if t % i == 0:
            return 0
    return 1
# Funcion que multiplicara los numeros que seran ingresados por el usuario y nuevamente seran utilizados como resultados al final del codigo donde se # muestra como; modulo, parte de la llave publica y privada.
def modulo(p,q):
    """Return the RSA modulus n = p * q."""
    product = p * q
    return product
# Funcion de Euler, necesaria en el algoritmo para hallar "e" y "d", pues, con "e" necesita que sea coprimo y con "d" es necesario que la multiplicacion # de e por d mod funcioneuler ==1; es decir (en este algoritmo) e * d % phi_n(p,q) == 1, si esto se cumple entonces sera "d".
def phi_n(p,q):
    """Euler's totient of n = p*q for primes p, q: (p-1)*(q-1),
    algebraically identical to the original p*q - p - q + 1."""
    return (p - 1) * (q - 1)
# Print usage instructions: the inputs must be prime (otherwise key
# generation refuses to run) and large primes may take a while.
print ("\nPara generar sus claves siga las instrucciones que se le piden a continuacion...")
print ("\n++++++++++++++++++++++++++++++++++INSTRUCCIONES+++++++++++++++++++++++++++++++++++++++")
print ("\n++ Tenga en cuenta que los numeros a ingresar tienen que ser primos, si no, sus llaves no seran generadas.")
print ("\n++ Aqui algunos de ellos: 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89.")
print ("\n++ Si al ingresar dos numeros grandes observa que el proceso se demora, no se altere, solo espere que pronto sus llaves seran generadas.")
# Read the two primes p and q. NOTE: Python 2 input() evaluates the typed
# text, so numeric input arrives as an int.
p=(input ("\nPrimer numero aleatorio:"))
q=(input ("\nSegundo numero aleatorio:"))
def algoritmo_rsa (p,q):
    """Generate and print the RSA modulus, public key (n, e) and private
    key (n, d) from the primes p and q (Python 2 print statements)."""
    if fprim(p)==0 or fprim(q)==0:
        return '\nEstas seguro que los numeros aleatorios que acabas de ingresar son correctos'
    else:
        # Find e with 1 < e < phi_n(p,q) and gcd(e, phi_n(p,q)) == 1.
        # NOTE(review): the loop body is a no-op and there is no break, so e
        # ends as phi_n(p,q)-1 — which is always coprime with phi_n(p,q), so
        # the result is valid only by coincidence.
        for e in range(2,phi_n(p,q)):
            if coprimos (phi_n(p,q), e) == 1:
                e
        # Find d with e*d % phi_n(p,q) == 1.
        # NOTE(review): same missing-break pattern; d ends as 2*phi_n(p,q)-1,
        # which satisfies the congruence only because e == phi_n(p,q)-1.
        for d in range(phi_n(p,q)*2):
            if e * d % phi_n(p,q) == 1:
                d
        # Print the totient, modulus, public key and private key.
        print "\nphi_n es : (",phi_n(p,q),")"
        print "\nEl modulo es:(",modulo(p,q),")"
        print "\nLa llave publica es: (",modulo(p,q),",",e,")"
        print "\nLa llave privada es: (",modulo(p,q),",",d,")"
print algoritmo_rsa(p,q)
from weatherdashboard.models import Country, Location
from django.core.management.base import BaseCommand
all_countries_data = [
{
"countryCode": "AD",
"countryName": "Andorra",
"currencyCode": "EUR",
"population": "84000",
"capital": "Andorra la Vella",
"continentName": "Europe"
},
{
"countryCode": "AE",
"countryName": "United Arab Emirates",
"currencyCode": "AED",
"population": "4975593",
"capital": "Abu Dhabi",
"continentName": "Asia"
},
{
"countryCode": "AF",
"countryName": "Afghanistan",
"currencyCode": "AFN",
"population": "29121286",
"capital": "Kabul",
"continentName": "Asia"
},
{
"countryCode": "AG",
"countryName": "Antigua and Barbuda",
"currencyCode": "XCD",
"population": "86754",
"capital": "St. John's",
"continentName": "North America"
},
{
"countryCode": "AI",
"countryName": "Anguilla",
"currencyCode": "XCD",
"population": "13254",
"capital": "The Valley",
"continentName": "North America"
},
{
"countryCode": "AL",
"countryName": "Albania",
"currencyCode": "ALL",
"population": "2986952",
"capital": "Tirana",
"continentName": "Europe"
},
{
"countryCode": "AM",
"countryName": "Armenia",
"currencyCode": "AMD",
"population": "2968000",
"capital": "Yerevan",
"continentName": "Asia"
},
{
"countryCode": "AO",
"countryName": "Angola",
"currencyCode": "AOA",
"population": "13068161",
"capital": "Luanda",
"continentName": "Africa"
},
{
"countryCode": "AQ",
"countryName": "Antarctica",
"currencyCode": "",
"population": "0",
"capital": "",
"continentName": "Antarctica"
},
{
"countryCode": "AR",
"countryName": "Argentina",
"currencyCode": "ARS",
"population": "41343201",
"capital": "Buenos Aires",
"continentName": "South America"
},
{
"countryCode": "AS",
"countryName": "American Samoa",
"currencyCode": "USD",
"population": "57881",
"capital": "Pago Pago",
"continentName": "Oceania"
},
{
"countryCode": "AT",
"countryName": "Austria",
"currencyCode": "EUR",
"population": "8205000",
"capital": "Vienna",
"continentName": "Europe"
},
{
"countryCode": "AU",
"countryName": "Australia",
"currencyCode": "AUD",
"population": "21515754",
"capital": "Canberra",
"continentName": "Oceania"
},
{
"countryCode": "AW",
"countryName": "Aruba",
"currencyCode": "AWG",
"population": "71566",
"capital": "Oranjestad",
"continentName": "North America"
},
{
"countryCode": "AX",
"countryName": "Åland",
"currencyCode": "EUR",
"population": "26711",
"capital": "Mariehamn",
"continentName": "Europe"
},
{
"countryCode": "AZ",
"countryName": "Azerbaijan",
"currencyCode": "AZN",
"population": "8303512",
"capital": "Baku",
"continentName": "Asia"
},
{
"countryCode": "BA",
"countryName": "Bosnia and Herzegovina",
"currencyCode": "BAM",
"population": "4590000",
"capital": "Sarajevo",
"continentName": "Europe"
},
{
"countryCode": "BB",
"countryName": "Barbados",
"currencyCode": "BBD",
"population": "285653",
"capital": "Bridgetown",
"continentName": "North America"
},
{
"countryCode": "BD",
"countryName": "Bangladesh",
"currencyCode": "BDT",
"population": "156118464",
"capital": "Dhaka",
"continentName": "Asia"
},
{
"countryCode": "BE",
"countryName": "Belgium",
"currencyCode": "EUR",
"population": "10403000",
"capital": "Brussels",
"continentName": "Europe"
},
{
"countryCode": "BF",
"countryName": "Burkina Faso",
"currencyCode": "XOF",
"population": "16241811",
"capital": "Ouagadougou",
"continentName": "Africa"
},
{
"countryCode": "BG",
"countryName": "Bulgaria",
"currencyCode": "BGN",
"population": "7148785",
"capital": "Sofia",
"continentName": "Europe"
},
{
"countryCode": "BH",
"countryName": "Bahrain",
"currencyCode": "BHD",
"population": "738004",
"capital": "Manama",
"continentName": "Asia"
},
{
"countryCode": "BI",
"countryName": "Burundi",
"currencyCode": "BIF",
"population": "9863117",
"capital": "Bujumbura",
"continentName": "Africa"
},
{
"countryCode": "BJ",
"countryName": "Benin",
"currencyCode": "XOF",
"population": "9056010",
"capital": "Porto-Novo",
"continentName": "Africa"
},
{
"countryCode": "BL",
"countryName": "Saint Barthélemy",
"currencyCode": "EUR",
"population": "8450",
"capital": "Gustavia",
"continentName": "North America"
},
{
"countryCode": "BM",
"countryName": "Bermuda",
"currencyCode": "BMD",
"population": "65365",
"capital": "Hamilton",
"continentName": "North America"
},
{
"countryCode": "BN",
"countryName": "Brunei",
"currencyCode": "BND",
"population": "395027",
"capital": "Bandar Seri Begawan",
"continentName": "Asia"
},
{
"countryCode": "BO",
"countryName": "Bolivia",
"currencyCode": "BOB",
"population": "9947418",
"capital": "Sucre",
"continentName": "South America"
},
{
"countryCode": "BQ",
"countryName": "Bonaire",
"currencyCode": "USD",
"population": "18012",
"capital": "Kralendijk",
"continentName": "North America"
},
{
"countryCode": "BR",
"countryName": "Brazil",
"currencyCode": "BRL",
"population": "201103330",
"capital": "Brasília",
"continentName": "South America"
},
{
"countryCode": "BS",
"countryName": "Bahamas",
"currencyCode": "BSD",
"population": "301790",
"capital": "Nassau",
"continentName": "North America"
},
{
"countryCode": "BT",
"countryName": "Bhutan",
"currencyCode": "BTN",
"population": "699847",
"capital": "Thimphu",
"continentName": "Asia"
},
{
"countryCode": "BV",
"countryName": "Bouvet Island",
"currencyCode": "NOK",
"population": "0",
"capital": "",
"continentName": "Antarctica"
},
{
"countryCode": "BW",
"countryName": "Botswana",
"currencyCode": "BWP",
"population": "2029307",
"capital": "Gaborone",
"continentName": "Africa"
},
{
"countryCode": "BY",
"countryName": "Belarus",
"currencyCode": "BYR",
"population": "9685000",
"capital": "Minsk",
"continentName": "Europe"
},
{
"countryCode": "BZ",
"countryName": "Belize",
"currencyCode": "BZD",
"population": "314522",
"capital": "Belmopan",
"continentName": "North America"
},
{
"countryCode": "CA",
"countryName": "Canada",
"currencyCode": "CAD",
"population": "33679000",
"capital": "Ottawa",
"continentName": "North America"
},
{
"countryCode": "CC",
"countryName": "Cocos [Keeling] Islands",
"currencyCode": "AUD",
"population": "628",
"capital": "West Island",
"continentName": "Asia"
},
{
"countryCode": "CD",
"countryName": "Democratic Republic of the Congo",
"currencyCode": "CDF",
"population": "70916439",
"capital": "Kinshasa",
"continentName": "Africa"
},
{
"countryCode": "CF",
"countryName": "Central African Republic",
"currencyCode": "XAF",
"population": "4844927",
"capital": "Bangui",
"continentName": "Africa"
},
{
"countryCode": "CG",
"countryName": "Republic of the Congo",
"currencyCode": "XAF",
"population": "3039126",
"capital": "Brazzaville",
"continentName": "Africa"
},
{
"countryCode": "CH",
"countryName": "Switzerland",
"currencyCode": "CHF",
"population": "7581000",
"capital": "Bern",
"continentName": "Europe"
},
{
"countryCode": "CI",
"countryName": "Ivory Coast",
"currencyCode": "XOF",
"population": "21058798",
"capital": "Yamoussoukro",
"continentName": "Africa"
},
{
"countryCode": "CK",
"countryName": "Cook Islands",
"currencyCode": "NZD",
"population": "21388",
"capital": "Avarua",
"continentName": "Oceania"
},
{
"countryCode": "CL",
"countryName": "Chile",
"currencyCode": "CLP",
"population": "16746491",
"capital": "Santiago",
"continentName": "South America"
},
{
"countryCode": "CM",
"countryName": "Cameroon",
"currencyCode": "XAF",
"population": "19294149",
"capital": "Yaoundé",
"continentName": "Africa"
},
{
"countryCode": "CN",
"countryName": "China",
"currencyCode": "CNY",
"population": "1330044000",
"capital": "Beijing",
"continentName": "Asia"
},
{
"countryCode": "CO",
"countryName": "Colombia",
"currencyCode": "COP",
"population": "47790000",
"capital": "Bogotá",
"continentName": "South America"
},
{
"countryCode": "CR",
"countryName": "Costa Rica",
"currencyCode": "CRC",
"population": "4516220",
"capital": "San José",
"continentName": "North America"
},
{
"countryCode": "CU",
"countryName": "Cuba",
"currencyCode": "CUP",
"population": "11423000",
"capital": "Havana",
"continentName": "North America"
},
{
"countryCode": "CV",
"countryName": "Cape Verde",
"currencyCode": "CVE",
"population": "508659",
"capital": "Praia",
"continentName": "Africa"
},
{
"countryCode": "CW",
"countryName": "Curacao",
"currencyCode": "ANG",
"population": "141766",
"capital": "Willemstad",
"continentName": "North America"
},
{
"countryCode": "CX",
"countryName": "Christmas Island",
"currencyCode": "AUD",
"population": "1500",
"capital": "Flying Fish Cove",
"continentName": "Asia"
},
{
"countryCode": "CY",
"countryName": "Cyprus",
"currencyCode": "EUR",
"population": "1102677",
"capital": "Nicosia",
"continentName": "Europe"
},
{
"countryCode": "CZ",
"countryName": "Czechia",
"currencyCode": "CZK",
"population": "10476000",
"capital": "Prague",
"continentName": "Europe"
},
{
"countryCode": "DE",
"countryName": "Germany",
"currencyCode": "EUR",
"population": "81802257",
"capital": "Berlin",
"continentName": "Europe"
},
{
"countryCode": "DJ",
"countryName": "Djibouti",
"currencyCode": "DJF",
"population": "740528",
"capital": "Djibouti",
"continentName": "Africa"
},
{
"countryCode": "DK",
"countryName": "Denmark",
"currencyCode": "DKK",
"population": "5484000",
"capital": "Copenhagen",
"continentName": "Europe"
},
{
"countryCode": "DM",
"countryName": "Dominica",
"currencyCode": "XCD",
"population": "72813",
"capital": "Roseau",
"continentName": "North America"
},
{
"countryCode": "DO",
"countryName": "Dominican Republic",
"currencyCode": "DOP",
"population": "9823821",
"capital": "Santo Domingo",
"continentName": "North America"
},
{
"countryCode": "DZ",
"countryName": "Algeria",
"currencyCode": "DZD",
"population": "34586184",
"capital": "Algiers",
"continentName": "Africa"
},
{
"countryCode": "EC",
"countryName": "Ecuador",
"currencyCode": "USD",
"population": "14790608",
"capital": "Quito",
"continentName": "South America"
},
{
"countryCode": "EE",
"countryName": "Estonia",
"currencyCode": "EUR",
"population": "1291170",
"capital": "Tallinn",
"continentName": "Europe"
},
{
"countryCode": "EG",
"countryName": "Egypt",
"currencyCode": "EGP",
"population": "80471869",
"capital": "Cairo",
"continentName": "Africa"
},
{
"countryCode": "EH",
"countryName": "Western Sahara",
"currencyCode": "MAD",
"population": "273008",
"capital": "Laâyoune / El Aaiún",
"continentName": "Africa"
},
{
"countryCode": "ER",
"countryName": "Eritrea",
"currencyCode": "ERN",
"population": "5792984",
"capital": "Asmara",
"continentName": "Africa"
},
{
"countryCode": "ES",
"countryName": "Spain",
"currencyCode": "EUR",
"population": "46505963",
"capital": "Madrid",
"continentName": "Europe"
},
{
"countryCode": "ET",
"countryName": "Ethiopia",
"currencyCode": "ETB",
"population": "88013491",
"capital": "Addis Ababa",
"continentName": "Africa"
},
{
"countryCode": "FI",
"countryName": "Finland",
"currencyCode": "EUR",
"population": "5244000",
"capital": "Helsinki",
"continentName": "Europe"
},
{
"countryCode": "FJ",
"countryName": "Fiji",
"currencyCode": "FJD",
"population": "875983",
"capital": "Suva",
"continentName": "Oceania"
},
{
"countryCode": "FK",
"countryName": "Falkland Islands",
"currencyCode": "FKP",
"population": "2638",
"capital": "Stanley",
"continentName": "South America"
},
{
"countryCode": "FM",
"countryName": "Micronesia",
"currencyCode": "USD",
"population": "107708",
"capital": "Palikir",
"continentName": "Oceania"
},
{
"countryCode": "FO",
"countryName": "Faroe Islands",
"currencyCode": "DKK",
"population": "48228",
"capital": "Tórshavn",
"continentName": "Europe"
},
{
"countryCode": "FR",
"countryName": "France",
"currencyCode": "EUR",
"population": "64768389",
"capital": "Paris",
"continentName": "Europe"
},
{
"countryCode": "GA",
"countryName": "Gabon",
"currencyCode": "XAF",
"population": "1545255",
"capital": "Libreville",
"continentName": "Africa"
},
{
"countryCode": "GB",
"countryName": "United Kingdom",
"currencyCode": "GBP",
"population": "62348447",
"capital": "London",
"continentName": "Europe"
},
{
"countryCode": "GD",
"countryName": "Grenada",
"currencyCode": "XCD",
"population": "107818",
"capital": "St. George's",
"continentName": "North America"
},
{
"countryCode": "GE",
"countryName": "Georgia",
"currencyCode": "GEL",
"population": "4630000",
"capital": "Tbilisi",
"continentName": "Asia"
},
{
"countryCode": "GF",
"countryName": "French Guiana",
"currencyCode": "EUR",
"population": "195506",
"capital": "Cayenne",
"continentName": "South America"
},
{
"countryCode": "GG",
"countryName": "Guernsey",
"currencyCode": "GBP",
"population": "65228",
"capital": "St Peter Port",
"continentName": "Europe"
},
{
"countryCode": "GH",
"countryName": "Ghana",
"currencyCode": "GHS",
"population": "24339838",
"capital": "Accra",
"continentName": "Africa"
},
{
"countryCode": "GI",
"countryName": "Gibraltar",
"currencyCode": "GIP",
"population": "27884",
"capital": "Gibraltar",
"continentName": "Europe"
},
{
"countryCode": "GL",
"countryName": "Greenland",
"currencyCode": "DKK",
"population": "56375",
"capital": "Nuuk",
"continentName": "North America"
},
{
"countryCode": "GM",
"countryName": "Gambia",
"currencyCode": "GMD",
"population": "1593256",
"capital": "Bathurst",
"continentName": "Africa"
},
{
"countryCode": "GN",
"countryName": "Guinea",
"currencyCode": "GNF",
"population": "10324025",
"capital": "Conakry",
"continentName": "Africa"
},
{
"countryCode": "GP",
"countryName": "Guadeloupe",
"currencyCode": "EUR",
"population": "443000",
"capital": "Basse-Terre",
"continentName": "North America"
},
{
"countryCode": "GQ",
"countryName": "Equatorial Guinea",
"currencyCode": "XAF",
"population": "1014999",
"capital": "Malabo",
"continentName": "Africa"
},
{
"countryCode": "GR",
"countryName": "Greece",
"currencyCode": "EUR",
"population": "11000000",
"capital": "Athens",
"continentName": "Europe"
},
{
"countryCode": "GS",
"countryName": "South Georgia and the South Sandwich Islands",
"currencyCode": "GBP",
"population": "30",
"capital": "Grytviken",
"continentName": "Antarctica"
},
{
"countryCode": "GT",
"countryName": "Guatemala",
"currencyCode": "GTQ",
"population": "13550440",
"capital": "Guatemala City",
"continentName": "North America"
},
{
"countryCode": "GU",
"countryName": "Guam",
"currencyCode": "USD",
"population": "159358",
"capital": "Hagåtña",
"continentName": "Oceania"
},
{
"countryCode": "GW",
"countryName": "Guinea-Bissau",
"currencyCode": "XOF",
"population": "1565126",
"capital": "Bissau",
"continentName": "Africa"
},
{
"countryCode": "GY",
"countryName": "Guyana",
"currencyCode": "GYD",
"population": "748486",
"capital": "Georgetown",
"continentName": "South America"
},
{
"countryCode": "HK",
"countryName": "Hong Kong",
"currencyCode": "HKD",
"population": "6898686",
"capital": "Hong Kong",
"continentName": "Asia"
},
{
"countryCode": "HM",
"countryName": "Heard Island and McDonald Islands",
"currencyCode": "AUD",
"population": "0",
"capital": "",
"continentName": "Antarctica"
},
{
"countryCode": "HN",
"countryName": "Honduras",
"currencyCode": "HNL",
"population": "7989415",
"capital": "Tegucigalpa",
"continentName": "North America"
},
{
"countryCode": "HR",
"countryName": "Croatia",
"currencyCode": "HRK",
"population": "4284889",
"capital": "Zagreb",
"continentName": "Europe"
},
{
"countryCode": "HT",
"countryName": "Haiti",
"currencyCode": "HTG",
"population": "9648924",
"capital": "Port-au-Prince",
"continentName": "North America"
},
{
"countryCode": "HU",
"countryName": "Hungary",
"currencyCode": "HUF",
"population": "9982000",
"capital": "Budapest",
"continentName": "Europe"
},
{
"countryCode": "ID",
"countryName": "Indonesia",
"currencyCode": "IDR",
"population": "242968342",
"capital": "Jakarta",
"continentName": "Asia"
},
{
"countryCode": "IE",
"countryName": "Ireland",
"currencyCode": "EUR",
"population": "4622917",
"capital": "Dublin",
"continentName": "Europe"
},
{
"countryCode": "IL",
"countryName": "Israel",
"currencyCode": "ILS",
"population": "7353985",
"capital": "",
"continentName": "Asia"
},
{
"countryCode": "IM",
"countryName": "Isle of Man",
"currencyCode": "GBP",
"population": "75049",
"capital": "Douglas",
"continentName": "Europe"
},
{
"countryCode": "IN",
"countryName": "India",
"currencyCode": "INR",
"population": "1173108018",
"capital": "New Delhi",
"continentName": "Asia"
},
{
"countryCode": "IO",
"countryName": "British Indian Ocean Territory",
"currencyCode": "USD",
"population": "4000",
"capital": "",
"continentName": "Asia"
},
{
"countryCode": "IQ",
"countryName": "Iraq",
"currencyCode": "IQD",
"population": "29671605",
"capital": "Baghdad",
"continentName": "Asia"
},
{
"countryCode": "IR",
"countryName": "Iran",
"currencyCode": "IRR",
"population": "76923300",
"capital": "Tehran",
"continentName": "Asia"
},
{
"countryCode": "IS",
"countryName": "Iceland",
"currencyCode": "ISK",
"population": "308910",
"capital": "Reykjavik",
"continentName": "Europe"
},
{
"countryCode": "IT",
"countryName": "Italy",
"currencyCode": "EUR",
"population": "60340328",
"capital": "Rome",
"continentName": "Europe"
},
{
"countryCode": "JE",
"countryName": "Jersey",
"currencyCode": "GBP",
"population": "90812",
"capital": "Saint Helier",
"continentName": "Europe"
},
{
"countryCode": "JM",
"countryName": "Jamaica",
"currencyCode": "JMD",
"population": "2847232",
"capital": "Kingston",
"continentName": "North America"
},
{
"countryCode": "JO",
"countryName": "Jordan",
"currencyCode": "JOD",
"population": "6407085",
"capital": "Amman",
"continentName": "Asia"
},
{
"countryCode": "JP",
"countryName": "Japan",
"currencyCode": "JPY",
"population": "127288000",
"capital": "Tokyo",
"continentName": "Asia"
},
{
"countryCode": "KE",
"countryName": "Kenya",
"currencyCode": "KES",
"population": "40046566",
"capital": "Nairobi",
"continentName": "Africa"
},
{
"countryCode": "KG",
"countryName": "Kyrgyzstan",
"currencyCode": "KGS",
"population": "5776500",
"capital": "Bishkek",
"continentName": "Asia"
},
{
"countryCode": "KH",
"countryName": "Cambodia",
"currencyCode": "KHR",
"population": "14453680",
"capital": "Phnom Penh",
"continentName": "Asia"
},
{
"countryCode": "KI",
"countryName": "Kiribati",
"currencyCode": "AUD",
"population": "92533",
"capital": "Tarawa",
"continentName": "Oceania"
},
{
"countryCode": "KM",
"countryName": "Comoros",
"currencyCode": "KMF",
"population": "773407",
"capital": "Moroni",
"continentName": "Africa"
},
{
"countryCode": "KN",
"countryName": "Saint Kitts and Nevis",
"currencyCode": "XCD",
"population": "51134",
"capital": "Basseterre",
"continentName": "North America"
},
{
"countryCode": "KP",
"countryName": "North Korea",
"currencyCode": "KPW",
"population": "22912177",
"capital": "Pyongyang",
"continentName": "Asia"
},
{
"countryCode": "KR",
"countryName": "South Korea",
"currencyCode": "KRW",
"population": "48422644",
"capital": "Seoul",
"continentName": "Asia"
},
{
"countryCode": "KW",
"countryName": "Kuwait",
"currencyCode": "KWD",
"population": "2789132",
"capital": "Kuwait City",
"continentName": "Asia"
},
{
"countryCode": "KY",
"countryName": "Cayman Islands",
"currencyCode": "KYD",
"population": "44270",
"capital": "George Town",
"continentName": "North America"
},
{
"countryCode": "KZ",
"countryName": "Kazakhstan",
"currencyCode": "KZT",
"population": "15340000",
"capital": "Astana",
"continentName": "Asia"
},
{
"countryCode": "LA",
"countryName": "Laos",
"currencyCode": "LAK",
"population": "6368162",
"capital": "Vientiane",
"continentName": "Asia"
},
{
"countryCode": "LB",
"countryName": "Lebanon",
"currencyCode": "LBP",
"population": "4125247",
"capital": "Beirut",
"continentName": "Asia"
},
{
"countryCode": "LC",
"countryName": "Saint Lucia",
"currencyCode": "XCD",
"population": "160922",
"capital": "Castries",
"continentName": "North America"
},
{
"countryCode": "LI",
"countryName": "Liechtenstein",
"currencyCode": "CHF",
"population": "35000",
"capital": "Vaduz",
"continentName": "Europe"
},
{
"countryCode": "LK",
"countryName": "Sri Lanka",
"currencyCode": "LKR",
"population": "21513990",
"capital": "Colombo",
"continentName": "Asia"
},
{
"countryCode": "LR",
"countryName": "Liberia",
"currencyCode": "LRD",
"population": "3685076",
"capital": "Monrovia",
"continentName": "Africa"
},
{
"countryCode": "LS",
"countryName": "Lesotho",
"currencyCode": "LSL",
"population": "1919552",
"capital": "Maseru",
"continentName": "Africa"
},
{
"countryCode": "LT",
"countryName": "Lithuania",
"currencyCode": "EUR",
"population": "2944459",
"capital": "Vilnius",
"continentName": "Europe"
},
{
"countryCode": "LU",
"countryName": "Luxembourg",
"currencyCode": "EUR",
"population": "497538",
"capital": "Luxembourg",
"continentName": "Europe"
},
{
"countryCode": "LV",
"countryName": "Latvia",
"currencyCode": "EUR",
"population": "2217969",
"capital": "Riga",
"continentName": "Europe"
},
{
"countryCode": "LY",
"countryName": "Libya",
"currencyCode": "LYD",
"population": "6461454",
"capital": "Tripoli",
"continentName": "Africa"
},
{
"countryCode": "MA",
"countryName": "Morocco",
"currencyCode": "MAD",
"population": "33848242",
"capital": "Rabat",
"continentName": "Africa"
},
{
"countryCode": "MC",
"countryName": "Monaco",
"currencyCode": "EUR",
"population": "32965",
"capital": "Monaco",
"continentName": "Europe"
},
{
"countryCode": "MD",
"countryName": "Moldova",
"currencyCode": "MDL",
"population": "4324000",
"capital": "Chişinău",
"continentName": "Europe"
},
{
"countryCode": "ME",
"countryName": "Montenegro",
"currencyCode": "EUR",
"population": "666730",
"capital": "Podgorica",
"continentName": "Europe"
},
{
"countryCode": "MF",
"countryName": "Saint Martin",
"currencyCode": "EUR",
"population": "35925",
"capital": "Marigot",
"continentName": "North America"
},
{
"countryCode": "MG",
"countryName": "Madagascar",
"currencyCode": "MGA",
"population": "21281844",
"capital": "Antananarivo",
"continentName": "Africa"
},
{
"countryCode": "MH",
"countryName": "Marshall Islands",
"currencyCode": "USD",
"population": "65859",
"capital": "Majuro",
"continentName": "Oceania"
},
{
"countryCode": "MK",
"countryName": "Macedonia",
"currencyCode": "MKD",
"population": "2062294",
"capital": "Skopje",
"continentName": "Europe"
},
{
"countryCode": "ML",
"countryName": "Mali",
"currencyCode": "XOF",
"population": "13796354",
"capital": "Bamako",
"continentName": "Africa"
},
{
"countryCode": "MM",
"countryName": "Myanmar [Burma]",
"currencyCode": "MMK",
"population": "53414374",
"capital": "Naypyitaw",
"continentName": "Asia"
},
{
"countryCode": "MN",
"countryName": "Mongolia",
"currencyCode": "MNT",
"population": "3086918",
"capital": "Ulan Bator",
"continentName": "Asia"
},
{
"countryCode": "MO",
"countryName": "Macao",
"currencyCode": "MOP",
"population": "449198",
"capital": "Macao",
"continentName": "Asia"
},
{
"countryCode": "MP",
"countryName": "Northern Mariana Islands",
"currencyCode": "USD",
"population": "53883",
"capital": "Saipan",
"continentName": "Oceania"
},
{
"countryCode": "MQ",
"countryName": "Martinique",
"currencyCode": "EUR",
"population": "432900",
"capital": "Fort-de-France",
"continentName": "North America"
},
{
"countryCode": "MR",
"countryName": "Mauritania",
"currencyCode": "MRO",
"population": "3205060",
"capital": "Nouakchott",
"continentName": "Africa"
},
{
"countryCode": "MS",
"countryName": "Montserrat",
"currencyCode": "XCD",
"population": "9341",
"capital": "Plymouth",
"continentName": "North America"
},
{
"countryCode": "MT",
"countryName": "Malta",
"currencyCode": "EUR",
"population": "403000",
"capital": "Valletta",
"continentName": "Europe"
},
{
"countryCode": "MU",
"countryName": "Mauritius",
"currencyCode": "MUR",
"population": "1294104",
"capital": "Port Louis",
"continentName": "Africa"
},
{
"countryCode": "MV",
"countryName": "Maldives",
"currencyCode": "MVR",
"population": "395650",
"capital": "Malé",
"continentName": "Asia"
},
{
"countryCode": "MW",
"countryName": "Malawi",
"currencyCode": "MWK",
"population": "15447500",
"capital": "Lilongwe",
"continentName": "Africa"
},
{
"countryCode": "MX",
"countryName": "Mexico",
"currencyCode": "MXN",
"population": "112468855",
"capital": "Mexico City",
"continentName": "North America"
},
{
"countryCode": "MY",
"countryName": "Malaysia",
"currencyCode": "MYR",
"population": "28274729",
"capital": "Kuala Lumpur",
"continentName": "Asia"
},
{
"countryCode": "MZ",
"countryName": "Mozambique",
"currencyCode": "MZN",
"population": "22061451",
"capital": "Maputo",
"continentName": "Africa"
},
{
"countryCode": "NA",
"countryName": "Namibia",
"currencyCode": "NAD",
"population": "2128471",
"capital": "Windhoek",
"continentName": "Africa"
},
{
"countryCode": "NC",
"countryName": "New Caledonia",
"currencyCode": "XPF",
"population": "216494",
"capital": "Noumea",
"continentName": "Oceania"
},
{
"countryCode": "NE",
"countryName": "Niger",
"currencyCode": "XOF",
"population": "15878271",
"capital": "Niamey",
"continentName": "Africa"
},
{
"countryCode": "NF",
"countryName": "Norfolk Island",
"currencyCode": "AUD",
"population": "1828",
"capital": "Kingston",
"continentName": "Oceania"
},
{
"countryCode": "NG",
"countryName": "Nigeria",
"currencyCode": "NGN",
"population": "154000000",
"capital": "Abuja",
"continentName": "Africa"
},
{
"countryCode": "NI",
"countryName": "Nicaragua",
"currencyCode": "NIO",
"population": "5995928",
"capital": "Managua",
"continentName": "North America"
},
{
"countryCode": "NL",
"countryName": "Netherlands",
"currencyCode": "EUR",
"population": "16645000",
"capital": "Amsterdam",
"continentName": "Europe"
},
{
"countryCode": "NO",
"countryName": "Norway",
"currencyCode": "NOK",
"population": "5009150",
"capital": "Oslo",
"continentName": "Europe"
},
{
"countryCode": "NP",
"countryName": "Nepal",
"currencyCode": "NPR",
"population": "28951852",
"capital": "Kathmandu",
"continentName": "Asia"
},
{
"countryCode": "NR",
"countryName": "Nauru",
"currencyCode": "AUD",
"population": "10065",
"capital": "Yaren",
"continentName": "Oceania"
},
{
"countryCode": "NU",
"countryName": "Niue",
"currencyCode": "NZD",
"population": "2166",
"capital": "Alofi",
"continentName": "Oceania"
},
{
"countryCode": "NZ",
"countryName": "New Zealand",
"currencyCode": "NZD",
"population": "4252277",
"capital": "Wellington",
"continentName": "Oceania"
},
{
"countryCode": "OM",
"countryName": "Oman",
"currencyCode": "OMR",
"population": "2967717",
"capital": "Muscat",
"continentName": "Asia"
},
{
"countryCode": "PA",
"countryName": "Panama",
"currencyCode": "PAB",
"population": "3410676",
"capital": "Panama City",
"continentName": "North America"
},
{
"countryCode": "PE",
"countryName": "Peru",
"currencyCode": "PEN",
"population": "29907003",
"capital": "Lima",
"continentName": "South America"
},
{
"countryCode": "PF",
"countryName": "French Polynesia",
"currencyCode": "XPF",
"population": "270485",
"capital": "Papeete",
"continentName": "Oceania"
},
{
"countryCode": "PG",
"countryName": "Papua New Guinea",
"currencyCode": "PGK",
"population": "6064515",
"capital": "Port Moresby",
"continentName": "Oceania"
},
{
"countryCode": "PH",
"countryName": "Philippines",
"currencyCode": "PHP",
"population": "99900177",
"capital": "Manila",
"continentName": "Asia"
},
{
"countryCode": "PK",
"countryName": "Pakistan",
"currencyCode": "PKR",
"population": "184404791",
"capital": "Islamabad",
"continentName": "Asia"
},
{
"countryCode": "PL",
"countryName": "Poland",
"currencyCode": "PLN",
"population": "38500000",
"capital": "Warsaw",
"continentName": "Europe"
},
{
"countryCode": "PM",
"countryName": "Saint Pierre and Miquelon",
"currencyCode": "EUR",
"population": "7012",
"capital": "Saint-Pierre",
"continentName": "North America"
},
{
"countryCode": "PN",
"countryName": "Pitcairn Islands",
"currencyCode": "NZD",
"population": "46",
"capital": "Adamstown",
"continentName": "Oceania"
},
{
"countryCode": "PR",
"countryName": "Puerto Rico",
"currencyCode": "USD",
"population": "3916632",
"capital": "San Juan",
"continentName": "North America"
},
{
"countryCode": "PS",
"countryName": "Palestine",
"currencyCode": "ILS",
"population": "3800000",
"capital": "",
"continentName": "Asia"
},
{
"countryCode": "PT",
"countryName": "Portugal",
"currencyCode": "EUR",
"population": "10676000",
"capital": "Lisbon",
"continentName": "Europe"
},
{
"countryCode": "PW",
"countryName": "Palau",
"currencyCode": "USD",
"population": "19907",
"capital": "Melekeok",
"continentName": "Oceania"
},
{
"countryCode": "PY",
"countryName": "Paraguay",
"currencyCode": "PYG",
"population": "6375830",
"capital": "Asunción",
"continentName": "South America"
},
{
"countryCode": "QA",
"countryName": "Qatar",
"currencyCode": "QAR",
"population": "840926",
"capital": "Doha",
"continentName": "Asia"
},
{
"countryCode": "RE",
"countryName": "Réunion",
"currencyCode": "EUR",
"population": "776948",
"capital": "Saint-Denis",
"continentName": "Africa"
},
{
"countryCode": "RO",
"countryName": "Romania",
"currencyCode": "RON",
"population": "21959278",
"capital": "Bucharest",
"continentName": "Europe"
},
{
"countryCode": "RS",
"countryName": "Serbia",
"currencyCode": "RSD",
"population": "7344847",
"capital": "Belgrade",
"continentName": "Europe"
},
{
"countryCode": "RU",
"countryName": "Russia",
"currencyCode": "RUB",
"population": "140702000",
"capital": "Moscow",
"continentName": "Europe"
},
{
"countryCode": "RW",
"countryName": "Rwanda",
"currencyCode": "RWF",
"population": "11055976",
"capital": "Kigali",
"continentName": "Africa"
},
{
"countryCode": "SA",
"countryName": "Saudi Arabia",
"currencyCode": "SAR",
"population": "25731776",
"capital": "Riyadh",
"continentName": "Asia"
},
{
"countryCode": "SB",
"countryName": "Solomon Islands",
"currencyCode": "SBD",
"population": "559198",
"capital": "Honiara",
"continentName": "Oceania"
},
{
"countryCode": "SC",
"countryName": "Seychelles",
"currencyCode": "SCR",
"population": "88340",
"capital": "Victoria",
"continentName": "Africa"
},
{
"countryCode": "SD",
"countryName": "Sudan",
"currencyCode": "SDG",
"population": "35000000",
"capital": "Khartoum",
"continentName": "Africa"
},
{
"countryCode": "SE",
"countryName": "Sweden",
"currencyCode": "SEK",
"population": "9828655",
"capital": "Stockholm",
"continentName": "Europe"
},
{
"countryCode": "SG",
"countryName": "Singapore",
"currencyCode": "SGD",
"population": "4701069",
"capital": "Singapore",
"continentName": "Asia"
},
{
"countryCode": "SH",
"countryName": "Saint Helena",
"currencyCode": "SHP",
"population": "7460",
"capital": "Jamestown",
"continentName": "Africa"
},
{
"countryCode": "SI",
"countryName": "Slovenia",
"currencyCode": "EUR",
"population": "2007000",
"capital": "Ljubljana",
"continentName": "Europe"
},
{
"countryCode": "SJ",
"countryName": "Svalbard and Jan Mayen",
"currencyCode": "NOK",
"population": "2550",
"capital": "Longyearbyen",
"continentName": "Europe"
},
{
"countryCode": "SK",
"countryName": "Slovakia",
"currencyCode": "EUR",
"population": "5455000",
"capital": "Bratislava",
"continentName": "Europe"
},
{
"countryCode": "SL",
"countryName": "Sierra Leone",
"currencyCode": "SLL",
"population": "5245695",
"capital": "Freetown",
"continentName": "Africa"
},
{
"countryCode": "SM",
"countryName": "San Marino",
"currencyCode": "EUR",
"population": "31477",
"capital": "San Marino",
"continentName": "Europe"
},
{
"countryCode": "SN",
"countryName": "Senegal",
"currencyCode": "XOF",
"population": "12323252",
"capital": "Dakar",
"continentName": "Africa"
},
{
"countryCode": "SO",
"countryName": "Somalia",
"currencyCode": "SOS",
"population": "10112453",
"capital": "Mogadishu",
"continentName": "Africa"
},
{
"countryCode": "SR",
"countryName": "Suriname",
"currencyCode": "SRD",
"population": "492829",
"capital": "Paramaribo",
"continentName": "South America"
},
{
"countryCode": "SS",
"countryName": "South Sudan",
"currencyCode": "SSP",
"population": "8260490",
"capital": "Juba",
"continentName": "Africa"
},
{
"countryCode": "ST",
"countryName": "São Tomé and Príncipe",
"currencyCode": "STD",
"population": "175808",
"capital": "São Tomé",
"continentName": "Africa"
},
{
"countryCode": "SV",
"countryName": "El Salvador",
"currencyCode": "USD",
"population": "6052064",
"capital": "San Salvador",
"continentName": "North America"
},
{
"countryCode": "SX",
"countryName": "Sint Maarten",
"currencyCode": "ANG",
"population": "37429",
"capital": "Philipsburg",
"continentName": "North America"
},
{
"countryCode": "SY",
"countryName": "Syria",
"currencyCode": "SYP",
"population": "22198110",
"capital": "Damascus",
"continentName": "Asia"
},
{
"countryCode": "SZ",
"countryName": "Swaziland",
"currencyCode": "SZL",
"population": "1354051",
"capital": "Mbabane",
"continentName": "Africa"
},
{
"countryCode": "TC",
"countryName": "Turks and Caicos Islands",
"currencyCode": "USD",
"population": "20556",
"capital": "Cockburn Town",
"continentName": "North America"
},
{
"countryCode": "TD",
"countryName": "Chad",
"currencyCode": "XAF",
"population": "10543464",
"capital": "N'Djamena",
"continentName": "Africa"
},
{
"countryCode": "TF",
"countryName": "French Southern Territories",
"currencyCode": "EUR",
"population": "140",
"capital": "Port-aux-Français",
"continentName": "Antarctica"
},
{
"countryCode": "TG",
"countryName": "Togo",
"currencyCode": "XOF",
"population": "6587239",
"capital": "Lomé",
"continentName": "Africa"
},
{
"countryCode": "TH",
"countryName": "Thailand",
"currencyCode": "THB",
"population": "67089500",
"capital": "Bangkok",
"continentName": "Asia"
},
{
"countryCode": "TJ",
"countryName": "Tajikistan",
"currencyCode": "TJS",
"population": "7487489",
"capital": "Dushanbe",
"continentName": "Asia"
},
{
"countryCode": "TK",
"countryName": "Tokelau",
"currencyCode": "NZD",
"population": "1466",
"capital": "",
"continentName": "Oceania"
},
{
"countryCode": "TL",
"countryName": "East Timor",
"currencyCode": "USD",
"population": "1154625",
"capital": "Dili",
"continentName": "Oceania"
},
{
"countryCode": "TM",
"countryName": "Turkmenistan",
"currencyCode": "TMT",
"population": "4940916",
"capital": "Ashgabat",
"continentName": "Asia"
},
{
"countryCode": "TN",
"countryName": "Tunisia",
"currencyCode": "TND",
"population": "10589025",
"capital": "Tunis",
"continentName": "Africa"
},
{
"countryCode": "TO",
"countryName": "Tonga",
"currencyCode": "TOP",
"population": "122580",
"capital": "Nuku'alofa",
"continentName": "Oceania"
},
{
"countryCode": "TR",
"countryName": "Turkey",
"currencyCode": "TRY",
"population": "77804122",
"capital": "Ankara",
"continentName": "Asia"
},
{
"countryCode": "TT",
"countryName": "Trinidad and Tobago",
"currencyCode": "TTD",
"population": "1228691",
"capital": "Port of Spain",
"continentName": "North America"
},
{
"countryCode": "TV",
"countryName": "Tuvalu",
"currencyCode": "AUD",
"population": "10472",
"capital": "Funafuti",
"continentName": "Oceania"
},
{
"countryCode": "TW",
"countryName": "Taiwan",
"currencyCode": "TWD",
"population": "22894384",
"capital": "Taipei",
"continentName": "Asia"
},
{
"countryCode": "TZ",
"countryName": "Tanzania",
"currencyCode": "TZS",
"population": "41892895",
"capital": "Dodoma",
"continentName": "Africa"
},
{
"countryCode": "UA",
"countryName": "Ukraine",
"currencyCode": "UAH",
"population": "45415596",
"capital": "Kiev",
"continentName": "Europe"
},
{
"countryCode": "UG",
"countryName": "Uganda",
"currencyCode": "UGX",
"population": "33398682",
"capital": "Kampala",
"continentName": "Africa"
},
{
"countryCode": "UM",
"countryName": "U.S. Minor Outlying Islands",
"currencyCode": "USD",
"population": "0",
"capital": "",
"continentName": "Oceania"
},
{
"countryCode": "US",
"countryName": "United States",
"currencyCode": "USD",
"population": "310232863",
"capital": "Washington",
"continentName": "North America"
},
{
"countryCode": "UY",
"countryName": "Uruguay",
"currencyCode": "UYU",
"population": "3477000",
"capital": "Montevideo",
"continentName": "South America"
},
{
"countryCode": "UZ",
"countryName": "Uzbekistan",
"currencyCode": "UZS",
"population": "27865738",
"capital": "Tashkent",
"continentName": "Asia"
},
{
"countryCode": "VA",
"countryName": "Vatican City",
"currencyCode": "EUR",
"population": "921",
"capital": "Vatican City",
"continentName": "Europe"
},
{
"countryCode": "VC",
"countryName": "Saint Vincent and the Grenadines",
"currencyCode": "XCD",
"population": "104217",
"capital": "Kingstown",
"continentName": "North America"
},
{
"countryCode": "VE",
"countryName": "Venezuela",
"currencyCode": "VEF",
"population": "27223228",
"capital": "Caracas",
"continentName": "South America"
},
{
"countryCode": "VG",
"countryName": "British Virgin Islands",
"currencyCode": "USD",
"population": "21730",
"capital": "Road Town",
"continentName": "North America"
},
{
"countryCode": "VI",
"countryName": "U.S. Virgin Islands",
"currencyCode": "USD",
"population": "108708",
"capital": "Charlotte Amalie",
"continentName": "North America"
},
{
"countryCode": "VN",
"countryName": "Vietnam",
"currencyCode": "VND",
"population": "89571130",
"capital": "Hanoi",
"continentName": "Asia"
},
{
"countryCode": "VU",
"countryName": "Vanuatu",
"currencyCode": "VUV",
"population": "221552",
"capital": "Port Vila",
"continentName": "Oceania"
},
{
"countryCode": "WF",
"countryName": "Wallis and Futuna",
"currencyCode": "XPF",
"population": "16025",
"capital": "Mata-Utu",
"continentName": "Oceania"
},
{
"countryCode": "WS",
"countryName": "Samoa",
"currencyCode": "WST",
"population": "192001",
"capital": "Apia",
"continentName": "Oceania"
},
{
"countryCode": "XK",
"countryName": "Kosovo",
"currencyCode": "EUR",
"population": "1800000",
"capital": "Pristina",
"continentName": "Europe"
},
{
"countryCode": "YE",
"countryName": "Yemen",
"currencyCode": "YER",
"population": "23495361",
"capital": "Sanaa",
"continentName": "Asia"
},
{
"countryCode": "YT",
"countryName": "Mayotte",
"currencyCode": "EUR",
"population": "159042",
"capital": "Mamoudzou",
"continentName": "Africa"
},
{
"countryCode": "ZA",
"countryName": "South Africa",
"currencyCode": "ZAR",
"population": "49000000",
"capital": "Pretoria",
"continentName": "Africa"
},
{
"countryCode": "ZM",
"countryName": "Zambia",
"currencyCode": "ZMW",
"population": "13460305",
"capital": "Lusaka",
"continentName": "Africa"
},
{
"countryCode": "ZW",
"countryName": "Zimbabwe",
"currencyCode": "ZWL",
"population": "13061000",
"capital": "Harare",
"continentName": "Africa"
}
]
# ISO 3166-1 alpha-2 codes of the South American entries in the data above;
# used below to decide which capitals get a Location row.
south_american_country_codes = [
    "AR",  # Argentina
    "BO",  # Bolivia
    "BR",  # Brazil
    "CL",  # Chile
    "CO",  # Colombia
    "EC",  # Ecuador
    "FK",  # Falkland Islands
    "GF",  # French Guiana
    "GY",  # Guyana
    "PY",  # Paraguay
    "PE",  # Peru
    "GS",  # South Georgia and the South Sandwich Islands
    "SR",  # Suriname
    "UY",  # Uruguay
    "VE",  # Venezuela
]
class Command(BaseCommand):
    """Seed the database: one Country per entry in all_countries_data, plus a
    capital-city Location for each South American country."""
    help = 'Creates initial data'

    def handle(self, *args, **options):
        # Note: runs one INSERT per row; fine for a one-off seed command.
        for country in all_countries_data:
            new_country = Country.objects.create(
                name=country['countryName'],
                code=country['countryCode']
            )
            # Only South American capitals are stored as Locations.
            if country['countryCode'] in south_american_country_codes:
                # Fix: the created Location was previously bound to an unused
                # local variable; the bare create() call has the same effect.
                Location.objects.create(
                    city=country['capital'],
                    country=new_country
                )
|
def lista_e_munte(lista):
    # Checks whether `lista` is a "mountain" (Romanian: ascending then
    # descending). NOTE(review): the function looks truncated — it flips the
    # phase flags on the first decrease but never validates the descending
    # phase and never returns a result (implicitly returns None). TODO confirm
    # the intended remainder before relying on it.
    creste = True   # True while we are still in the ascending phase
    scade = False   # becomes True once the first decrease is seen
    for i in range(0,len(lista)-1):
        j = i+1  # index of the element following lista[i]
        if creste:
            if lista[j]<lista[i]:
                # first decrease: ascending phase ends, descending begins
                creste = False
                scade = True
|
from flask.ext.admin import AdminIndexView, BaseView
from flask.ext.admin.contrib.sqla.view import ModelView
from flask_admin.contrib.fileadmin import FileAdmin
from flask.ext.security import utils
from wtforms.csrf.core import CSRFTokenField, CSRF
from flask_admin.form import SecureForm
from wtforms import PasswordField, validators
from flask import redirect, url_for, flash, g
from os import listdir, path
from config import basedir
"""
AdminBaseView: Access control for the admin panel, without this there is none!
Parent: flask.ext.admin.BaseView
"""
class AdminBaseView(BaseView):
    """Flask-Admin base view restricted to authenticated admin users."""

    def is_accessible(self):
        """Return True only when the current user is logged in and an admin."""
        # Idiom fix: return the condition directly instead of
        # `if ...: return True / return False`; bool() normalizes any
        # truthy flag value to an actual boolean.
        return bool(g.user.is_authenticated and g.user.is_admin)

    def _handle_view(self, name, **kwargs):
        """Flask-Admin hook run before each view: redirect unauthorized users.

        Returning the redirect short-circuits the view; returning None lets
        Flask-Admin proceed normally.
        """
        if not self.is_accessible():
            flash("You don't have permission to go there", category="warning")
            return redirect(url_for('main.index'))
"""
WhiteTeamBaseView: Access control for the admin panel, without this there is none!
Parent: flask.ext.admin.BaseView
"""
class WhiteTeamBaseView(BaseView):
    """Flask-Admin base view restricted to authenticated white-team users."""

    def is_accessible(self):
        """Return True only when the current user is logged in and white team."""
        # Idiom fix: return the condition directly instead of
        # `if ...: return True / return False`; bool() normalizes any
        # truthy flag value to an actual boolean.
        return bool(g.user.is_authenticated and g.user.is_whiteteam)

    def _handle_view(self, name, **kwargs):
        """Flask-Admin hook run before each view: redirect unauthorized users.

        Returning the redirect short-circuits the view; returning None lets
        Flask-Admin proceed normally.
        """
        if not self.is_accessible():
            flash("You don't have permission to go there", category="warning")
            return redirect(url_for('main.index'))
"""
WhiteTeamIndexView: Make AdminIndexView from flask.ext.admin require RBAC
Parents: flask.ext.admin.AdminIndexView, .WhiteTeamBaseView
"""
class ProtectedIndexView(AdminIndexView, WhiteTeamBaseView):
    """Admin index page with white-team access control.

    The MRO resolves is_accessible/_handle_view from WhiteTeamBaseView, so the
    index requires a logged-in white-team user.
    """
    pass
"""
AdminModelView: Add RBAC to flask-admin's model view
Parents: flask.ext.admin.contrib.sqla.view.ModelView, .AdminBaseView
"""
class AdminModelView(ModelView, AdminBaseView):
    """SQLAlchemy model view gated by AdminBaseView's admin-only access control."""
    pass
"""
WhiteTeamModelView: Add RBAC to flask-admin's model view
Parents: flask.ext.admin.contrib.sqla.view.ModelView, .AdminBaseView
"""
class WhiteTeamModelView(ModelView, WhiteTeamBaseView):
    """Model view visible to white-team users; write access is admin-only."""

    def is_accessible(self):
        """Refresh write permissions for the current user, then defer the
        visibility decision to WhiteTeamBaseView."""
        # Per-request toggle: only admins may create/edit/delete rows.
        is_admin = g.user.is_admin
        self.can_create = is_admin
        self.can_edit = is_admin
        self.can_delete = is_admin
        return super(WhiteTeamModelView, self).is_accessible()
"""
UserModelView: The model view for users
Parent: .AdminModelView
"""
class UserModelView(AdminModelView):
    """Admin model view for user accounts, with password handling."""
    # we don't need to see the huge password hash in the list display
    column_exclude_list = ['password']
    # this information is bookkeeping maintained by the login machinery,
    # so don't make it editable
    form_excluded_columns = ['last_login_at', 'current_login_at',
                             'last_login_ip', 'current_login_ip',
                             'login_count', 'submissions']
    # make sure the password can't be seen when typing it
    form_overrides = dict(password=PasswordField)
    # add a confirm password field and make sure it equals the other password field
    form_extra_fields = {'password2': PasswordField('Confirm Password',
        [validators.EqualTo('password', message='Passwords must match')])}
    # this just sets the order of the form fields, otherwise confirm pass is on the bottom
    form_columns = ('roles', 'email', 'password', 'password2', 'active')
    # make sure the password is actually encrypted when it is changed or created!
    def on_model_change(self, form, model, is_created):
        """Hash the plaintext password before it is persisted, but only when one was entered."""
        if form.password.data:
            model.password = utils.encrypt_password(model.password)
"""
RoleModelView: The model view for roles
Parent: .AdminModelView
"""
class RoleModelView(AdminModelView):
    """Admin model view for roles.

    Role names are referenced elsewhere in the application, so roles can
    neither be created nor deleted here, and the name field is excluded
    from the edit form.
    """
    # adding, deleting or changing role names would be useless...
    can_delete = False
    can_create = False
    # BUG FIX: ('name') is just the string 'name', not a tuple, and Python's
    # `in` operator on a string does *substring* matching — so any column
    # whose key is a substring of "name" would also be excluded. A
    # one-element tuple excludes exactly the 'name' column.
    form_excluded_columns = ('name',)
"""
InjectModelView: The model view for Injects
Parent: .WhiteTeamModelView
"""
class InjectModelView(WhiteTeamModelView):
    """Model view for injects.

    The ``inject_doc`` field is rendered as a dropdown whose choices are
    the (non-hidden) files present in app/injects at request time.
    """
    # Drop the default validators so dynamically-assigned choices validate.
    form_args = {'inject_doc': {'validators': list()}}
    # Dummy choice: only present so flask-admin renders a select widget;
    # the real choices are assigned per-request in create_form/edit_form.
    form_choices = {'inject_doc': [(1, 1)]}
    # BUG FIX: ('extensions') is just the string 'extensions', and `in` on a
    # string does substring matching; a one-element tuple excludes exactly
    # the 'extensions' column.
    form_excluded_columns = ('extensions',)

    def create_form(self, obj=None):
        """Build the create form with an up-to-date file choice list."""
        form = super(InjectModelView, self).create_form(obj)
        form.inject_doc.choices = self.get_files()
        return form

    def edit_form(self, obj=None):
        """Build the edit form with an up-to-date file choice list."""
        form = super(InjectModelView, self).edit_form(obj)
        form.inject_doc.choices = self.get_files()
        return form

    def get_files(self):
        """Return (value, label) choices for non-hidden files in app/injects."""
        inject_dir = path.join(basedir, 'app', 'injects')
        return [(f, f) for f in listdir(inject_dir) if not f.startswith(".")]
class AnnouncementModelView(WhiteTeamModelView):
    """Model view for announcements; inherits white-team access rules unchanged."""
    pass
"""
WhiteTeamFileAdmin: The file admin view that white team can manage
Parents: FileAdmin, WhiteTeamBaseView
"""
class WhiteTeamFileAdmin(FileAdmin, WhiteTeamBaseView):
    """File browser usable by white team; deleting files/dirs is admin-only."""

    def is_accessible(self):
        # Deletion rights depend on the current user, so refresh the
        # capability flags on every access check.
        # NOTE: assumes g.user.is_admin is a plain attribute (no side effects).
        is_admin = g.user.is_admin
        self.can_delete = is_admin
        self.can_delete_dirs = is_admin
        return super(WhiteTeamFileAdmin, self).is_accessible()
class SharedFileAdmin(WhiteTeamFileAdmin):
    """File admin for the shared files area; white-team rules apply unchanged."""
    pass
class InjectFileAdmin(WhiteTeamFileAdmin):
    """File admin for the inject documents directory; white-team rules apply unchanged."""
    pass
class InjectExtensionModelView(WhiteTeamModelView):
    """Model view for inject deadline extensions."""
    # Make the form label state the unit so users don't guess.
    form_args = {"duration": {"label": "Duration (in minutes)"}}
|
from django import template
from dashboard.models import Category, Article, Tag
from dashboard.views import settings
from django.utils.html import format_html
register = template.Library()


@register.simple_tag
def highlight_query(title, query):
    """Return *title* with every occurrence of *query* wrapped in a highlight span.

    BUG FIX: the previous implementation passed the (user-supplied) title,
    with ``{}`` placeholders substituted in, as the *format string* of
    ``format_html``. That raised ``ValueError``/``KeyError`` when the title
    contained literal braces, raised ``IndexError`` when the query occurred
    more than once (one positional arg, many placeholders), and left the
    rest of the title unescaped (XSS). Here every fragment is escaped and
    the highlighted query is joined between them.
    """
    from django.utils.html import conditional_escape
    from django.utils.safestring import mark_safe

    if not query:
        return conditional_escape(title)
    highlighted = format_html('<span class="highlighted">{}</span>', query)
    fragments = [conditional_escape(piece) for piece in title.split(query)]
    return mark_safe(highlighted.join(fragments))
@register.filter
def display_humantime(value):
    """Format a date/datetime as 'MM-DD'."""
    return '{:%m-%d}'.format(value)
@register.inclusion_tag('dashboard/tags/banner.html')
def load_banner():
    """Render the page-top banner with all categories and the site settings."""
    context = {
        'categorys': Category.objects.all(),
        'settings': settings,
    }
    return context
@register.inclusion_tag('dashboard/tags/sidebar_tag.html')
def load_sidebar_tag():
    """Render the sidebar tag list (all tags)."""
    return {'sidebar_tags': Tag.objects.all()}
@register.inclusion_tag('dashboard/tags/sidebar_hot.html')
def load_sidebar_hot():
    """Render the sidebar 'hot articles' box: top ten published articles by views."""
    hot = Article.published.filter(ad_property=0).order_by('-views')[:10]
    return {'hot_articles': hot}
@register.inclusion_tag('dashboard/tags/sidebar_best_recomm.html')
def load_sidebar_best_recomm():
    """Render the sidebar 'best recommendations' box (up to ten published articles)."""
    recommended = Article.published.filter(ad_property=4)[:10]
    return {'best_articles': recommended}
@register.inclusion_tag('dashboard/tags/footer.html')
def load_footer():
    """Render the footer with the current year and the site settings."""
    from django.utils import timezone
    context = {
        'year': timezone.now().year,
        'settings': settings,
    }
    return context
@register.inclusion_tag('dashboard/tags/scroll.html')
def load_scroll():
    """Render the scrolling notice bar using the site settings."""
    return {'settings': settings}
@register.simple_tag
def query(qs, **kwargs):
    """ template tag which allows queryset filtering. Usage:
        {% query books author=author as mybooks %}
        {% for book in mybooks %}
        ...
        {% endfor %}
    """
    # Keyword lookups supplied in the template are passed straight through
    # to QuerySet.filter(), so any Django field lookup syntax works.
    return qs.filter(**kwargs)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from xml.sax import make_parser, handler
# 2D histogram of OSM node counts: 360 longitude bins x 180 latitude bins.
edit_heat_map = [[0] * 180 for _ in range(360)]
class FancyCounter(handler.ContentHandler):
    """SAX handler that bins OSM node coordinates into ``edit_heat_map``.

    Fixes over the original:
    - the element counter is stored on the instance (``self.el``); the old
      code assigned a local in ``__init__`` and then incremented an unbound
      name in ``startElement`` (UnboundLocalError on the first element).
    - progress is printed every millionth element; the old test
      ``if el % 1000000:`` fired for every element that was NOT a multiple.
    - indices are clamped so lat == -90 or lon == 180 cannot index one past
      the end of the 360x180 grid.
    - ``print`` is called as a function so the script runs on Python 2 and 3.
    """

    def __init__(self):
        handler.ContentHandler.__init__(self)
        self.el = 0  # number of XML elements seen so far

    def startElement(self, name, attrs):
        self.el += 1
        if self.el % 1000000 == 0:
            print("seen %d nodes" % self.el)
        if name == "node":
            # Map lat [-90, 90] -> row [0, 179] and lon [-180, 180] -> col [0, 359].
            lat = min(180 - (int(float(attrs["lat"])) + 90), 179)
            lon = min(int(float(attrs["lon"])) + 180, 359)
            edit_heat_map[lon][lat] += 1

    def endElement(self, name):
        pass

    def endDocument(self):
        pass
# Parse the planet dump, counting node locations into edit_heat_map.
parser = make_parser()
counter = FancyCounter()
parser.setContentHandler(counter)
parser.parse("planet.osm")
print("dumping to bin_counts.pickle")
# Prefer the C-accelerated pickle on Python 2; fall back for Python 3.
try:
    import cPickle as pickle
except ImportError:
    import pickle
# BUG FIX: the file was previously opened with the default (read-only, text)
# mode, so pickle.dump would fail; pickling needs a writable binary handle.
# The with-block also guarantees the handle is closed on error.
with open("bin_counts.pickle", "wb") as f:
    pickle.dump(edit_heat_map, f)
|
import asyncio
import sys
from collections import OrderedDict
import json
import shlex
import backoff
import coreapi
import coreschema
from django.utils.decorators import method_decorator
from django.utils import timezone
from django.views.decorators.cache import cache_page
from django.contrib.contenttypes.models import ContentType
from django.db.models import Q
from django.utils.functional import cached_property
from django.shortcuts import redirect
from fnmatch import fnmatch
import logging
import os
import pydash
from paramiko import SSHClient, ssh_exception, RSAKey, AutoAddPolicy
from laxy_backend.scraping import (
render_page,
parse_cloudstor_links,
parse_simple_index_links,
is_apache_index_page,
parse_cloudstor_webdav,
)
from . import paramiko_monkeypatch
from toolz import merge as merge_dicts
import requests
import rest_framework_jwt
import celery
from celery import shared_task
from datetime import datetime
from django.conf import settings
from django.contrib.admin.views.decorators import user_passes_test
from django.db import transaction
from django.http import HttpResponse, StreamingHttpResponse, JsonResponse, FileResponse
from django.urls import reverse
from django.utils.encoding import force_text
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.filters import OrderingFilter
from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_page
from fs.errors import DirectoryExpected
from io import BufferedReader, BytesIO, StringIO
from pathlib import Path
import paramiko
from robobrowser import RoboBrowser
from rest_framework import generics
from rest_framework import status
from rest_framework.settings import api_settings
from rest_framework.authtoken.models import Token
from rest_framework.decorators import (
api_view,
renderer_classes,
permission_classes,
authentication_classes,
)
from rest_framework.filters import BaseFilterBackend
from rest_framework_guardian.filters import ObjectPermissionsFilter
from rest_framework.pagination import PageNumberPagination
from rest_framework.parsers import JSONParser, MultiPartParser
from rest_framework.permissions import IsAdminUser, IsAuthenticated, AllowAny
from rest_framework.renderers import JSONRenderer, BaseRenderer, TemplateHTMLRenderer
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework_csv.renderers import PaginatedCSVRenderer
from guardian.shortcuts import get_objects_for_user
from typing import Dict, List, Union
import urllib
from urllib.parse import urlparse, parse_qs, unquote
from wsgiref.util import FileWrapper
from drf_openapi.utils import view_config
from laxy_backend.storage.http_remote import (
is_archive_link,
_check_content_size_and_resolve_redirects,
)
from .permissions import (
DefaultObjectPermissions,
HasReadonlyObjectAccessToken,
IsOwner,
IsPublic,
IsSuperuser,
is_owner,
HasAccessTokenForEventLogSubject,
token_is_valid,
FileSetHasAccessTokenForJob,
FileHasAccessTokenForJob,
)
from .filters import IsOwnerFilter, IsPublicFilter
from . import ena
from .tasks.job import (
bulk_move_job_rsync,
expire_old_job,
start_job,
index_remote_files,
_finalize_job_task_err_handler,
set_job_status,
kill_remote_job,
estimate_job_tarball_size,
move_job_files_to_archive_task,
get_job_expiry_for_status,
)
from .jwt_helpers import get_jwt_user_header_dict, get_jwt_user_header_str
from .models import (
Job,
ComputeResource,
Pipeline,
File,
FileSet,
SampleCart,
PipelineRun,
EventLog,
AccessToken,
SystemStatus,
get_primary_compute_location_for_files,
job_path_on_compute,
)
from .serializers import (
PatchSerializerResponse,
PutSerializerResponse,
JobSerializerResponse,
JobSerializerRequest,
ComputeResourceSerializer,
FileSerializer,
FileSerializerPostRequest,
FileSetSerializer,
FileSetSerializerPostRequest,
SampleCartSerializer,
PipelineRunSerializer,
PipelineRunCreateSerializer,
SchemalessJsonResponseSerializer,
JobListSerializerResponse,
JobListSerializerResponse_CSV,
PipelineSerializer,
EventLogSerializer,
JobEventLogSerializer,
JobFileSerializerCreateRequest,
RedirectResponseSerializer,
FileListing,
AccessTokenSerializer,
JobAccessTokenRequestSerializer,
JobAccessTokenResponseSerializer,
PingResponseSerializer,
SystemStatusSerializer,
)
from .util import (
sanitize_filename,
sh_bool,
laxy_sftp_url,
generate_uuid,
multikeysort,
get_content_type,
find_filename_and_size_from_url,
simplify_fastq_name,
longest_common_prefix,
)
from .storage import http_remote
from .view_mixins import (
JSONView,
GetMixin,
PatchMixin,
DeleteMixin,
PostMixin,
CSVTextParser,
PutMixin,
CSVTextParserPandas,
etag_headers,
JSONPatchMixin,
)
# from .models import User
from django.contrib.auth import get_user_model
from .data.genomics.genomes import REFERENCE_GENOME_MAPPINGS
from contextlib import closing
# This is a mapping of 'matchers' to link parsing functions.
# The matchers can be simple strings, which are tested as a substring of the URL,
# or a function like matcher(url, page_text). The matcher function returns True or False.
LINK_SCRAPER_MAPPINGS = [
    # Most specific first: CloudStor share links are listed via WebDAV.
    ("://cloudstor.aarnet.edu.au/plus/s/", parse_cloudstor_webdav),
    # ('://cloudstor.aarnet.edu.au/plus/s/', parse_cloudstor_links),
    (is_apache_index_page, parse_simple_index_links),
    # Catch-all: any other URL is treated as a simple index page of links.
    ("://", parse_simple_index_links),
]
# The user model may be swapped via AUTH_USER_MODEL, so resolve it dynamically.
User = get_user_model()
logger = logging.getLogger(__name__)
class PingView(APIView):
    """Liveness endpoint, also reporting the current SystemStatus message."""
    renderer_classes = (JSONRenderer,)
    permission_classes = (AllowAny,)

    @view_config(response_serializer=PingResponseSerializer)
    def get(self, request, version=None):
        """
        Used by clients to poll if the backend is online.
        """
        app_version = getattr(settings, "VERSION", "unspecified")
        env = getattr(settings, "ENV", "unspecified")
        current_status = None
        try:
            moment = timezone.now()
            # Either an active status scheduled for "now", or an active
            # status with no schedule at all.
            scheduled = Q(active=True) & Q(start_time__lte=moment) & Q(end_time__gte=moment)
            unscheduled = Q(active=True) & Q(start_time=None) & Q(end_time=None)
            current_status = (
                SystemStatus.objects.filter(scheduled | unscheduled)
                .order_by("-priority", "start_time", "-modified_time")
                .first()
            )
        except SystemStatus.DoesNotExist:
            pass
        except Exception as ex:
            # Status lookup must never break the liveness check.
            logger.warning("PingView: %s" % ex)
        payload = PingResponseSerializer(
            {
                "version": app_version,
                "env": env,
                "system_status": current_status,
            }
        ).data
        return JsonResponse(payload)
class JobDirectTarDownload(JSONView):
    """Stream a tar.gz of an entire job directory, built on-the-fly over SSH."""
    lookup_url_kwarg = "job_id"
    queryset = Job.objects.all()
    permission_classes = (IsOwner | IsSuperuser | HasReadonlyObjectAccessToken,)

    def get(self, request, job_id, version=None):
        """
        Download a tar.gz of every file in the job.
        Supports `?access_token=` query parameter for obfuscated public link sharing.
        """
        # must get object this way to correctly enforce permission_classes !
        job = self.get_object()
        # NOTE: The download method used here will only
        # work on a job stored in a single SSH-accessible location, not one with
        # archived files spread across object store etc. The MyTardis-style tarball
        # download, using django-storages, would be required to do tarball downloads
        # in that case.
        stored_at = get_primary_compute_location_for_files(job.get_files())
        if stored_at is None:
            # FIX: removed stray f-prefix on a string with no placeholders.
            return HttpResponse(
                status=status.HTTP_503_SERVICE_UNAVAILABLE,
                reason="Files are currently unavailable for tarball download, try again later.",
            )
        # NOTE(review): job_path appears unused — confirm job_path_on_compute
        # has no required side effects before removing this call.
        job_path = job_path_on_compute(job, stored_at)
        client = stored_at.ssh_client()
        # eg, tar -chzf - --directory "/scratch/jobs" "12345abcdefghjobid"
        # jobs_dir comes from trusted ComputeResource config and job.id is
        # server-generated, but quote defensively anyway.
        stdin, stdout, stderr = client.exec_command(
            f"tar -chzf - --directory {shlex.quote(str(stored_at.jobs_dir))} {shlex.quote(str(job.id))}"
        )
        if request.path.endswith(".tar.gz"):
            output_fn = f"{job.id}.tar.gz"
        else:
            output_fn = f"laxy_job_{job.id}.tar.gz"
        return FileResponse(stdout, filename=output_fn, as_attachment=True)
# TODO: Strangely, Swagger/CoreAPI only show the 'name' for the query parameter
# if name='query'. Any other value doesn't seem to appear in the
# auto-generated docs when applying this as a filter backend as intended
class QueryParamFilterBackend(BaseFilterBackend):
    """
    This class largely exists so that query parameters can appear in the
    automatic documentation.
    A subclass is used in a DRF view like:
        filter_backends = (CustomQueryParamFilterBackend,)
    to specify the name, description and type of query parameters.
    eg http://my_url/?query=somestring
    To define query params subclass it and pass a list of dictionaries into the
    superclass constructor like:
        class CustomQueryParams(QueryParamFilterBackend):
            def __init__(self):
                super().__init__([{name: 'query',
                                   description: 'A comma separated list of something.'}])
    """
    # NOTE(review): 'any' in the annotation below is the builtin, probably
    # meant typing.Any; harmless at runtime, kept for interface stability.
    def __init__(self, query_params: List[Dict[str, any]] = None):
        if query_params is None:
            query_params = []
        # FIX: always initialise the attribute, so get_schema_fields() cannot
        # raise AttributeError when no query params are supplied (the old
        # hasattr dance only created it on the first append).
        self.schema_fields = []
        for qp in query_params:
            name = qp.get("name")
            field = coreapi.Field(
                name=name,
                location=qp.get("location", name),
                description=qp.get("description", None),
                example=qp.get("example", None),
                required=qp.get("required", True),
                type=qp.get("type", "string"),
                schema=coreschema.String(
                    # FIX: the old default expression
                    # (qp.get("name", False) or qp.get("name")) always
                    # reduces to qp.get("name").
                    title=force_text(qp.get("title", name)),
                    description=force_text(qp.get("description", "")),
                ),
            )
            self.schema_fields.append(field)

    def get_schema_fields(self, view):
        """Return the coreapi fields documenting this backend's query params."""
        return self.schema_fields
class StreamingFileDownloadRenderer(BaseRenderer):
    """DRF renderer that streams raw file bytes as application/octet-stream."""
    media_type = "application/octet-stream"
    format = "download"
    charset = None
    # 'binary' tells DRF not to attempt text coercion of the payload.
    render_style = "binary"
    # NOTE(review): render() is a generator, so this decorator wraps only
    # generator *creation* — exceptions raised while iterating mid-stream are
    # not retried by backoff here; confirm whether that is the intent.
    @backoff.on_exception(
        backoff.expo,
        (
            EOFError,
            IOError,
            OSError,
            ssh_exception.SSHException,
            ssh_exception.AuthenticationException,
        ),
        max_tries=3,
        jitter=backoff.full_jitter,
    )
    def render(self, filelike, media_type=None, renderer_context=None, blksize=8192):
        """Yield the content of *filelike* in blksize chunks, re-tagging SSH/OS errors as IOError."""
        iterable = FileWrapper(filelike, blksize=blksize)
        try:
            for chunk in iterable:
                yield chunk
        except ssh_exception.SSHException as ex:
            # Re-raise as IOError (original traceback preserved) so callers
            # see a uniform error type regardless of the storage backend.
            raise IOError(
                str(ex) + "(paramiko.ssh_exception.SSHException)"
            ).with_traceback(sys.exc_info()[2])
        except OSError as ex:
            raise IOError(str(ex) + "(OSError)").with_traceback(sys.exc_info()[2])
class RemoteFilesQueryParams(QueryParamFilterBackend):
    """Documents the ?url= and ?fileglob= query parameters for remote file listings."""

    def __init__(self):
        params = [
            dict(
                name="url",
                example="https://bioinformatics.erc.monash.edu/home/andrewperry/test/sample_data/",
                description="A URL containing links to input data files",
            ),
            dict(
                name="fileglob",
                example="*.fastq.gz",
                description="A glob (wildcard) expression to filter files returned. Doesn't filter directories",
            ),
        ]
        super().__init__(params)
class ENAQueryParams(QueryParamFilterBackend):
    """Documents the ?accessions= query parameter for ENA lookups."""

    def __init__(self):
        params = [
            dict(
                name="accessions",
                example="PRJNA276493,SRR950078",
                description="A comma separated list of ENA/SRA accessions.",
            ),
        ]
        super().__init__(params)
class ENAQueryView(APIView):
    """Proxy for ENA REST API metadata queries, returned as JSON."""
    renderer_classes = (JSONRenderer,)
    serializer_class = SchemalessJsonResponseSerializer
    # TODO: Would this be better achieved with a SearchFilter ?
    # http://www.django-rest-framework.org/api-guide/filtering/#searchfilter
    filter_backends = (ENAQueryParams,)
    api_docs_visible_to = "public"

    @view_config(response_serializer=SchemalessJsonResponseSerializer)
    def get(self, request, version=None):
        """
        Queries ENA metadata. Essentially a proxy for ENA REST API
        requests by accession, converting the XML output to JSON
        (eg https://www.ebi.ac.uk/ena/data/view/SRR950078&display=xml).
        Returns JSON equivalent to the ENA response.
        Query parameters:
        * `accessions` - a comma separated list of ENA accessions
        """
        accession_csv = request.query_params.get("accessions", None)
        if accession_csv is None:
            return Response({}, status=status.HTTP_400_BAD_REQUEST)
        ena_result = ena.search_ena_accessions(accession_csv.split(","))
        return Response(ena_result, status=status.HTTP_200_OK)
class ENAFastqUrlQueryView(JSONView):
    """Look up ENA run-table details (accessions, FASTQ URLs, checksums) as JSON."""
    renderer_classes = (JSONRenderer,)
    serializer_class = SchemalessJsonResponseSerializer
    filter_backends = (ENAQueryParams,)
    api_docs_visible_to = "public"

    @view_config(response_serializer=SchemalessJsonResponseSerializer)
    def get(self, request, version=None):
        """
        Returns a JSON object containing study, experiment, run and sample
        accessions associated with a given ENA accession, as well as the
        FASTQ FTP download links, md5 checksum, size and read count.
        Query parameters:
        * `accessions` - a comma separated list of ENA accessions
        """
        accession_csv = request.query_params.get("accessions", None)
        if accession_csv is None:
            return Response({}, status=status.HTTP_400_BAD_REQUEST)
        # ena_result = ena.get_fastq_urls(accessions)
        run_table = ena.get_run_table(accession_csv.split(","))
        return Response(run_table, status=status.HTTP_200_OK)
class ENASpeciesLookupView(APIView):
    """Look up the species for an ENA sample accession."""
    renderer_classes = (JSONRenderer,)
    serializer_class = SchemalessJsonResponseSerializer
    api_docs_visible_to = "public"
    # permission_classes = (AllowAny,)

    @view_config(response_serializer=SchemalessJsonResponseSerializer)
    def get(self, request, accession: str, version=None):
        """
        Queries ENA with a sample accession and returns the species information.
        Response example:
        ```json
        {
            "taxon_id":"10090",
            "scientific_name":"Mus musculus",
            "common_name":"house mouse"
        }
        ```
        <!--
        :param accession: An ENA sample accession (eg SAMN07548382)
        :type accession: str
        :param request:
        :type request:
        :param version:
        :type version:
        :return:
        :rtype:
        -->
        """
        try:
            ena_result = ena.get_organism_from_sample_accession(accession)
        except IndexError:
            # No organism record for this accession.
            return Response({}, status=status.HTTP_404_NOT_FOUND)
        # FIX: removed `except requests.exceptions.HTTPError as ex: raise ex`
        # — a pointless re-raise that restarts the traceback; the exception
        # now propagates unchanged, which is what callers observed anyway.
        return Response(ena_result, status=status.HTTP_200_OK)
class FileCreate(JSONView):
    """Endpoint for creating new File records."""
    queryset = File.objects.all()
    serializer_class = FileSerializer
    # permission_classes = (DjangoObjectPermissions,)

    @view_config(
        request_serializer=FileSerializerPostRequest, response_serializer=FileSerializer
    )
    def post(self, request: Request, version=None):
        """
        Create a new File. UUIDs are autoassigned.
        <!--
        :param request: The request object.
        :type request: rest_framework.request.Request
        :return: The response object.
        :rtype: rest_framework.response.Response
        -->
        """
        serializer = self.get_serializer(data=request.data)
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        # FIX: the saved instance was assigned to an unused local; the
        # serializer already holds it for .data below.
        serializer.save(owner=request.user)
        # NOTE(review): a create endpoint would conventionally return
        # 201 Created; kept as 200 OK since existing clients may rely on it.
        return Response(serializer.data, status=status.HTTP_200_OK)
class JSONPatchRFC7386Parser(JSONParser):
    """Parses JSON Merge Patch bodies (RFC 7386, application/merge-patch+json)."""
    media_type = "application/merge-patch+json"
class JSONPatchRFC6902Parser(JSONParser):
    """Parses JSON Patch bodies (RFC 6902, application/json-patch+json)."""
    media_type = "application/json-patch+json"
class StreamFileMixin(JSONView):
    """
    Mixin for views that serve File content, either inline (view in browser)
    or as a forced download, with Metalink-style headers attached.
    """
    def _as_file_obj(self, obj_ref: Union[str, File]):
        """
        Convert a File UUID string to a File instance, if required.
        Returns None when the reference is None or no such File exists.
        """
        if obj_ref is None:
            return None
        if isinstance(obj_ref, str):
            try:
                obj = File.objects.get(id=obj_ref)
            except File.DoesNotExist:
                return None
        else:
            obj = obj_ref
        return obj
    def _add_metalink_headers(self, obj, response):
        """
        Add Link / Digest / Etag headers advertising the canonical File URL
        and its checksum (when a checksum is recorded).
        """
        url = self.request.build_absolute_uri(obj.get_absolute_url())
        response["Link"] = f"<{url}>; rel=duplicate"
        if hasattr(obj, "checksum") and obj.checksum:
            hashtype = obj.checksum_type
            b64checksum = obj.checksum_hash_base64
            # Digest header carries the base64 form; Etag uses the raw checksum string.
            response["Digest"] = f"{hashtype.upper()}={b64checksum}"
            response["Etag"] = f"{obj.checksum}"
        return response
    # Retry transient SSH / IO failures encountered when opening the stream.
    @backoff.on_exception(
        backoff.expo,
        (
            EOFError,
            IOError,
            OSError,
            ssh_exception.SSHException,
            ssh_exception.AuthenticationException,
        ),
        max_tries=3,
        jitter=backoff.full_jitter,
    )
    def _stream_response(
        self, obj_ref: Union[str, File], filename: str = None, download: bool = True
    ) -> Union[StreamingHttpResponse, Response]:
        """
        Stream the content of a File as an HTTP response.
        :param obj_ref: a File instance or its UUID.
        :param filename: optional filename; must match File.name when given.
        :param download: True for Content-Disposition attachment, False for inline.
        """
        obj = self._as_file_obj(obj_ref)
        if obj is None:
            return HttpResponse(
                status=status.HTTP_404_NOT_FOUND,
                reason=f"File object does not exist ({obj_ref})",
            )
        if obj.file is None:
            return HttpResponse(
                status=status.HTTP_404_NOT_FOUND,
                reason=f"File data is unavailable (missing location) ({obj_ref})",
            )
        renderer = StreamingFileDownloadRenderer()
        # TODO: For local file:// URLs, django.http.response.FileResponse will probably preform better
        response = StreamingHttpResponse(
            renderer.render(obj.file), content_type=renderer.media_type
        )
        # A filename can optionally be specified in the URL, so that
        # wget will 'just work' without requiring the --content-disposition
        # flag, eg:
        # wget http://laxy.org/api/v1/file/XXblafooXX/alignment.bam
        # vs.
        # wget --content-disposition http://laxy.org/api/v1/file/XXblafooXX/
        #
        if filename is not None:
            if filename != obj.name:
                return Response(status=status.HTTP_404_NOT_FOUND)
        if download:
            response["Content-Disposition"] = f'attachment; filename="{obj.name}"'
        else:
            response["Content-Disposition"] = "inline"
            # Make the browser guess the Content-Type
            del response["Content-Type"]
        # Prefer the live size from the storage backend; fall back to the
        # size recorded in File.metadata.
        size = obj.metadata.get("size", None)
        if obj.file is not None and hasattr(obj.file, "size"):
            if obj.file.size is not None:
                response["Content-Length"] = int(obj.file.size)
        elif size is not None:
            response["Content-Length"] = int(size)
        self._add_metalink_headers(obj, response)
        return response
    def download(self, obj_ref: Union[str, File], filename=None):
        """Stream the File as a forced download (attachment)."""
        return self._stream_response(obj_ref, filename, download=True)
    def view(self, obj_ref: Union[str, File], filename=None):
        """Stream the File for inline viewing in the browser."""
        return self._stream_response(obj_ref, filename, download=False)
class FileContentDownload(StreamFileMixin, GetMixin, JSONView):
    """Serve the raw content of a File record, by UUID with filename in the URL."""
    queryset = File.objects.all()
    serializer_class = FileSerializer
    # permission_classes = (DjangoObjectPermissions,)

    @view_config(response_serializer=FileSerializer)
    def get(self, request: Request, uuid=None, filename=None, version=None):
        """
        Downloads the content of a File.
        When using a web browser, adding the `download` query parameter forces
        a download rather than viewing in a new tab
        (via the `Content-Disposition: attachment` header).
        If file checksums (eg MD5) are present, these are included as a
        header:
        `Digest: MD5=thisIsABase64EnC0DeDMd5sum==`.
        A filename can optionally be specified as the last part of the URL
        path, so that `wget` will 'just work' without requiring the
        `--content-disposition` flag. The filename must match the name stored
        in the File record.
        Examples:
        ### View in browser
        `GET` http://laxy.org/api/v1/file/XXblafooXX/content/alignment.bam
        responds with `Content-Disposition: inline`.
        ### Download in browser
        `GET` http://laxy.org/api/v1/file/XXblafooXX/content/alignment.bam?download
        responds with `Content-Disposition: attachment; filename=alignment.bam`.
        ### File download with `wget`
        `wget http://laxy.org/api/v1/file/XXblafooXX/content/alignment.bam`
        """
        # Viewing inline is the default; ?download switches to attachment.
        wants_attachment = "download" in request.query_params
        if wants_attachment:
            return super().download(uuid, filename=filename)
        return super().view(uuid, filename=filename)
class FileView(
    StreamFileMixin,
    GetMixin,
    DeleteMixin,
    PatchMixin,
    PutMixin,
    JSONPatchMixin,
    JSONView,
):
    """
    CRUD view for a single File record: JSON metadata, content streaming,
    JSON-patch updates and content replacement.
    """
    queryset = File.objects.all()
    serializer_class = FileSerializer
    # Accept plain JSON plus both JSON-patch flavours (RFC 7386 / RFC 6902).
    parser_classes = (JSONParser, JSONPatchRFC7386Parser, JSONPatchRFC6902Parser)
    permission_classes = (
        IsOwner | IsSuperuser | HasReadonlyObjectAccessToken | FileHasAccessTokenForJob,
    )
    # permission_classes = (DjangoObjectPermissions,)
    @view_config(response_serializer=FileSerializer)
    @etag_headers
    def get(self, request: Request, uuid=None, filename=None, version=None):
        """
        Returns info about a file or downloads the content.
        File is specified by it's UUID.
        If the `Content-Type: application/json` header is used in the
        request, the JSON record for the file is returned.
        Other `Content-Type`s return the content of the file.
        The filename isn't included in the URL, but is returned as
        part of the JSON data or via a `Content-Disposition` header
        in the response.
        See the [file/{uuid}/content/(unknown) docs](#operation/v1_file_content_read) for
        details about file content downloads where the filename is included in the URL
        (useful in cases where a tool assumes the URL path contains the filename)
        Examples:
        ### File record data as JSON
        **Request:**
        `Content-Type: application/json`
        `GET` http://laxy.org/api/v1/file/XXblafooXX/content/alignment.bam
        **Response:**
        ```json
        {
            "id": "XXblafooXX",
            "name": "alignment.bam",
            "location": "http://example.com/datasets/1/alignment.bam",
            "owner": "admin",
            "checksum": "md5:f3c90181aae57b887a38c4e5fe73db0c",
            "type_tags": ['bam', 'bam.sorted', 'alignment']
            "metadata": { }
        }
        ```
        To correctly set the filename:
        `wget --content-disposition http://laxy.org/api/v1/file/XXblafooXX/`
        <!--
        :param request: The request object.
        :type request: rest_framework.request.Request
        :param uuid: The URL-encoded UUID.
        :type uuid: str
        :return: The response object.
        :rtype: rest_framework.response.Response
        -->
        """
        content_type = get_content_type(request)
        if content_type == "application/json":
            return super().get(request, uuid)
        else:
            # File view/download is the default when no Content-Type is specified
            try:
                # Check existence and location before delegating to the
                # streaming mixin, so missing files give clean 404s rather
                # than storage-backend errors.
                try:
                    f = File.objects.get(id=uuid)
                except File.DoesNotExist as ex:
                    return HttpResponse(
                        status=status.HTTP_404_NOT_FOUND,
                        reason="File with this ID does not exist.",
                    )
                if not f.location:
                    return HttpResponse(
                        status=status.HTTP_404_NOT_FOUND,
                        reason="File has no default download location.",
                    )
                if "download" in request.query_params:
                    return super().download(uuid, filename=filename)
                else:
                    return super().view(uuid, filename=filename)
            except (
                ssh_exception.AuthenticationException,
                ssh_exception.SSHException,
                EOFError,
            ) as ex:
                # SFTP backend failures surface as 503, not a traceback.
                return HttpResponse(
                    status=status.HTTP_503_SERVICE_UNAVAILABLE,
                    reason="Error accessing file via SFTP storage backend",
                )
    @view_config(
        request_serializer=FileSerializer, response_serializer=PatchSerializerResponse
    )
    def patch(self, request, uuid=None, version=None):
        """
        Partial update of fields on File.
        If the header `Content-Type: application/merge-patch+json` is set,
        the `metadata` field is patched as per the specification in
        [RFC 7386](https://tools.ietf.org/html/rfc7386). eg, if the existing
        metadata was:
        ```json
        {"metadata": {"tags": ["A"], "name": "seqs.fastq.gz", "path": "/tmp"}}
        ```
        The patch in a request:
        ```json
        {"metadata": {"tags": ["B", "C"], "path": null}}
        ```
        Would change it to:
        ```json
        {"metadata": {"tags": ["B", "C"], "name": "seqs.fastq.gz"}}
        ```
        If `Content-Type: application/json-patch+json` is set, `metadata`
        should be an array of mutation operations to apply as per
        [RFC 6902](https://tools.ietf.org/html/rfc6902).
        <!--
        :param request:
        :type request:
        :param uuid:
        :type uuid:
        :param version:
        :type version:
        :return:
        :rtype:
        -->
        """
        # Give each patchable field a chance to consume a JSON-patch body;
        # fall through to a plain DRF partial update otherwise.
        for field in File.ExtraMeta.patchable_fields:
            resp = self._try_json_patch(request, field=field)
            if resp is not None:
                return resp
        return super(FileView, self).patch(request, uuid)
    @view_config(
        request_serializer=FileSerializerPostRequest, response_serializer=FileSerializer
    )
    def put(self, request: Request, uuid: str, version=None):
        """
        Replace the content of an existing File.
        <!--
        :param request: The request object.
        :type request: rest_framework.request.Request
        :return: The response object.
        :rtype: rest_framework.response.Response
        -->
        """
        return super(FileView, self).put(
            request, uuid, serializer_class=FileSerializerPostRequest
        )
class JobFileView(StreamFileMixin, GetMixin, JSONView):
queryset = Job.objects.all()
serializer_class = FileSerializer
parser_classes = (JSONParser,)
permission_classes = (IsOwner | IsSuperuser | HasReadonlyObjectAccessToken,)
@view_config(response_serializer=FileSerializer)
@etag_headers
def get(self, request: Request, uuid: str, file_path: str, version=None):
"""
Get a `File` by path, associated with this `Job`.
See the documentation for [file/{uuid}/content/ docs](#operation/v1_file_content_read)
endpoints for a description on how `Content-Types` and the `?download`
query strings are handled (JSON response vs. download vs. view).
Valid values for `file_path` are:
- `input`
- `output`
corresponding to the input and output FileSets respectively.
<!--
:param request:
:type request:
:param uuid:
:type uuid:
:param file_path:
:type file_path:
:return:
:rtype:
-->
"""
job = self.get_object()
if job is None:
return Response(
{"detail": f"Unknown job ID: {uuid}"}, status=status.HTTP_404_NOT_FOUND
)
fname = Path(file_path).name
fpath = Path(file_path).parent
file_obj = job.get_files().filter(name=fname, path=fpath).first()
if file_obj is None:
return Response(
{"detail": f"Cannot find file in job {uuid} by path/filename"},
status=status.HTTP_404_NOT_FOUND,
)
# serializer = self.get_serializer(instance=file_obj)
# return Response(serializer.data, status=status.HTTP_200_OK)
content_type = get_content_type(request)
if content_type == "application/json":
return super().get(request, file_obj.id)
else:
try:
# File view/download is the default when no Content-Type is specified
if "download" in request.query_params:
logger.debug(f"Attempting download of {file_obj.id}")
return super().download(file_obj, filename=fname)
else:
logger.debug(f"Attempting view in browser of {file_obj.id}")
return super().view(file_obj, filename=fname)
except (
ssh_exception.AuthenticationException,
ssh_exception.SSHException,
EOFError,
) as ex:
return HttpResponse(
status=status.HTTP_503_SERVICE_UNAVAILABLE,
reason="Error accessing file via SFTP storage backend",
)
# return super(FileView, self).get(request, file_obj.id)
@transaction.atomic()
@view_config(
    request_serializer=JobFileSerializerCreateRequest,
    response_serializer=FileSerializer,
)
def put(self, request: Request, uuid: str, file_path: str, version=None):
    """
    Create (or replace) a File record by job ID and path. This endpoint
    is generally intended to be called by the job script on a compute node
    to register files with specific `checksum`, `metadata`, `type_tags`
    and possibly `location` fields.

    `file_path` is the relative path of the file in the job directory. It
    must begin with `input/` or `output/`, corresponding to the input and
    output FileSets.

    Typically you should not set `location` - it is automatically generated
    to be a URL pointing to data accessible on the ComputeResource.
    `location` can be set if your job script manually stages the job files
    to another location (eg, stores outputs in an object store like S3).

    :param request: The request object.
    :type request: rest_framework.request.Request
    :param uuid: The Job UUID.
    :type uuid: str
    :param file_path: Relative path of the file within the job directory.
    :type file_path: str
    :return: The serialized File (201 on create, 200 on update), or an error.
    :rtype: rest_framework.response.Response
    """
    job = self.get_object()
    # Mirror the behaviour of GET on this view: a missing job is a 404,
    # not an AttributeError / 500 (previously there was no None check here).
    if job is None:
        return Response(
            {"detail": f"Unknown job ID: {uuid}"}, status=status.HTTP_404_NOT_FOUND
        )
    fname = Path(file_path).name
    fpath = Path(file_path).parent
    # A bare filename has no fileset prefix (Path('.').parts == ()), so
    # guard against an IndexError before inspecting the first component.
    if not fpath.parts:
        return Response(status=status.HTTP_404_NOT_FOUND)
    fileset_path = fpath.parts[0]
    if fileset_path == "output":
        fileset = job.output_files
    elif fileset_path == "input":
        fileset = job.input_files
    else:
        return Response(status=status.HTTP_404_NOT_FOUND)
    # Generate a File.location URL if not set explicitly
    data = dict(request.data)
    data["name"] = fname
    data["path"] = str(fpath)
    location = data.get("location", None)
    if not location:
        data["location"] = laxy_sftp_url(job, f"{fpath}/{fname}")
    elif not urlparse(location).scheme:
        return HttpResponse(
            status=status.HTTP_400_BAD_REQUEST,
            reason="Location must be a valid URL.",
        )
    # TODO: consider how best to handle file:// URLs here
    # file:// URLs could be used in the location field if the job
    # directories are mounted on both the compute node and the server.
    # We could treat them as a path relative to the job directory (given
    # that we know the job here).
    # We need to be careful when creating Files with file:// locations -
    # there is the potential for a tricky user to create locations
    # that point to anywhere on the server filesystem (eg absolute path to
    # /etc/passwd). For the moment they are disallowed here
    if urlparse(location).scheme == "file":
        return HttpResponse(
            status=status.HTTP_400_BAD_REQUEST,
            reason="file:// locations are not allowed " "using this API endpoint.",
        )
    # # we make the path relative, even if there is a leading /
    # cleaned = location.lstrip('file://').lstrip('/')
    # if '../' in cleaned:
    #     return HttpResponse(status=status.HTTP_400_BAD_REQUEST,
    #                         reason="file:// location cannot contain "
    #                                "../ in relative paths.")
    #
    # data['location'] = (f'laxy+file://'
    #                     f'{job.compute_resource.id}/{job_id}/{cleaned}')
    file_obj = fileset.get_files_by_path(file_path).first()
    if file_obj is None:
        # Create new file. Inferred location based on job+compute
        # We actually use the POST serializer to include name and path etc
        serializer = FileSerializerPostRequest(
            data=data, context={"request": request}
        )
        if serializer.is_valid():
            serializer.save()
            fileset.add(serializer.instance)
            data = self.response_serializer(serializer.instance).data
            return Response(data, status=status.HTTP_201_CREATED)
    else:
        # Update existing File
        serializer = self.request_serializer(
            file_obj, data=request.data, context={"request": request}
        )
        if serializer.is_valid():
            serializer.save()
            return Response(
                status=status.HTTP_200_OK, data=serializer.validated_data
            )
    return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
# TODO: This endpoint should properly set owner, location etc
class JobFileBulkRegistration(JSONView):
    queryset = Job.objects.all()
    serializer_class = JobSerializerResponse
    parser_classes = (
        JSONParser,
        CSVTextParserPandas,
    )
    permission_classes = (IsOwner | IsSuperuser,)

    @view_config(
        request_serializer=JobFileSerializerCreateRequest,
        response_serializer=JobSerializerResponse,
    )
    def post(self, request, uuid, version=None):
        """
        Bulk registration of Job files (input and output filesets).

        Use `Content-Type: text/csv`, with CSV or TSV like:

        ```
        checksum,filepath,metadata,type_tags
        md5:7d9960c77b363e2c2f41b77733cf57d4,input/some_dir/table.txt,{},"text,csv,google-sheets"
        md5:d0cfb796d371b0182cd39d589b1c1ce3,input/some_dir/sample1_R2.fastq.gz,{},fastq
        md5:a97e04b6d1a0be20fcd77ba164b1206f,input/some_dir/sample2_R2.fastq.gz,{},fastq
        md5:7c9f22c433ae679f0d82b12b9a71f5d3,output/sample2/alignments/sample2.bam,{"some":"metdatas"},"bam,alignment,bam.sorted,jbrowse"
        md5:e57ea180602b69ab03605dad86166fa7,output/sample2/alignments/sample2.bai,{},"bai,jbrowse"
        ```

        File paths must begin with `input` or `output`.

        A `location` column can also be added with a URL to specify the location of files.
        You should only use this if the job stages files itself to another location
        (eg S3, Object store, ftp:// or sftp:// location).
        Otherwise Laxy handles creating the correct `location` field.

        :param request: The request object.
        :type request: rest_framework.request.Request
        :param uuid: The Job UUID.
        :type uuid: str
        :return: The response object.
        :rtype: rest_framework.response.Response
        """
        job = self.get_object()
        content_type = get_content_type(request)
        if content_type == "application/json":
            serializer = self.request_serializer(data=request.data, many=True)
            if serializer.is_valid():
                # TODO: accept JSON for bulk file registration
                # separate into input and output files, add files to
                # job.input_files and job.output_files
                pass
            raise NotImplementedError()
        elif content_type == "text/csv":
            tsv_table = request.stream.read()
            infiles, outfiles = job.add_files_from_tsv(tsv_table)
            i = FileSerializer(infiles, many=True)
            o = FileSerializer(outfiles, many=True)
            resp_data = {"input_files": i.data, "output_files": o.data}
            return Response(resp_data, status=status.HTTP_200_OK)
        else:
            # Previously this fell through returning None (a server error in
            # Django) - make unsupported content types an explicit 415.
            return Response(None, status=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE)
class FileSetCreate(PostMixin, JSONView):
    queryset = FileSet.objects.all()
    serializer_class = FileSetSerializer
    # permission_classes = (DjangoObjectPermissions,)

    @view_config(
        request_serializer=FileSetSerializerPostRequest,
        response_serializer=FileSetSerializer,
    )
    def post(self, request: Request, version=None):
        """
        Create a new FileSet record; its UUID is assigned automatically.

        :param request: The request object.
        :type request: rest_framework.request.Request
        :return: The response object.
        :rtype: rest_framework.response.Response
        """
        return super().post(request)
class FileSetView(GetMixin, DeleteMixin, PatchMixin, JSONView):
    queryset = FileSet.objects.all()
    serializer_class = FileSetSerializer
    permission_classes = (IsAuthenticated | FileSetHasAccessTokenForJob,)
    # permission_classes = (DjangoObjectPermissions,)

    # @method_decorator(cache_page(60 * 60 * 1))
    @view_config(response_serializer=FileSetSerializer)
    @etag_headers
    def get(self, request: Request, uuid, version=None):
        """
        Fetch a single FileSet record, addressed by UUID.

        :param request: The request object.
        :type request: rest_framework.request.Request
        :param uuid: The URL-encoded UUID.
        :type uuid: str
        :return: The response object.
        :rtype: rest_framework.response.Response
        """
        return super().get(request, uuid)

    @view_config(
        request_serializer=FileSetSerializer,
        response_serializer=PatchSerializerResponse,
    )
    def patch(self, request, uuid, version=None):
        """
        Partially update a FileSet record, addressed by UUID.
        """
        return super().patch(request, uuid)
class SampleCartCreateUpdate(JSONView):
    queryset = SampleCart.objects.all()
    serializer_class = SampleCartSerializer
    parser_classes = (
        JSONParser,
        MultiPartParser,
        CSVTextParser,
    )

    def create_update(self, request, obj):
        """
        Replaces an existing SampleCart with new content, or creates a new one if `uuid` is None.

        Accepts `multipart/form-data` (a CSV upload in the `file` field),
        `text/csv` (CSV in the POST body) or `application/json`.

        :param obj: The SampleCart instance to populate (may be unsaved).
        :type obj: SampleCart
        :param request: The request object.
        :type request: rest_framework.request.Request
        :return: The response object.
        :rtype: rest_framework.response.Response
        """
        content_type = get_content_type(request)
        # We use utf-8-sig to strip any initial byte order mark BOM (\ufeff) character
        encoding = "utf-8-sig"
        if content_type == "multipart/form-data":
            if not obj.name:
                obj.name = "CSV uploaded on %s" % datetime.isoformat(timezone.now())
            fh = request.data.get("file", None)
            # Guard against a missing 'file' part - previously this raised an
            # AttributeError (HTTP 500) rather than reporting a client error.
            if fh is None:
                return HttpResponse(
                    status=status.HTTP_400_BAD_REQUEST,
                    reason="Missing 'file' field in multipart/form-data request.",
                )
            csv_table = fh.read().decode(encoding)
            obj.from_csv(csv_table)
            return Response(
                self.get_serializer(instance=obj).data, status=status.HTTP_200_OK
            )
        elif content_type == "text/csv":
            if not obj.name:
                obj.name = "CSV uploaded on %s" % datetime.isoformat(timezone.now())
            # CSVTextParser ensures request.data is already parsed as a list of lists
            csv_table = request.data
            obj.from_csv(csv_table)
            return Response(
                self.get_serializer(instance=obj).data, status=status.HTTP_200_OK
            )
        elif content_type == "application/json":
            if not obj.name:
                obj.name = "Sample set created on %s" % datetime.isoformat(
                    timezone.now()
                )
            serializer = self.get_serializer(instance=obj, data=request.data)
            if serializer.is_valid():
                obj = serializer.save(owner=request.user)
                return Response(serializer.data, status=status.HTTP_200_OK)
            else:
                return Response(serializer.data, status=status.HTTP_400_BAD_REQUEST)
        else:
            return Response(None, status=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE)
class SampleCartCreate(SampleCartCreateUpdate):
    # permission_classes = (DjangoObjectPermissions,)

    @view_config(
        request_serializer=SampleCartSerializer,
        response_serializer=SampleCartSerializer,
    )
    def post(self, request: Request, version=None):
        """
        Create a new SampleCart. UUIDs are autoassigned.

        `samples` is an object keyed by sample name, with a list of files
        grouped by 'merge group' and pair (a 'merge group' could be a set of
        equivalent lanes the sample was split across, or a technical replicate):

        Equivalent samples (technical replicates) in different lanes can be merged -
        they could also be thought of as split FASTQ files.

        Several content-types are supported:

        - `application/json` (accepting JSON objects below)
        - `text/csv` where the POST body is CSV text as in:
          https://tools.ietf.org/html/rfc4180
        - `multipart/form-data` where the `file` field is the CSV file.

        CSV example:

        ```csv
        SampleA,ftp://bla_lane1_R1.fastq.gz,ftp://bla_lane1_R2.fastq.gz
        SampleA,ftp://bla_lane2_R1.fastq.gz,ftp://bla_lane2_R2.fastq.gz
        SampleB,ftp://bla2_R1_001.fastq.gz,ftp://bla2_R2_001.fastq.gz
        ,ftp://bla2_R1_002.fastq.gz,ftp://bla2_R2_002.fastq.gz
        SampleC,ftp://foo2_lane4_1.fastq.gz,ftp://foo2_lane4_2.fastq.gz
        SampleC,ftp://foo2_lane5_1.fastq.gz,ftp://foo2_lane5_2.fastq.gz
        ```

        Columns are sampleName, R1 file, R2 file.
        Repeated sample names represent 'merge groups' (eg additional lanes
        containing technical replicates).

        JSON request body example:

        A single 'sampleName' actually corresponds to a
        Sample+Condition+BiologicalReplicate.

        For two samples (R1, R2 paired end) split across two lanes, using
        File UUIDs:

        *TODO*: Change this to files: [{R1: {location: "http://foo/bla.txt", name: "bla.txt}] form
        shaped like a subset of models.File fields.

        ```json
        {
            "name": "My New Sample Set",
            "samples": [
                {
                    "name": "sample_wildtype",
                    files: [
                        {
                            "R1": "2VSd4mZvmYX0OXw07dGfnV",
                            "R2": "3XSd4mZvmYX0OXw07dGfmZ"
                        },
                        {
                            "R1": "Toopini9iPaenooghaquee",
                            "R2": "Einanoohiew9ungoh3yiev"
                        }]
                },
                {
                    "name": "sample_mutant",
                    "files": [
                        {
                            "R1": "zoo7eiPhaiwion6ohniek3",
                            "R2": "ieshiePahdie0ahxooSaed"
                        },
                        {
                            "R1": "nahFoogheiChae5de1iey3",
                            "R2": "Dae7leiZoo8fiesheech5s"
                        }]
                }
            ]
        }
        ```

        :param request: The request object.
        :type request: rest_framework.request.Request
        :return: The response object.
        :rtype: rest_framework.response.Response
        """
        data = request.data
        # request.data may not be dict-like (eg raw CSV text), so probe for .get
        cart_name = data.get("name", None) if hasattr(data, "get") else None
        cart = SampleCart(name=cart_name, owner=request.user)
        return self.create_update(request, cart)
class SampleCartView(GetMixin, DeleteMixin, SampleCartCreateUpdate):
    # permission_classes = (DjangoObjectPermissions,)

    @view_config(response_serializer=SampleCartSerializer)
    @etag_headers
    def get(self, request: Request, uuid, version=None):
        """
        Returns info about a SampleCart, specified by UUID.

        <!--
        :param request: The request object.
        :type request: rest_framework.request.Request
        :param uuid: The URL-encoded UUID.
        :type uuid: str
        :return: The response object.
        :rtype: rest_framework.response.Response
        -->
        """
        return super(SampleCartView, self).get(request, uuid)

    @view_config(
        request_serializer=SampleCartSerializer,
        response_serializer=PutSerializerResponse,
    )
    def put(self, request, uuid, version=None):
        """
        Replace the content of an existing SampleCart, specified by UUID.
        The `id` field cannot be changed (a 400 is returned if supplied).
        """
        obj = self.get_object()
        if "id" in request.data:
            return HttpResponse(
                status=status.HTTP_400_BAD_REQUEST, reason="id cannot be updated"
            )
        # request.data isn't guaranteed to be a JSON-derived dict
        sample_name = None
        if hasattr(request.data, "get"):
            sample_name = request.data.get("name", None)
        if sample_name is not None:
            obj.name = sample_name
        return self.create_update(request, obj)

    # TODO: CSV upload doesn't append/merge, it always creates a new SampleCart.
    #       Implement PATCH method so we can append/merge an uploaded CSV rather
    #       than just replace wholesale
    #
    # @view_config(request_serializer=SampleCartSerializer,
    #              response_serializer=PatchSerializerResponse)
    # def patch(self, request, uuid, version=None):
    #     return super(SampleCartView, self).patch(request, uuid)
class ComputeResourceView(GetMixin, DeleteMixin, JSONView):
    queryset = ComputeResource.objects.all()
    serializer_class = ComputeResourceSerializer
    permission_classes = (IsAdminUser,)

    def get(self, request: Request, uuid, version=None):
        """
        Returns info about a ComputeResource, specified by UUID.

        :param request: The request object.
        :type request: rest_framework.request.Request
        :param uuid: The URL-encoded UUID.
        :type uuid: str
        :return: The response object.
        :rtype: rest_framework.response.Response
        """
        return super().get(request, uuid)

    @view_config(
        request_serializer=ComputeResourceSerializer,
        response_serializer=PatchSerializerResponse,
    )
    def patch(self, request: Request, uuid, version=None):
        """
        Updates a ComputeResource record. Since this is a PATCH request,
        partial updates are allowed.

        **Side effect:** for disposable compute resources changing
        `status` to `decommissioned` or `terminating` will
        shutdown / terminate this resource so it is no longer available.

        :param request:
        :type request: rest_framework.request.Request
        :param uuid: The compute resource UUID.
        :type uuid: str
        :return:
        :rtype: rest_framework.response.Response
        """
        obj = self.get_object()
        serializer = self.get_serializer(instance=obj, data=request.data, partial=True)
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

        requested_status = serializer.validated_data.get("status", None)
        currently_up = obj.status in (
            ComputeResource.STATUS_STARTING,
            ComputeResource.STATUS_ONLINE,
        )
        shutting_down = requested_status in (
            ComputeResource.STATUS_DECOMMISSIONED,
            ComputeResource.STATUS_TERMINATING,
        )
        if currently_up and shutting_down:
            # remove the status field supplied in the request;
            # the dispose task will update the status in the database itself
            serializer.validated_data.pop("status")
            obj.dispose()

        serializer.save()
        return Response(status=status.HTTP_204_NO_CONTENT)
class ComputeResourceCreate(PostMixin, JSONView):
    queryset = ComputeResource.objects.all()
    serializer_class = ComputeResourceSerializer
    permission_classes = (IsAdminUser,)

    @view_config(
        request_serializer=ComputeResourceSerializer,
        response_serializer=ComputeResourceSerializer,
    )
    def post(self, request: Request, version=None):
        """
        Create a new ComputeResource record; its UUID is assigned automatically.

        The `extra` field is a JSON object. Attributes may include:
          * `username` - the login name to access the ComputeResource
          * `private_key` - a Base64 encoded SSH private key (eg, generated on
            the commandline like `base64 < ~/.ssh/id_rsa`).
          * `base_dir` - the absolute path to where job processing directories
            will be created on the ComputeResource.
          * `queue_type` - `slurm`, `local` or another custom value - determines
            how to do job submission, monitoring and cancellation for this host.
            This is passed to the job script (eg `run_job.sh`), to tell it to
            either submit long-running tasks to a SLURM queue (via sbatch/srun),
            or run all processes on the local compute node, or do something else.
          * `slurm` (optional) - configuration options for SLURM jobs. `account`
            is used for the `sbatch --account=` option, `extra_args` are any
            additional commandline arguments (eg `--partition=fast`) to add to
            sbatch calls.

        :param request: The request object.
        :type request: rest_framework.request.Request
        :return: The response object.
        :rtype: rest_framework.response.Response
        """
        return super().post(request)
class PipelineRunCreate(PostMixin, JSONView):
    queryset = PipelineRun.objects.all()
    serializer_class = PipelineRunCreateSerializer
    # permission_classes = (DjangoObjectPermissions,)

    @view_config(
        request_serializer=PipelineRunCreateSerializer,
        response_serializer=PipelineRunSerializer,
    )
    def post(self, request: Request, version=None):
        """
        Create a new PipelineRun record; its UUID is assigned automatically.

        :param request: The request object.
        :type request: rest_framework.request.Request
        :return: The response object.
        :rtype: rest_framework.response.Response
        """
        return super().post(request)
class PipelineRunView(GetMixin, DeleteMixin, PutMixin, PatchMixin, JSONView):
    queryset = PipelineRun.objects.all()
    serializer_class = PipelineRunSerializer
    # permission_classes = (DjangoObjectPermissions,)

    @view_config(response_serializer=PipelineRunSerializer)
    @etag_headers
    def get(self, request: Request, uuid, version=None):
        """
        Fetch a single PipelineRun record, addressed by UUID.

        :param request: The request object.
        :type request: rest_framework.request.Request
        :param uuid: The URL-encoded UUID.
        :type uuid: str
        :return: The response object.
        :rtype: rest_framework.response.Response
        """
        return super().get(request, uuid)

    @view_config(
        request_serializer=PipelineRunSerializer,
        response_serializer=PipelineRunSerializer,
    )
    def patch(self, request, uuid, version=None):
        """
        Partially update a PipelineRun record, addressed by UUID.
        """
        return super().patch(request, uuid)

    @view_config(
        request_serializer=PipelineRunCreateSerializer,
        response_serializer=PipelineRunSerializer,
    )
    def put(self, request: Request, uuid: str, version=None):
        """
        Replace the content of an existing PipelineRun, addressed by UUID.

        :param request: The request object.
        :type request: rest_framework.request.Request
        :param uuid: The PipelineRun id to update.
        :type uuid: str
        :return: The response object.
        :rtype: rest_framework.response.Response
        """
        return super().put(request, uuid, serializer_class=PipelineRunCreateSerializer)
class JobView(JSONPatchMixin, JSONView):
    queryset = Job.objects.all()
    serializer_class = JobSerializerResponse
    permission_classes = (IsOwner | IsSuperuser | HasReadonlyObjectAccessToken,)
    # permission_classes = (DjangoObjectPermissions,)
    parser_classes = (JSONParser, JSONPatchRFC7386Parser, JSONPatchRFC6902Parser)

    @view_config(response_serializer=JobSerializerResponse)
    @etag_headers
    def get(self, request: Request, uuid, version=None):
        """
        Returns info about a Job, specified by Job ID (UUID).

        :param request: The request object.
        :type request: rest_framework.request.Request
        :param uuid: The URL-encoded UUID.
        :type uuid: str
        :return: The response object.
        :rtype: rest_framework.response.Response
        """
        obj = self.get_object()
        serializer = self.get_serializer(instance=obj)
        return Response(serializer.data, status=status.HTTP_200_OK)

    @view_config(
        request_serializer=JobSerializerRequest,
        response_serializer=PatchSerializerResponse,
    )
    def patch(self, request: Request, uuid, version=None):
        """
        The main purpose of this endpoint is to update job `status` and
        `exit_code`. Setting `exit_code` automatically updates the job status
        (zero implies 'complete', non-zero is 'failed').

        Note that in some cases updating job `status` may have side-effects
        beyond simply updating the Job record.
        Eg, changing `status` to "complete", "cancelled" or "failed" may
        terminate the associated compute instance if it was a single-job
        disposable ComputeResource, or trigger movement or cleanup of
        staged / temporary / intermediate files.

        Valid job statuses are:

          * "created"
          * "hold"
          * "starting"
          * "running"
          * "failed"
          * "cancelled"
          * "complete"

        Also supports json patching for params and metadata if the `Content-Type`
        is `application/merge-patch+json` or `application/json-patch+json`.

        :param request:
        :type request: rest_framework.request.Request
        :param uuid: The Job id.
        :type uuid: str
        :return:
        :rtype: rest_framework.response.Response
        """
        job = self.get_object()
        original_status = job.status
        patchable_fields = Job.ExtraMeta.patchable_fields
        if self._is_json_patch_content_type(request):
            for field in patchable_fields:
                request = self._patch_request(request, obj=job, field=field)
        else:
            if any([request.data.get(field, None) for field in patchable_fields]):
                return HttpResponse(
                    status=status.HTTP_400_BAD_REQUEST,
                    reason=f"Invalid Content-Type for PATCH on: {', '.join(patchable_fields)}. "
                    f"Use application/merge-patch+json or application/json-patch+json",
                )
        serializer = self.get_serializer(instance=job, data=request.data, partial=True)
        if serializer.is_valid():
            # Don't allow cancelled jobs to be updated to any other
            # status via the API
            if original_status == Job.STATUS_CANCELLED:
                _expiry = job.expiry_time or get_job_expiry_for_status(
                    Job.STATUS_CANCELLED
                )
                serializer.save(status=original_status, expiry_time=_expiry)
                return Response(status=status.HTTP_204_NO_CONTENT)
            # Providing only an exit_code sets job status
            new_status = serializer.validated_data.get("status", None)
            exit_code = serializer.validated_data.get("exit_code", None)
            if new_status is None and exit_code is not None:
                if exit_code == 0:
                    serializer.validated_data.update(status=Job.STATUS_COMPLETE)
                else:
                    serializer.validated_data.update(status=Job.STATUS_FAILED)
                new_status = serializer.validated_data.get("status", None)
            expiry = get_job_expiry_for_status(new_status)
            task_data = dict(
                job_id=uuid,
                status=new_status,
                # tarball_size_use_heuristic=True,
            )
            status_changed_to = None
            if new_status is not None and new_status != original_status:
                status_changed_to = new_status
            ingestion_delay_time = 30  # seconds
            if status_changed_to in [
                Job.STATUS_COMPLETE,
                Job.STATUS_FAILED,
            ]:
                # We don't update the status yet - an async task will do this after file indexing is complete
                serializer.save(status=original_status, expiry_time=expiry)
                task_list = []
                if job.compute_resource and job.compute_resource.archive_host:
                    task_data["dst_compute_id"] = job.compute_resource.archive_host_id
                    task_list.extend(
                        [
                            index_remote_files.s(task_data=task_data),
                            set_job_status.s(),
                            # We only determine a rough tarball estimate initially (use_heuristic=True)
                            # Since optional=True, later tasks will run even if estimate_job_tarball_size fails
                            estimate_job_tarball_size.s(
                                optional=True, use_heuristic=True
                            ),
                            bulk_move_job_rsync.s(),
                            # move_job_files_to_archive_task is an alternative to bulk_move_job_rsync
                            # that doesn't use rsync but moves the job file by file. It's generally slower.
                            # Since moving files is intended to be idempotent, we can run this here to
                            # catch anything that failed to rsync, somehow.
                            # move_job_files_to_archive_task.s(),
                            # After moving the files, estimate an accurate tarball size
                            estimate_job_tarball_size.s(
                                optional=True, use_heuristic=False
                            ),
                        ]
                    )
                    result = celery.chain(task_list).apply_async(
                        countdown=ingestion_delay_time,  # we give a short delay for the run_job.sh script to finish before ingestion begins
                        link_error=_finalize_job_task_err_handler.s(job_id=job.id),
                    )
                else:
                    result = celery.chain(
                        index_remote_files.s(task_data=task_data),
                        set_job_status.s(),
                        estimate_job_tarball_size.s(optional=True, use_heuristic=False),
                    ).apply_async(
                        countdown=ingestion_delay_time,  # we give a short delay for the run_job.sh script to finish before ingestion begins
                        link_error=_finalize_job_task_err_handler.s(job_id=job.id),
                    )
            # For a cancelled job we kill running tasks first, before other usual tasks
            elif status_changed_to == Job.STATUS_CANCELLED:
                # BUGFIX: this was `==` (a no-op comparison), so downstream
                # tasks never received the cancelled status in task_data.
                task_data["status"] = Job.STATUS_CANCELLED
                task_data["ttl"] = getattr(settings, "JOB_EXPIRY_TTL_CANCELLED", 0)
                serializer.save(expiry_time=expiry, status=Job.STATUS_CANCELLED)
                cancel_tasks = [
                    kill_remote_job.s(task_data),
                    index_remote_files.s(),
                    expire_old_job.s(),
                ]
                if job.compute_resource and job.compute_resource.archive_host:
                    cancel_tasks.append(bulk_move_job_rsync.s())
                cancel_tasks.append(
                    estimate_job_tarball_size.s(optional=True, use_heuristic=False)
                )
                celery.chain(cancel_tasks).apply_async(
                    countdown=ingestion_delay_time,
                    link_error=_finalize_job_task_err_handler.s(job_id=job.id),
                )
            else:
                serializer.save(expiry_time=expiry)
            return Response(status=status.HTTP_204_NO_CONTENT)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def delete(self, request: Request, uuid, version=None):
        """
        Delete a Job record; disposes of its ComputeResource first if that
        resource is disposable.

        :param request: The request object.
        :type request: rest_framework.request.Request
        :param uuid: A job UUID.
        :type uuid: str
        :return: The response object.
        :rtype: rest_framework.response.Response
        """
        job = self.get_object()
        # compute_resource may be unset (eg job never dispatched) - guard the
        # attribute access, matching the null checks used in patch() above.
        if job.compute_resource and job.compute_resource.disposable:
            job.compute_resource.dispose()
        job.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
def get_abs_backend_url(
    path_query_frag: str, request: Union[None, Request] = None
) -> str:
    """
    Given a path?query=string#and_fragment (eg produced by the django 'reverse' function),
    return the absolute URL to the API backend.

    Optionally, a request object can be provided to help guess the scheme, FQDN or hostname and port.

    If the FRONTEND_API_URL setting (LAXY_FRONTEND_API_URL environment variable) is defined,
    this is used as the hostname:port. If no scheme is specified in FRONTEND_API_URL, the
    USE_SSL setting determines the scheme.

    :param path_query_frag: A relative path (with optional query string / fragment).
    :type path_query_frag: str
    :param request: An optional request used to infer scheme/host/port.
    :type request: Union[None, rest_framework.request.Request]
    :return: The absolute backend URL.
    :rtype: str
    """
    api_baseurl = settings.FRONTEND_API_URL
    if request is None and not api_baseurl:
        raise ValueError(
            "LAXY_FRONTEND_API_URL is not set; a request arg must be provided."
        )
    if request is not None:
        url = urlparse(request.build_absolute_uri(path_query_frag))
    else:
        # BUGFIX: previously `url` was left unbound on this branch, raising
        # UnboundLocalError below. Start from the bare path; netloc/scheme
        # are filled in from FRONTEND_API_URL (guaranteed set here).
        url = urlparse(path_query_frag)
    if api_baseurl:
        apiurl = urlparse(api_baseurl)
        url = url._replace(netloc=apiurl.netloc)
        if apiurl.scheme:
            url = url._replace(scheme=apiurl.scheme)
    if not url.scheme:
        url = url._replace(scheme="https" if settings.USE_SSL else "http")
    return url.geturl()
def add_sanitized_names_to_samplecart_json(cart_json):
    """
    Return a shallow copy of a SampleCart JSON dict with a
    `sanitized_filename` added to every file entry and a `sanitized_name`
    added to every sample. When a sample has no usable `name`, its sanitized
    name is derived from the longest common prefix of its filenames.

    :param cart_json: A SampleCart as a JSON-derived dict (with a `samples` list).
    :type cart_json: dict
    :return: The updated copy of the SampleCart dict.
    :rtype: dict
    """
    updated_json = dict(cart_json)
    samples = updated_json.get("samples", [])
    for s in samples:
        sane_prefixes = []
        for f in s.get("files", []):
            for paircode in f.keys():
                url = f[paircode]["location"]
                fn, _ = find_filename_and_size_from_url(url, sanitize_name=True)
                f[paircode]["sanitized_filename"] = fn
                sane_prefixes.append(simplify_fastq_name(fn))
        # Use .get so a sample lacking a `name` key doesn't raise KeyError
        # (previously this indexed s["name"] directly).
        sample_name = s.get("name", None)
        # If the sample has a non-empty `name` add a sanitized version
        if sample_name is not None and sample_name.strip():
            s["sanitized_name"] = sanitize_filename(sample_name)
        else:
            # when no sample `name` is set, derive a sanitized one from the associated filenames
            s["sanitized_name"] = longest_common_prefix(sane_prefixes)
    return updated_json
@shared_task(bind=True)
def _task_err_handler(cxt=None, ex=None, job_id=None):
    """
    Celery error-callback: mark the given job as failed and release its
    compute resource if that resource is disposable.
    """
    if job_id is None:
        return
    failed_job = Job.objects.get(id=job_id)
    failed_job.status = Job.STATUS_FAILED
    failed_job.save()
    compute = failed_job.compute_resource
    if compute and compute.disposable:
        compute.dispose()
class JobCreate(JSONView):
    queryset = Job.objects.all()
    serializer_class = JobSerializerRequest

    @view_config(
        request_serializer=JobSerializerRequest,
        response_serializer=JobSerializerResponse,
    )
    def post(self, request: Request, version=None):
        """
        Create a new Job. UUIDs are autoassigned.

        If the query parameter `?pipeline_run_id={uuid}` is
        provided, `params` is populated with the serialized
        PipelineRun instance.

        :param request: The request object.
        :type request: rest_framework.request.Request
        :return: The response object.
        :rtype: rest_framework.response.Response
        """
        # setattr(request, '_dont_enforce_csrf_checks', True)
        pipeline_run_id = request.query_params.get("pipeline_run_id", None)
        samplecart_id = None
        if pipeline_run_id:
            try:
                pipelinerun_obj = PipelineRun.objects.get(id=pipeline_run_id)
                pipelinerun = PipelineRunSerializer(pipelinerun_obj).data
                samplecart = pipelinerun.get("sample_cart", {})
                if samplecart is None:
                    samplecart = {}
                samplecart_id = samplecart.get("id", None)
                pipelinerun["pipelinerun_id"] = str(pipelinerun["id"])
                del pipelinerun["id"]
                request.data["params"] = json.dumps(pipelinerun)
            except PipelineRun.DoesNotExist:
                return HttpResponse(
                    reason="pipeline_run %s does not exist" % pipeline_run_id,
                    status=status.HTTP_400_BAD_REQUEST,
                )
        if request.data.get("params"):
            _params = json.loads(request.data["params"])
            #
            # TODO: Also add sanitize_filename to every file in the _params["fetch_files"] list
            #
            if _params.get("sample_cart", None) is not None:
                _params["sample_cart"] = add_sanitized_names_to_samplecart_json(
                    _params["sample_cart"]
                )
            request.data["params"] = json.dumps(_params)
        serializer = self.request_serializer(
            data=request.data, context={"request": request}
        )
        if serializer.is_valid():
            job_status = serializer.validated_data.get("status", "")
            if job_status != "" and job_status != Job.STATUS_HOLD:
                return HttpResponse(
                    reason='status can only be set to "%s" '
                    "or left unset for job creation" % Job.STATUS_HOLD,
                    status=status.HTTP_400_BAD_REQUEST,
                )
            job = serializer.save()  # owner=request.user)
            # We associate the previously created SampleCart with our new Job object
            # (SampleCarts effectively should be readonly once associated with a Job).
            if samplecart_id:
                samplecart = SampleCart.objects.get(id=samplecart_id)
                samplecart.job = job
                samplecart.save()
            if not job.compute_resource:
                default_compute = _get_default_compute_resource(job)
                job.compute_resource = default_compute
                job.save()
            if job.status == Job.STATUS_HOLD:
                return Response(serializer.data, status=status.HTTP_200_OK)
            job_id = job.id
            job = Job.objects.get(id=job_id)
            callback_url = get_abs_backend_url(
                reverse("laxy_backend:job", args=[job.id]), request
            )
            job_event_url = get_abs_backend_url(
                reverse("laxy_backend:create_job_eventlog", args=[job.id]), request
            )
            job_file_bulk_url = get_abs_backend_url(
                reverse("laxy_backend:job_file_bulk", args=[job_id]), request
            )
            # DRF API key
            # token, _ = Token.objects.get_or_create(user=request.user)
            # callback_auth_header = 'Authorization: Token %s' % token.key
            # JWT access token for user (expiring by default, so better)
            # TODO: Set the expiry of this token based on the maximum allowed job walltime
            callback_auth_header = get_jwt_user_header_str(request.user.username)
            pipeline_name = job.params.get("pipeline", None)
            try:
                pipeline_obj = Pipeline.objects.get(name=pipeline_name)
            except Pipeline.DoesNotExist:
                # (fixed a stray trailing ." in this error message)
                return HttpResponse(
                    reason=f'Pipeline "{pipeline_name}" does not exist.',
                    status=status.HTTP_404_NOT_FOUND,
                )
            if not pipeline_obj.public and not pipeline_obj.allowed_to_run(
                request.user
            ):
                return HttpResponse(
                    reason='Sorry, you do not have permission to run the pipeline "%s"'
                    % pipeline_name,
                    status=status.HTTP_403_FORBIDDEN,
                )
            # TODO: Given a pipeline, look up the appropriate validation and param prep functions,
            #       call these on job.params (or pipeline_run.params ?)
            #       We need to refactor out the RNAsik specific stuff here, maybe into a
            #       laxy_backend/pipelines/rnasik.py and laxy_backend/pipelines/seqkit_stats.py,
            #       and maybe some generic stuff in laxy_backend/pipelines/__init__.py
            #
            # validate_pipeline_params[pipeline_name](job.params)
            # run_job_params = prepare_pipeline_params[pipeline_name](job.params)

            # TODO: Maybe use the mappings in templates/genomes.json
            #       Maybe do all genome_id to path resolution in run_job.sh
            reference_genome_id = job.params.get("params").get("genome", None)
            # TODO: Validate that pipeline_version and pipeline_aligner are
            #       one of the valid values, as per reference_genome_id.
            #       We really need a consistent way / pattern to do this
            #       server-side 'form validation', maybe with a DRF Serializer.
            default_pipeline_version = (
                "default"  # "1.5.4" # '1.5.1+c53adf6' # '1.5.1'
            )
            pipeline_version = job.params.get("params").get(
                "pipeline_version", default_pipeline_version
            )
            pipeline_aligner = job.params.get("params").get("pipeline_aligner", "star")
            # TODO: This ID check should probably move into the PipelineRun
            #       params serializer.
            reference_genome_fasta_url = pydash.get(
                job.params, "params.user_genome.fasta_url", None
            )
            reference_genome_annotation_url = pydash.get(
                job.params, "params.user_genome.annotation_url", None
            )
            # Fail if a genome was requested (by ID, or by a full custom
            # fasta+annotation URL pair) but the ID is unknown and no custom
            # genome URLs were supplied.
            # BUGFIX: the annotation URL checks below previously re-tested
            # reference_genome_fasta_url twice (copy-paste), so
            # reference_genome_annotation_url was never consulted.
            if (
                (
                    reference_genome_id
                    or (reference_genome_fasta_url and reference_genome_annotation_url)
                )
                and reference_genome_id not in REFERENCE_GENOME_MAPPINGS
                and not reference_genome_fasta_url
                and not reference_genome_annotation_url
                # TODO: Check URLS are valid with http/https/ftp scheme
            ):
                job.status = Job.STATUS_FAILED
                job.save()
                # job.delete()
                return HttpResponse(
                    reason="Unknown reference genome",
                    status=status.HTTP_400_BAD_REQUEST,
                )
            slurm_config = job.compute_resource.extra.get("slurm", {})
            # slurm_account = slurm_config.get("account", None)
            slurm_extra_args = slurm_config.get("extra_args", None)
            environment = dict(
                DEBUG=sh_bool(getattr(settings, "DEBUG", False)),
                IGNORE_SELF_SIGNED_CERTIFICATE=sh_bool(False),
                JOB_ID=job_id,
                JOB_PATH=job.abs_path_on_compute,
                JOB_COMPLETE_CALLBACK_URL=callback_url,
                JOB_EVENT_URL=job_event_url,
                JOB_FILE_REGISTRATION_URL=job_file_bulk_url,
                JOB_INPUT_STAGED=sh_bool(False),
                REFERENCE_GENOME=shlex.quote(reference_genome_id),
                PIPELINE_VERSION=shlex.quote(pipeline_version),
                PIPELINE_ALIGNER=shlex.quote(pipeline_aligner),
                QUEUE_TYPE=job.compute_resource.queue_type or "local",
                # BDS_SINGLE_NODE=sh_bool(False),
                # SLURM_ACCOUNT=slurm_account or "",
                SLURM_EXTRA_ARGS=slurm_extra_args or "",
            )
            task_data = dict(
                job_id=job_id,
                clobber=False,
                # this is job.params
                # pipeline_run_config=pipeline_run.to_json(),
                # gateway=settings.CLUSTER_MANAGEMENT_HOST,
                # We don't pass JOB_AUTH_HEADER as 'environment'
                # since we don't want it to leak into the shell env
                # or any output of the run_job.sh script.
                job_auth_header=callback_auth_header,
                environment=environment,
            )
            result = start_job.apply_async(
                args=(task_data,), link_error=_task_err_handler.s(job_id=job_id)
            )
            # TODO: Make this error handler work.
            # .apply_async(link_error=self._task_err_handler.s(job_id))

            # Re-fetch so the response reflects any state set by the task layer
            job = Job.objects.get(id=job_id)
            if result.state == "FAILURE":
                raise result.result
            serializer = self.response_serializer(job)
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class JobPagination(PageNumberPagination):
    """Default pagination for Job list endpoints (10 per page, capped at 100)."""

    # Number of results per page when the client doesn't request a size.
    page_size = 10
    # Query string parameter selecting the page, eg ?page=2
    page_query_param = "page"
    # Query string parameter overriding the page size, eg ?page_size=50
    page_size_query_param = "page_size"
    # Hard upper bound on client-requested page sizes.
    max_page_size = 100
class JobListView(generics.ListAPIView):
    """
    Retrieve a list of jobs. Can return:

    - JSON format (`Accept: application/json` request header,
      or call with a `.json` extension like `api/v1/jobs.json`)
    - CSV format (`Accept: text/csv` request header,
      or call with a `.csv` extension like `api/v1/jobs.csv`)
    """

    renderer_classes = tuple(
        getattr(api_settings, "DEFAULT_RENDERER_CLASSES", tuple())
    ) + (PaginatedCSVRenderer,)
    serializer_class = JobListSerializerResponse
    permission_classes = (IsOwner | IsSuperuser | HasReadonlyObjectAccessToken,)
    pagination_class = JobPagination

    def _renders_csv(self) -> bool:
        # True when content negotiation selected the CSV renderer.
        return self.request.accepted_renderer.format == "csv"

    def get_serializer_class(self):
        # CSV output uses a flat serializer better suited to tabular rows.
        if self._renders_csv():
            return JobListSerializerResponse_CSV
        return self.serializer_class

    def paginate_queryset(self, queryset):
        # CSV downloads are unpaginated by default; an explicit
        # ?page=... or ?page_size=... query string re-enables pagination.
        params = self.request.query_params.dict()
        explicitly_paged = ("page" in params) or ("page_size" in params)
        if self._renders_csv() and not explicitly_paged:
            return None
        return super().paginate_queryset(queryset)

    def get_queryset(self):
        # Superusers see every Job; everyone else only their own.
        # TODO: Add UI switch to show all jobs, only available in UI to admins
        #       (Or allow a user email filter via text box)
        requester = self.request.user
        if requester.is_superuser:  # and self.request.query_params.get('all', False):
            return Job.objects.order_by("-created_time")
        return Job.objects.filter(owner=requester).order_by("-created_time")
class PipelineView(JSONView, GetMixin):
    """Retrieve a single Pipeline by ID (owner or superuser only)."""

    queryset = Pipeline.objects.all()
    serializer_class = PipelineSerializer
    # Must be authenticated AND be the owner (or a superuser).
    permission_classes = (IsAuthenticated & (IsOwner | IsSuperuser),)
class PipelineListView(generics.ListAPIView):
    """
    Retrieve a (read-only) list of pipelines available to the current user.
    """

    serializer_class = PipelineSerializer
    # Not working ?
    permission_classes = (IsPublic | DefaultObjectPermissions | IsOwner,)
    # filter_backends would in theory be a nice declarative way to handle
    # the subset of Pipelines a user sees in the list, however there doesn't
    # seem to be a clean way to do unions of filtered results, and we'd like
    # to do (IsPublic | ObjectPermissionsFilter,) to see public pipelines
    # + private ones. Doing (IsPublic, ObjectPermissionsFilter,) is equivalent
    # (IsPublic & ObjectPermissionsFilter,), which is not what we want.
    #
    # filter_backends = (
    #     ObjectPermissionsFilter,
    #     OrderingFilter,
    # )
    ordering_fields = ["-created_time"]
    pagination_class = JobPagination

    def get_queryset(self):
        # Pipelines the user was explicitly granted view permission on
        # (eg via django-guardian object permissions) ...
        granted = get_objects_for_user(
            self.request.user, ["laxy_backend.view_pipeline"]
        )
        # ... plus anything the user owns or that is flagged public.
        owned_or_public = Pipeline.objects.filter(
            Q(owner=self.request.user) | Q(public=True)
        )
        return owned_or_public.union(granted).order_by("-created_time")
class BigPageNumberPagination(PageNumberPagination):
    """Large-page pagination (100 per page, capped at 1000) for chatty lists like event logs."""

    # Number of results per page when the client doesn't request a size.
    page_size = 100
    # Query string parameter selecting the page, eg ?page=2
    page_query_param = "page"
    # Query string parameter overriding the page size, eg ?page_size=500
    page_size_query_param = "page_size"
    # Hard upper bound on client-requested page sizes.
    max_page_size = 1000
class EventLogListView(generics.ListAPIView):
    """
    To list all events for a particular job, use:

    `/api/v1/eventlogs/?object_id={job_id}`
    """

    lookup_field = "id"
    queryset = EventLog.objects.all()
    serializer_class = EventLogSerializer
    filter_backends = (DjangoFilterBackend,)
    filter_fields = (
        "user",
        "object_id",
        "event",
    )
    pagination_class = BigPageNumberPagination
    permission_classes = (IsAuthenticated | HasAccessTokenForEventLogSubject,)

    def get_queryset(self):
        # Superusers see every event, newest first.
        if self.request.user.is_superuser:
            return EventLog.objects.order_by("-timestamp")
        # A valid ?access_token= for the requested Job (?object_id=) grants
        # read access to that Job's events without requiring ownership.
        params = self.request.query_params
        token = params.get("access_token", None)
        obj_id = params.get("object_id", None)
        if token and obj_id and token_is_valid(token, obj_id):
            return EventLog.objects.filter(object_id=obj_id).order_by("-timestamp")
        # Otherwise only the requesting user's own events are visible.
        return EventLog.objects.filter(user=self.request.user).order_by(
            "-timestamp"
        )
class EventLogCreate(JSONView):
    queryset = EventLog.objects.all()
    serializer_class = EventLogSerializer

    def post(self, request: Request, version=None, subject_obj=None):
        """
        Create a new EventLog.

        These logs are intended to report events, but not trigger side effects.

        Request body example:
        ```json
        {
         "event": "JOB_PIPELINE_COMPLETED",
         "message": "Job completed.",
         "extra": {"exit_code": 0},
         "content_type": "job",
         "object_id": "2w3iIE9BLKrnwHBz1xUtl9"
        }
        ```

        `event` is an 'enum' or 'tag' style string classifying the logged event - values
        aren't currently enforced, but should generally be one of:

        - `JOB_STATUS_CHANGED`
        - `INPUT_DATA_DOWNLOAD_STARTED`
        - `INPUT_DATA_DOWNLOAD_FINISHED`
        - `JOB_PIPELINE_STARTING`
        - `JOB_PIPELINE_FAILED`
        - `JOB_PIPELINE_COMPLETED`
        - `JOB_INFO`
        - `JOB_ERROR`

        `message` is a short free-text string intended to be read by humans.
        `extra` contains arbitrary metadata about the event - conventions in use
        include numeric process `exit_code`, and job status changes `from` and `to`.

        <!--
        :param subject_obj: An optional Django model that is the 'subject' of
                            the event, assigned to EventLog.obj. Mostly used for
                            subclasses that deal with events for specific
                            Model types (eg Jobs).
        :type subject_obj: django.db.models.Model
        :param request: The request object.
        :type request: rest_framework.request.Request
        :return: The response object.
        :rtype: rest_framework.response.Response
        -->
        """
        serializer = self.get_serializer(
            data=request.data, context={"request": request}
        )
        if serializer.is_valid():
            # Only pass obj= when a subject was supplied, so the serializer's
            # own content_type/object_id handling applies otherwise.
            # (Previously the saved instance was bound to an unused local.)
            save_kwargs = {"user": request.user}
            if subject_obj is not None:
                save_kwargs["obj"] = subject_obj
            serializer.save(**save_kwargs)
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class JobEventLogCreate(EventLogCreate):
    serializer_class = JobEventLogSerializer

    def post(self, request: Request, uuid=None, version=None):
        """
        Create a new EventLog for the Job.

        See <a href="#operation/v1_eventlog_create">/eventlog/</a> docs
        (`content_type` and `object_id` are automatically set to the Job {job_id}).

        <!--
        :param request: The request object.
        :type request: rest_framework.request.Request
        :return: The response object.
        :rtype: rest_framework.response.Response
        -->
        """
        # BUGFIX: previously a missing uuid left `job = None` and the
        # `job.owner` access below raised AttributeError (HTTP 500).
        # A Job ID is required to attach the event to anything.
        if uuid is None:
            return Response(status=status.HTTP_400_BAD_REQUEST)
        try:
            job = Job.objects.get(id=uuid)
        except Job.DoesNotExist:
            return Response(status=status.HTTP_404_NOT_FOUND)
        # Only the Job owner (or a superuser) may log events against a Job.
        if job.owner == request.user or request.user.is_superuser:
            return super(JobEventLogCreate, self).post(
                request, version=version, subject_obj=job
            )
        return Response(status=status.HTTP_403_FORBIDDEN)
class AccessTokenView(JSONView, GetMixin, DeleteMixin):
    """Retrieve or delete a single AccessToken by ID (owner or superuser only)."""

    queryset = AccessToken.objects.all()
    serializer_class = AccessTokenSerializer
    # Must be authenticated AND be the owner (or a superuser).
    permission_classes = (IsAuthenticated & (IsOwner | IsSuperuser),)
class AccessTokenListView(generics.ListAPIView):
    """
    List access tokens sorted by expiry time.

    Query parameters:

    * active - if set, show only non-expired tokens (eg `?active=1`)
    * object_id - the ID of the object this access token gives permission to access
    * content_type - the content type (eg "job") of the target object
    * created_by - filter by user (available only to superusers)
    """

    lookup_field = "id"
    queryset = AccessToken.objects.all()
    serializer_class = AccessTokenSerializer
    filter_backends = (DjangoFilterBackend,)
    filter_fields = (
        "created_by",
        "content_type",
        "object_id",
    )
    permission_classes = (IsAuthenticated & (IsOwner | IsSuperuser),)

    # FIXME: Filtering by ?content_type=job fails to return results

    def get_queryset(self):
        # qs = qs.filter(created_by=self.request.user)  # handled by permission_classes
        tokens = self.queryset
        if self.request.query_params.get("active", None):
            # "Active" means expiry in the future, or no expiry at all.
            never_expires = Q(expiry_time=None)
            not_yet_expired = Q(expiry_time__gt=timezone.now())
            tokens = tokens.filter(not_yet_expired | never_expires)
        return tokens.order_by("-expiry_time")
class AccessTokenCreate(JSONView):
    queryset = AccessToken.objects.all()
    serializer_class = AccessTokenSerializer
    permission_classes = (IsAuthenticated & (IsOwner | IsSuperuser),)

    def _owns_target_object(self, user, serializer):
        """Return True if `user` owns the object the token would grant access to."""
        # Tokens default to targeting the 'job' content type.
        content_type = serializer.validated_data.get("content_type", "job")
        object_id = serializer.validated_data.get("object_id", None)
        target_obj = ContentType.objects.get(
            app_label="laxy_backend", model=content_type
        ).get_object_for_this_type(id=object_id)
        return is_owner(user, target_obj)

    def post(self, request: Request, version=None):
        """
        Generate a time-limited access token for a specific object.

        Can be used like `?access_token={the_token}` in certain URLs to provide
        read-only access.
        """
        serializer = self.get_serializer(
            data=request.data, context={"request": request}
        )
        if serializer.is_valid():
            # Users may only mint tokens for objects they actually own.
            if not self._owns_target_object(request.user, serializer):
                # NOTE: serializer.errors is empty here (validation passed);
                # the 403 status code carries the meaning.
                return Response(serializer.errors, status=status.HTTP_403_FORBIDDEN)
            # (Previously the saved instance was bound to an unused local.)
            serializer.save(created_by=request.user)
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class JobAccessTokenView(JSONView, GetMixin):
    """
    This view can be used if we want just one token link per job which can be updated with a new expiry or deleted.
    This simplifies the use of token links for users, at the cost of less flexibility.
    """

    lookup_url_kwarg = "job_id"
    queryset = AccessToken.objects.all()
    serializer_class = JobAccessTokenRequestSerializer
    permission_classes = (IsSuperuser | IsOwner | HasReadonlyObjectAccessToken,)

    @cached_property
    def _job_ct(self) -> ContentType:
        # ContentType row for the Job model, cached per view instance.
        return ContentType.objects.get(app_label="laxy_backend", model="job")

    def _owns_target_object(self, user, obj_id):
        """
        Return True if `user` owns the Job `obj_id`, or is a superuser.

        BUGFIX: this previously re-queried ContentType with
        `model=self._job_ct`, passing a ContentType *instance* where the
        `model` field expects the lowercase model-name string ("job") -
        a lookup that can never match. The correct ContentType is already
        cached on `self._job_ct`, so use it directly.
        """
        target_obj = self._job_ct.get_object_for_this_type(id=obj_id)
        return is_owner(user, target_obj) or self.request.user.is_superuser

    def get_queryset(self):
        # All AccessTokens targeting this Job, oldest first (so .first()
        # consistently returns the earliest-created token).
        job_id = self.kwargs.get(self.lookup_url_kwarg, None)
        if job_id is not None:
            qs = self.queryset.filter(
                Q(object_id=job_id) & Q(content_type=self._job_ct)
            ).order_by("created_time")
            return qs
        return AccessToken.objects.none()

    @view_config(response_serializer=JobAccessTokenResponseSerializer)
    def get(self, request: Request, job_id: str, version=None):
        """
        Returns the (first created, non-hidden) AccessToken for this job.

        <!--
        :param request: The request object.
        :type request:
        :param uuid: The URL-encoded UUID.
        :type uuid: str
        :return: The response object.
        :rtype:
        -->
        """
        obj = self.get_queryset().first()
        if obj:
            serializer = self.response_serializer(obj)
            return Response(serializer.data, status=status.HTTP_200_OK)
        else:
            # No token exists yet for this Job.
            return Response(status=status.HTTP_204_NO_CONTENT)

    @view_config(
        request_serializer=JobAccessTokenRequestSerializer,
        response_serializer=JobAccessTokenResponseSerializer,
    )
    def put(self, request: Request, job_id: str, version=None):
        """
        Create or update the access token for this Job.
        This always updates the first created, non-hidden token.

        (`content_type` and `object_id` will be ignored - these are always `'job'` and the value of `job_id` at
        this endpoint).

        <!--
        :param request:
        :type request:
        :param job_id:
        :type job_id:
        :param version:
        :type version:
        :return:
        :rtype:
        -->
        """
        # We must check that the requesting user owns the target Job, since the IsOwner permission (on the class) in
        # this context applies to the AccessToken, not the Job itself.
        # This ensures users can't create an AccessToken for a Job they don't own !
        if not self._owns_target_object(request.user, job_id):
            return Response(status=status.HTTP_403_FORBIDDEN)

        obj = self.get_queryset().first()
        if obj:
            # Update the existing (earliest) token in place.
            serializer = self.request_serializer(
                obj, data=request.data, context={"request": request}
            )
        else:
            # No token yet - create one, forcing the target to this Job.
            data = dict(request.data)
            data.update(object_id=job_id, content_type="job")
            serializer = self.request_serializer(
                data=data, context={"request": request}
            )
        if serializer.is_valid():
            obj = serializer.save(created_by=request.user)
            return Response(
                self.response_serializer(obj).data, status=status.HTTP_200_OK
            )
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class JobClone(JSONView):
    """
    Clone the run configuration (PipelineRun + SampleCart) of an existing Job
    so it can be used to launch a new Job with the same settings.
    """

    queryset = Job.objects.all()
    serializer_class = JobSerializerResponse
    permission_classes = (IsOwner | IsSuperuser,)
    # permission_classes = (DjangoObjectPermissions,)
    lookup_url_kwarg = "job_id"

    @view_config(response_serializer=JobSerializerResponse)
    @etag_headers
    def post(self, request: Request, job_id, version=None):
        """
        Clone the PipelineRun (and its SampleCart) associated with the Job
        specified by Job ID (UUID), returning the new `pipelinerun_id`,
        `pipeline` name and `samplecart_id` as JSON.

        <!--
        :param request: The request object.
        :type request: rest_framework.request.Request
        :param uuid: The URL-encoded UUID.
        :type uuid: str
        :return: The response object.
        :rtype: rest_framework.response.Response
        -->
        """
        job = self.get_object()
        # TODO: This should actually be a migration that modifies Job.params to conform to the new
        #       shape. We should probably start versioning our JSON blobs, or including a link to
        #       a JSON Schema (eg generated via marshmallow-jsonschema and linked to in the JSON blob
        #       something like {"schema": "https://laxy.io/api/v1/schemas/job.params/version-2"}).
        #       Advantage of this is we can then automatically generate TypeScript types in the
        #       frontend too using: https://www.npmjs.com/package/json-schema-to-typescript
        #
        _support_deprecated_sample_set = True
        if _support_deprecated_sample_set:
            # Older Jobs stored the cart under `sample_set` rather than
            # `sample_cart` - fall back and warn so they remain cloneable.
            samplecart = job.params.get("sample_cart", {})
            if samplecart is None:
                samplecart = {}
            samplecart_id = samplecart.get("id", None)
            if samplecart_id is None:
                samplecart_id = job.params.get("sample_set", {}).get("id", None)
                logger.warning(
                    f"Use of `sample_set` in Job.params is deprecated, please use `sample_cart` (when cloning Job {job_id})"
                )

        # if samplecart_id is None:
        #     return HttpResponse(
        #         status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        #         reason=f"Cannot find samplecart associated with job {job.id}",
        #     )

        # pipelinerun = PipelineRun.objects.filter(sample_cart=samplecart_id).first()
        # NOTE(review): a missing `pipelinerun_id` in job.params means
        # PipelineRun.objects.get(id=None) raises here (500) - presumably
        # all cloneable Jobs carry this key; confirm.
        pipelinerun_id = job.params.get("pipelinerun_id", None)
        pipelinerun = PipelineRun.objects.get(id=pipelinerun_id)
        samplecart = pipelinerun.sample_cart
        if samplecart is not None:
            # Clearing pk/id makes Django INSERT a fresh copy on save().
            samplecart.pk = None
            samplecart.id = None
            # SampleCart is being cloned in order to be used for a new Job, so unset this
            samplecart.job = None
            samplecart.save()
            pipelinerun.sample_cart = samplecart
        # Same pk/id-clearing trick to clone the PipelineRun itself.
        pipelinerun.pk = None
        pipelinerun.id = None
        pipelinerun.save()
        new_pipelinerun = pipelinerun
        pipeline_name = job.params.get("pipeline", None)
        return JsonResponse(
            {
                "pipelinerun_id": new_pipelinerun.id,
                "pipeline": pipeline_name,
                "samplecart_id": getattr(samplecart, "id", None),
            }
        )
# TODO: This should really be POST, since it has side effects,
#       however GET is easier to trigger manually in the browser
@api_view(["GET"])
@renderer_classes([JSONRenderer])
@permission_classes([IsAdminUser])
def trigger_file_registration(request, job_id, version=None):
    """
    Kick off an async task that (re)indexes the remote files for a Job.

    Admin-only. Returns the Celery task ID as JSON.
    """
    # Existence check only - the Job object itself was fetched but never
    # used previously, so avoid materializing it.
    if not Job.objects.filter(id=job_id).exists():
        return HttpResponse(status=404, reason=f"Job {job_id} doesn't exist.")
    task_data = dict(job_id=job_id)
    result = index_remote_files.apply_async(args=(task_data,))
    return Response(
        data={"task_id": result.id}, content_type="application/json", status=200
    )
class SendFileToDegust(JSONView):
    """
    Upload a counts-matrix File to the Degust web app, configure the Degust
    session from the Job's sample cart, and return the session URL.
    """

    lookup_url_kwarg = "file_id"
    queryset = File.objects.all()
    serializer_class = FileSerializer
    permission_classes = (IsOwner | IsSuperuser | FileHasAccessTokenForJob,)

    # Non-async version
    # @view_config(response_serializer=RedirectResponseSerializer)
    # def post(self, request: Request, file_id: str, version=None):
    #
    #     counts_file: File = self.get_object()
    #
    #     if not counts_file:
    #         return HttpResponse(status=status.HTTP_404_NOT_FOUND,
    #                             reason="File ID does not exist, (or your are not"
    #                                    "authorized to access it).")
    #
    #     url = 'http://degust.erc.monash.edu/upload'
    #
    #     browser = RoboBrowser(history=True, parser='lxml')
    #     browser.open(url)
    #
    #     form = browser.get_form()
    #
    #     # filelike = BytesIO(counts_file.file.read())
    #
    #     form['filename'].value = counts_file.file  # filelike
    #     browser.submit_form(form)
    #     degust_url = browser.url
    #
    #     counts_file.metadata['degust_url'] = degust_url
    #     counts_file.save()
    #     #
    #     data = RedirectResponseSerializer(data={
    #         'status': browser.response.status_code,
    #         'redirect': degust_url})
    #     if data.is_valid():
    #         return Response(data=data.validated_data,
    #                         status=status.HTTP_200_OK)
    #     else:
    #         return HttpResponse(status=status.HTTP_500_INTERNAL_SERVER_ERROR,
    #                             reason="Error contacting Degust.")

    @view_config(response_serializer=RedirectResponseSerializer)
    def post(self, request: Request, file_id: str, version=None):
        """
        Sends the File specified by `file_id` to the Degust web app (http://degust.erc.monash.edu).
        The file should be a compatible counts matrix (eg TSV).
        Creates a new Degust session if the file hasn't been send before, or returns the URL to the existing
        session if it has.

        Response:
        The HTTP status code returned by Degust, and if successful the URL to the Degust session.
        ```json
        {
          "status": 200
          "redirect": "http://degust.erc.monash.edu/degust/config.html?code=abfa3b7a2b726bfadcf3076f6feb3ecd"
        }
        ```
        """
        degust_api_url = getattr(settings, "DEGUST_URL", "http://degust.erc.monash.edu")
        counts_file: File = self.get_object()
        # NOTE(review): assumes the counts file's fileset is attached as
        # output to at least one Job; `job` would be None otherwise and
        # `job.params` below would raise - confirm upstream guarantees this.
        job = counts_file.fileset.jobs_as_output.first()
        # Build {condition: [sample_name, ...]} from the Job's sample cart.
        sample_cart = job.params.get("sample_cart", {})
        samples = sample_cart.get("samples", [])
        conditions = list(
            set([sample["metadata"].get("condition") for sample in samples])
        )
        # Discard missing/blank conditions.
        conditions = [c for c in conditions if c is not None and c.strip() != ""]
        # {'some_condition': []} dict of empty lists
        degust_conditions = OrderedDict([(condition, []) for condition in conditions])
        for sample in samples:
            # this is the unsanitized user supplied sample name
            # (associated with both R1 and R2, and possbily multiple pairs
            # of files that represent technical replicates)
            # name = sample["name"]
            name = sample.get("sanitized_name", None)
            if name is None:
                # We get all the sanitized_filenames for the sample, then
                # find the longest common prefix. This essentially mimicks what
                # RNAsik does when it automatically generates a samplesheet
                sample_files = pydash.flatten(
                    [list(f.values()) for f in sample["files"]]
                )
                names = [
                    simplify_fastq_name(f["sanitized_filename"])
                    for f in sample_files
                    if "sanitized_filename" in f
                ]
                name = longest_common_prefix(names)
            condition = sample["metadata"].get("condition")
            if condition is not None and condition.strip() != "":
                degust_conditions[condition].append(name)
        # Ensure we have no duplicated sample names in a condition
        # (can possibly occur in some cases where we files that represent have technical
        # replicates that are merged under a single sample name by the pipelines)
        for condition in degust_conditions.keys():
            deduped = tuple(set(degust_conditions[condition]))
            degust_conditions[condition] = deduped

        if not counts_file:
            return HttpResponse(
                status=status.HTTP_404_NOT_FOUND,
                reason="File ID does not exist, (or your are not"
                "authorized to access it).",
            )

        # If we've already created a Degust session for this file, reuse it.
        saved_degust_url = counts_file.metadata.get("degust_url", None)
        # force_new is mostly for testing, creates a new Degust session overwriting the cached URL in File metadata
        force_new = request.query_params.get("force_new", False)
        if saved_degust_url and not force_new:
            data = RedirectResponseSerializer(
                data={"status": status.HTTP_200_OK, "redirect": saved_degust_url}
            )
            if data.is_valid():
                return Response(data=data.validated_data, status=status.HTTP_200_OK)

        # TODO: RoboBrowser is still required since while Degust no longer requires a CSRF token upon upload,
        #       an 'upload_token' per-user is required:
        #       https://github.com/drpowell/degust/blob/master/FAQ.md#uploading-a-counts-file-from-the-command-line
        #       We either need an application level API token for Degust (to create anonymous uploads), or a
        #       trust relationship (eg proper OAuth2 provider or simple shared Google Account ID) that allows
        #       Laxy to retrieve the upload token for a user programmatically
        url = f"{degust_api_url}/upload"
        browser = RoboBrowser(history=True, parser="lxml")
        loop = asyncio.new_event_loop()

        # This does the fetch of the form and the counts file simultaneously
        async def get_form_and_file(url, fileish):
            def get_upload_form(url):
                browser.open(url)
                return browser.get_form()

            def get_counts_file_content(fh):
                # filelike = BytesIO(fh.read())
                # return filelike
                return fh

            # Run both blocking fetches in the default thread-pool executor.
            future_form = loop.run_in_executor(None, get_upload_form, url)
            future_file = loop.run_in_executor(None, get_counts_file_content, fileish)
            form = await future_form
            filelike = await future_file
            return form, filelike

        form, filelike = loop.run_until_complete(
            get_form_and_file(url, counts_file.file)
        )
        loop.close()

        # First POST the counts file, get a new Degust session ID
        form["filename"].value = filelike

        # TODO: This is to deal with SFTPStorage backend timeouts etc
        #       Ideally we would fork / subclass SFTPStorage and SFTPStorageFile
        #       and add some built-in backoff / retry functionality
        #       eg: https://github.com/MonashBioinformaticsPlatform/laxy/issues/52
        @backoff.on_exception(
            backoff.expo,
            (
                EOFError,
                IOError,
                OSError,
                ssh_exception.SSHException,
                ssh_exception.AuthenticationException,
            ),
            max_tries=3,
            jitter=backoff.full_jitter,
        )
        def submit_form(form):
            # Retried with exponential backoff on transient SSH/IO failures.
            browser.submit_form(form)

        submit_form(form)
        # Degust redirects to compare.html; we store the config.html variant.
        degust_config_url = browser.url.replace("/compare.html?", "/config.html?", 1)
        degust_id = (
            parse_qs(urlparse(degust_config_url).query).get("code", [None]).pop()
        )
        counts_file.metadata["degust_url"] = degust_config_url
        counts_file.save()

        # Now POST the settings to the Degust session, as per:
        # https://github.com/drpowell/degust/blob/master/FAQ.md#uploading-a-counts-file-from-the-command-line
        description = job.params.get("params").get("description", "")
        replicates = list(degust_conditions.items())
        init_select = []
        if len(conditions) >= 2:
            init_select = conditions[0:2]

        def _firstline(fh):
            """
            Read and return the first line from a file object.
            Assumed file seek pointer is at the start.

            NOTE(review): if the stream contains no newline at all,
            fh.read(128) will eventually return b"" and this loops forever -
            TODO confirm counts files are always newline-terminated.
            """
            line = []
            while True:
                chunk = fh.read(128)
                chunk = chunk.decode("utf-8")
                if "\n" in chunk:
                    line.append(chunk.split("\n")[0])
                    return "".join(line)
                else:
                    line.append(chunk)

        # We take all the column names in the counts file, except the sample names
        # (different pipeline versions might use 'raw' sample names input by user,
        # or the sanitized versions as column headers, so we remove both)
        # TODO: valid_info_columns should probably be a Pipeline.metadata value or
        #       pipeline app (or global) setting.
        valid_info_columns = [
            "Gene.ID",
            "Chrom",
            "Gene.Name",
            "Biotype",
            "gene_id",
            "chromosome",
            "gene_name",
            "gene_biotype",
        ]
        counts_header = _firstline(counts_file.file).split("\t")
        info_columns = list(set(counts_header).intersection(set(valid_info_columns)))
        degust_settings = {
            "csv_format": False,
            "replicates": replicates,
            "fc_columns": [],
            ## RNAsik
            # info_columns": ["Gene.ID", "Chrom", "Gene.Name", "Biotype"],
            ## nf-core/rnaseq
            # info_columns": ["gene_id", "chromosome", "gene_name", "gene_biotype"]
            "info_columns": info_columns,
            "analyze_server_side": True,
            "name": f"{description}",
            "experimentDescription": f"(Laxy job: {job.id}, file: {counts_file.id})",
            # 'primary_name': '',
            "init_select": init_select,
            # 'hidden_factor': [],
            "link_column": "Gene.ID",
        }
        degust_settings_url = f"{degust_api_url}/degust/{degust_id}/settings"
        resp = requests.post(
            degust_settings_url, files={"settings": (None, json.dumps(degust_settings))}
        )
        resp.raise_for_status()
        data = RedirectResponseSerializer(
            data={"status": browser.response.status_code, "redirect": degust_config_url}
        )
        if data.is_valid():
            return Response(data=data.validated_data, status=status.HTTP_200_OK)
        else:
            return HttpResponse(
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
                reason="Error contacting Degust.",
            )
class RemoteBrowseView(JSONView):
    """
    Browse a remote HTTP(S)/FTP location and return one level of its
    file/directory listing (backs the frontend remote-file picker).
    """

    renderer_classes = (JSONRenderer,)
    serializer_class = FileListing
    filter_backends = (RemoteFilesQueryParams,)
    # This endpoint's docs are publicly visible.
    api_docs_visible_to = "public"

    # @method_decorator(cache_page(10 * 60))
    @view_config(response_serializer=FileListing)
    def post(self, request, version=None):
        """
        Returns a single level of a file/directory tree.

        Takes query parameters:

        * `url` - the URL (http[s]:// or ftp://) to retrieve.
        * `fileglob` - a glob pattern to filter returned files by (eg `*.csv`). Doesn't filter directories.

        eg

        **Request:**

        `GET http://laxy.io/api/v1/remote-browse/?url=ftp://ftp.sra.ebi.ac.uk/vol1/fastq/SRR343/001/SRR3438011/&fileglob=*.gz`

        **Response:**
        ```json
        {
          "listing":[
            {
              "type":"file",
              "name":"SRR3438011_1.fastq.gz",
              "location":"ftp://ftp.sra.ebi.ac.uk/vol1/fastq/SRR343/001/SRR3438011/SRR3438011_1.fastq.gz",
              "tags": []
            },
            {
              "type":"directory",
              "name":"FastQC_reports",
              "location":"ftp://ftp.sra.ebi.ac.uk/vol1/fastq/SRR343/001/SRR3438011/FastQC_reports/",
              "tags": []
            },
            {
              "type":"file",
              "name":"data.tar",
              "location":"ftp://ftp.sra.ebi.ac.uk/vol1/fastq/SRR343/001/SRR3438011/data.tar.gz",
              "tags": ['archive']
            }
          ]
        }
        ```

        <!--
        :param request:
        :type request:
        :param version:
        :type version:
        :return:
        :rtype:
        -->
        """
        url = request.data.get("url", "")
        fileglob = request.data.get("fileglob", "*")
        if url == "":
            return HttpResponse(
                status=status.HTTP_400_BAD_REQUEST,
                reason="url query parameter is required.",
            )

        # def _looks_like_archive(fn):
        #     archive_extensions = ['.tar']
        #     return any([fn.endswith(ext) for ext in archive_extensions])

        listing = []
        scheme = urlparse(url).scheme
        if scheme not in ["ftp", "http", "https"]:
            return HttpResponse(
                status=status.HTTP_400_BAD_REQUEST,
                reason=f"Unsupported scheme: {scheme}://",
            )
        try:
            # We need to check the URL given is actually accessible
            if scheme in ["http", "https"]:
                resp = requests.head(url)
                resp.raise_for_status()
        except requests.exceptions.HTTPError as ex:
            # `resp` is always bound here - raise_for_status() can only fire
            # after the HEAD request returned a response.
            return JsonResponse(
                {
                    "remote_server_response": {
                        "url": url,
                        "status": resp.status_code,
                        "reason": resp.reason,
                    }
                },
                # TODO: When frontend interprets this better, use status 400 and let the frontend
                #       report third-party response from the JSON blob
                # status=status.HTTP_400_BAD_REQUEST,
                status=resp.status_code,
                reason=resp.reason,
            )
        except requests.exceptions.ConnectionError as ex:
            # NOTE(review): status=None makes Django fall back to the default
            # 200 status code - presumably intentional so the frontend reads
            # the error from the JSON body; confirm.
            return JsonResponse(
                {
                    "remote_server_response": {
                        "url": url,
                        "status": None,
                        "reason": "ConnectionError",
                    }
                },
                status=None,
                reason="ConnectionError",
            )
        except requests.exceptions.Timeout as ex:
            return JsonResponse(
                {
                    "remote_server_response": {
                        "url": url,
                        "status": status.HTTP_504_GATEWAY_TIMEOUT,
                        "reason": "Timeout",
                    }
                },
                status=status.HTTP_504_GATEWAY_TIMEOUT,
                reason="Timeout",
            )
        except requests.exceptions.TooManyRedirects:
            return JsonResponse(
                {
                    "remote_server_response": {
                        "url": url,
                        "status": None,
                        "reason": "TooManyRedirects",
                    }
                },
                status=None,
                reason="TooManyRedirects",
            )
        except BaseException as ex:
            # Deliberately broad catch-all so any other failure to reach the
            # remote server becomes a structured JSON response.
            return JsonResponse(
                {
                    "remote_server_response": {
                        "url": url,
                        "status": None,
                        "reason": "Request to remote server failed.",
                    }
                },
                status=None,
                reason="Request to remote server failed.",
            )

        fn = Path(urlparse(url).path).name
        # Archive (eg .tar) URLs: list the files *inside* the archive via
        # its .manifest-md5 sidecar, if one exists.
        if is_archive_link(url, use_network=True) or fn.endswith(".manifest-md5"):
            try:
                archive_files = http_remote.get_tar_file_manifest(url)
                # Remove .manifest-md5 if present
                # NOTE(review): rstrip() strips a *character set*, not a
                # suffix - a path ending in any of ".manifest-d5"'s chars
                # could be over-truncated; str.removesuffix would be safer.
                u = urlparse(url)
                url = u._replace(path=u.path.rstrip(".manifest-md5")).geturl()
                for f in archive_files:
                    filepath = f["filepath"]
                    listing.append(
                        dict(
                            name=filepath,
                            location=f"{url}#{filepath}",
                            type="file",
                            tags=["inside_archive"],
                        )
                    )
            except BaseException as ex:
                # Best-effort: without a manifest, fall back to listing the
                # archive itself as a single file.
                logger.debug(f"Unable to find archive manifest for {url}")
                logger.exception(ex)
                fn = Path(urlparse(url).path).name
                listing = [
                    dict(name=fn, location=f"{url}", type="file", tags=["archive"])
                ]
        elif scheme == "ftp":
            from fs import open_fs

            try:
                try:
                    # may raise fs.errors.RemoteConnectionError if FTP connection fails
                    ftp_fs = open_fs(url)
                except BaseException as exx:
                    msg = getattr(exx, "msg", "")
                    return JsonResponse(
                        {
                            "remote_server_response": {
                                "url": url,
                                "status": 500,
                                "reason": msg,
                            }
                        },
                        # TODO: When frontend interprets this better, use status 400 and let the
                        #       frontend report third-party response from the JSON blob
                        # status=status.HTTP_400_BAD_REQUEST,
                        status=500,
                        reason=msg,
                    )
                # One directory level only (max_depth=1); fileglob filters
                # files but not directories.
                for step in ftp_fs.walk(
                    filter=[fileglob], search="breadth", max_depth=1
                ):
                    listing.extend(
                        [
                            dict(
                                type="directory",
                                name=i.name,
                                location=f'{url.rstrip("/")}/{i.name}',
                                tags=[],
                            )
                            for i in step.dirs
                        ]
                    )
                    listing.extend(
                        [
                            dict(
                                type="file",
                                name=i.name,
                                location=f'{url.rstrip("/")}/{i.name}',
                                tags=["archive"] if is_archive_link(i.name) else [],
                            )
                            for i in step.files
                        ]
                    )
            except DirectoryExpected as ex:
                # The URL pointed at a single file, not a directory.
                fn = Path(urlparse(url).path).name
                listing = [
                    dict(
                        name=fn,
                        location=f"{url}",
                        type="file",
                        tags=["archive"] if is_archive_link(fn) else [],
                    )
                ]
        elif scheme == "http" or scheme == "https":
            # Render the page and scrape links with the first matching parser.
            _url = _check_content_size_and_resolve_redirects(url)
            try:
                text = render_page(_url)
            except MemoryError as ex:
                return HttpResponse(
                    status=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE, reason=str(ex)
                )
            except ValueError as ex:
                return HttpResponse(status=status.HTTP_400_BAD_REQUEST, reason=str(ex))
            _matched = False
            for matcher, link_parser_fn in LINK_SCRAPER_MAPPINGS:
                # A matcher is either a substring of the URL, or a callable
                # predicate over (url, page_text).
                if isinstance(matcher, str) and matcher in _url:
                    _matched = True
                elif callable(matcher) and matcher(_url, text):
                    _matched = True
                if _matched:
                    listing = link_parser_fn(text, _url)
                    break
            if not _matched:
                return HttpResponse(
                    status=status.HTTP_400_BAD_REQUEST,
                    reason=f"No parser for this url: {_url}",
                )

        # listing = pydash.sort_by(listing, ['type', 'name'])
        # Directories before files, then alphabetical.
        listing = multikeysort(listing, ["type", "name"])
        item_list = FileListing({"listing": listing})
        return Response(item_list.data, status=status.HTTP_200_OK)
def _get_or_create_drf_token(user):
    """
    Return the DRF auth Token for `user`, creating one if none exists.

    Uses the idiomatic `get_or_create` instead of the previous
    filter/exists/first dance - one call, same behavior (Token.user is
    unique, so at most one row can match).
    """
    token, _created = Token.objects.get_or_create(user=user)
    return token
def _get_default_compute_resource(job: Job = None):
    """
    Pick the ComputeResource a job should run on.

    With no Job, fall back to the globally 'best available' resource;
    with a Job, apply the per-owner email-domain rules.

    :raises Exception: if no suitable resource is online.
    """
    compute = (
        ComputeResource.get_best_available()
        if job is None
        else _get_compute_resources_based_on_rules(job).first()
    )
    if compute:
        return compute
    raise Exception(
        f"Cannot find available ComputeResource. None defined, or all offline."
    )
def _get_compute_resources_based_on_rules(job: Job):
    """
    Return the online ComputeResources the Job's owner may use, ordered by
    descending priority, keyed off the owner's email domain.

    :raises Exception: if no resource is allowed for the owner's domain.
    """
    # TODO: This should also incorporate per-user permissions to access specific ComputeResources
    #       (eg with django-guardian and/or django-rules). We should be able to do a similar email domain test
    #       with django-rules (eg write a can_use_compute(user, compute_resource) rule).
    #       Ideally, our {domain: compute} mapping rules would be in the database so we can change them without
    #       a restart.
    domain_map = getattr(
        settings, "EMAIL_DOMAIN_ALLOWED_COMPUTE", {"*": ["*"]}
    )
    domain = job.owner.email.split("@")[-1]
    # Fall back to the wildcard entry when the domain has no explicit rule.
    allowed_names = domain_map.get(domain, None)
    if allowed_names is None:
        allowed_names = domain_map.get("*", [])
    online = ComputeResource.objects.filter(
        status=ComputeResource.STATUS_ONLINE
    ).order_by("-priority")
    # A wildcard name means any online resource is permitted.
    if "*" in allowed_names:
        return online
    permitted = online.filter(name__in=allowed_names).order_by("-priority")
    if not permitted.exists():
        raise Exception(
            f"Cannot find allowed ComputeResource for this email domain ({domain})."
        )
    return permitted
|
#
# Copyright (c) 2017 Juniper Networks, Inc. All rights reserved.
#
import logging
from cfgm_common import get_bgp_rtgt_min_id
from cfgm_common import VNID_MIN_ALLOC
from cfgm_common.exceptions import BadRequest
from cfgm_common.exceptions import HttpError
from cfgm_common.exceptions import PermissionDenied
from cfgm_common.exceptions import RefsExistError
from cfgm_common.tests import test_common
from testtools import ExpectedException
from vnc_api.vnc_api import GlobalSystemConfig
from vnc_api.vnc_api import Project
from vnc_api.vnc_api import ProviderDetails
from vnc_api.vnc_api import RouteTargetList
from vnc_api.vnc_api import VirtualMachineInterface
from vnc_api.vnc_api import VirtualNetwork
from vnc_api.vnc_api import VirtualNetworkType
from vnc_cfg_api_server.tests import test_case
logger = logging.getLogger(__name__)
class TestVirtualNetwork(test_case.ApiServerTestCase):
@classmethod
def setUpClass(cls, *args, **kwargs):
cls.console_handler = logging.StreamHandler()
cls.console_handler.setLevel(logging.DEBUG)
logger.addHandler(cls.console_handler)
super(TestVirtualNetwork, cls).setUpClass(*args, **kwargs)
    @classmethod
    def tearDownClass(cls, *args, **kwargs):
        # Detach the console handler installed in setUpClass so later test
        # classes don't emit duplicate log lines.
        logger.removeHandler(cls.console_handler)
        super(TestVirtualNetwork, cls).tearDownClass(*args, **kwargs)
@property
def api(self):
return self._vnc_lib
def test_allocate_vn_id(self):
mock_zk = self._api_server._db_conn._zk_db
vn_obj = VirtualNetwork('%s-vn' % self.id())
self.api.virtual_network_create(vn_obj)
vn_obj = self.api.virtual_network_read(id=vn_obj.uuid)
vn_id = vn_obj.virtual_network_network_id
self.assertEqual(vn_obj.get_fq_name_str(),
mock_zk.get_vn_from_id(vn_id))
self.assertGreaterEqual(vn_id, VNID_MIN_ALLOC)
def test_deallocate_vn_id(self):
mock_zk = self._api_server._db_conn._zk_db
vn_obj = VirtualNetwork('%s-vn' % self.id())
self.api.virtual_network_create(vn_obj)
vn_obj = self.api.virtual_network_read(id=vn_obj.uuid)
vn_id = vn_obj.virtual_network_network_id
self.api.virtual_network_delete(id=vn_obj.uuid)
self.assertNotEqual(mock_zk.get_vn_from_id(vn_id),
vn_obj.get_fq_name_str())
def test_not_deallocate_vn_id_if_fq_name_does_not_correspond(self):
mock_zk = self._api_server._db_conn._zk_db
vn_obj = VirtualNetwork('%s-vn' % self.id())
self.api.virtual_network_create(vn_obj)
vn_obj = self.api.virtual_network_read(id=vn_obj.uuid)
vn_id = vn_obj.virtual_network_network_id
fake_fq_name = "fake fq_name"
mock_zk._vn_id_allocator.delete(vn_id - VNID_MIN_ALLOC)
mock_zk._vn_id_allocator.reserve(vn_id - VNID_MIN_ALLOC, fake_fq_name)
self.api.virtual_network_delete(id=vn_obj.uuid)
self.assertIsNotNone(mock_zk.get_vn_from_id(vn_id))
self.assertEqual(fake_fq_name, mock_zk.get_vn_from_id(vn_id))
def test_cannot_set_vn_id(self):
vn_obj = VirtualNetwork('%s-vn' % self.id())
vn_obj.set_virtual_network_network_id(42)
with ExpectedException(PermissionDenied):
self.api.virtual_network_create(vn_obj)
def test_cannot_update_vn_id(self):
vn_obj = VirtualNetwork('%s-vn' % self.id())
self.api.virtual_network_create(vn_obj)
vn_obj = self.api.virtual_network_read(id=vn_obj.uuid)
vn_obj.set_virtual_network_network_id(42)
with ExpectedException(PermissionDenied):
self.api.virtual_network_update(vn_obj)
# test can update with same value, needed internally
# TODO(ethuleau): not sure why it's needed
vn_obj = self.api.virtual_network_read(id=vn_obj.uuid)
vn_obj.set_virtual_network_network_id(
vn_obj.virtual_network_network_id)
self.api.virtual_network_update(vn_obj)
def test_create_vn_with_configured_rt_in_system_range(self):
gsc = self.api.global_system_config_read(GlobalSystemConfig().fq_name)
vn = VirtualNetwork('%s-vn' % self.id())
rt_name = 'target:%d:%d' % (gsc.autonomous_system,
get_bgp_rtgt_min_id(
gsc.autonomous_system) + 1000)
vn.set_route_target_list(RouteTargetList([rt_name]))
self.assertRaises(BadRequest, self.api.virtual_network_create, vn)
def test_update_vn_with_configured_rt_in_system_range(self):
gsc = self.api.global_system_config_read(GlobalSystemConfig().fq_name)
vn = VirtualNetwork('%s-vn' % self.id())
self.api.virtual_network_create(vn)
rt_name = 'target:%d:%d' % (gsc.autonomous_system,
get_bgp_rtgt_min_id(
gsc.autonomous_system) + 1000)
vn.set_route_target_list(RouteTargetList([rt_name]))
self.assertRaises(BadRequest, self.api.virtual_network_update, vn)
def test_allocate_vxlan_id(self):
# enable vxlan routing on project
proj = self._vnc_lib.project_read(
fq_name=["default-domain", "default-project"])
proj.set_vxlan_routing(True)
self._vnc_lib.project_update(proj)
mock_zk = self._api_server._db_conn._zk_db
vn_obj = VirtualNetwork('%s-vn' % self.id())
vn_obj_properties = VirtualNetworkType(forwarding_mode='l3')
vn_obj_properties.set_vxlan_network_identifier(6000)
vn_obj.set_virtual_network_properties(vn_obj_properties)
self.api.virtual_network_create(vn_obj)
# VN created, now read back the VN data to check if vxlan_id is set
vn_obj = self.api.virtual_network_read(id=vn_obj.uuid)
vn_obj_properties = vn_obj.get_virtual_network_properties()
if not vn_obj_properties:
self.fail("VN properties are not set")
vxlan_id = vn_obj_properties.get_vxlan_network_identifier()
self.assertEqual(vxlan_id, 6000)
self.assertEqual(vn_obj.get_fq_name_str() + "_vxlan",
mock_zk.get_vn_from_id(vxlan_id))
self.assertGreaterEqual(vxlan_id, VNID_MIN_ALLOC)
self.api.virtual_network_delete(id=vn_obj.uuid)
logger.debug('PASS - test_allocate_vxlan_id')
def test_cannot_allocate_vxlan_id(self):
# enable vxlan routing on project
proj = self._vnc_lib.project_read(
fq_name=["default-domain", "default-project"])
proj.set_vxlan_routing(True)
self._vnc_lib.project_update(proj)
mock_zk = self._api_server._db_conn._zk_db
vn1_obj = VirtualNetwork('%s-vn' % self.id())
vn1_obj_properties = VirtualNetworkType(forwarding_mode='l3')
vn1_obj_properties.set_vxlan_network_identifier(6001)
vn1_obj_properties.set_forwarding_mode('l2_l3')
vn1_obj.set_virtual_network_properties(vn1_obj_properties)
self.api.virtual_network_create(vn1_obj)
# VN created, now read back the VN data to check if vxlan_id is set
vn1_obj = self.api.virtual_network_read(id=vn1_obj.uuid)
vn1_obj_properties = vn1_obj.get_virtual_network_properties()
if not vn1_obj_properties:
self.fail("VN properties are not set")
vxlan_id = vn1_obj_properties.get_vxlan_network_identifier()
self.assertEqual(vxlan_id, 6001)
# Verified vxlan_id for VN1, now create VN2 with same vxlan_id
vn2_obj = VirtualNetwork('%s-vn2' % self.id())
vn2_obj_properties = VirtualNetworkType(forwarding_mode='l3')
vn2_obj_properties.set_vxlan_network_identifier(6001)
vn2_obj_properties.set_forwarding_mode('l2_l3')
vn2_obj.set_virtual_network_properties(vn2_obj_properties)
with ExpectedException(BadRequest):
self.api.virtual_network_create(vn2_obj)
self.assertEqual(vn1_obj.get_fq_name_str() + "_vxlan",
mock_zk.get_vn_from_id(vxlan_id))
self.assertGreaterEqual(vxlan_id, VNID_MIN_ALLOC)
self.api.virtual_network_delete(id=vn1_obj.uuid)
logger.debug('PASS - test_cannot_allocate_vxlan_id')
def test_deallocate_vxlan_id(self):
# enable vxlan routing on project
proj = self._vnc_lib.project_read(
fq_name=["default-domain", "default-project"])
proj.set_vxlan_routing(True)
self._vnc_lib.project_update(proj)
mock_zk = self._api_server._db_conn._zk_db
vn_obj = VirtualNetwork('%s-vn' % self.id())
vn_obj_properties = VirtualNetworkType(forwarding_mode='l3')
vn_obj_properties.set_vxlan_network_identifier(6002)
vn_obj.set_virtual_network_properties(vn_obj_properties)
self.api.virtual_network_create(vn_obj)
# VN created, now read back the VN data to check if vxlan_id is set
vn_obj = self.api.virtual_network_read(id=vn_obj.uuid)
vn_obj_properties = vn_obj.get_virtual_network_properties()
if not vn_obj_properties:
self.fail("VN properties are not set")
vxlan_id = vn_obj_properties.get_vxlan_network_identifier()
self.assertEqual(vxlan_id, 6002)
self.api.virtual_network_delete(id=vn_obj.uuid)
self.assertNotEqual(vn_obj.get_fq_name_str() + "_vxlan",
mock_zk.get_vn_from_id(vxlan_id))
logger.debug('PASS - test_deallocate_vxlan_id')
def test_update_vxlan_id(self):
# enable vxlan routing on project
proj = self._vnc_lib.project_read(
fq_name=["default-domain", "default-project"])
proj.set_vxlan_routing(True)
self._vnc_lib.project_update(proj)
vn_obj = VirtualNetwork('%s-vn' % self.id())
vn_obj_properties = VirtualNetworkType(forwarding_mode='l3')
vn_obj_properties.set_vxlan_network_identifier(6003)
vn_obj_properties.set_forwarding_mode('l2_l3')
vn_obj.set_virtual_network_properties(vn_obj_properties)
self.api.virtual_network_create(vn_obj)
# VN created, now read back the VN data to check if vxlan_id is set
vn_obj_read = self.api.virtual_network_read(id=vn_obj.uuid)
vn_obj_properties_read = vn_obj_read.get_virtual_network_properties()
if not vn_obj_properties_read:
self.fail("VN properties are not set")
vxlan_id = vn_obj_properties_read.get_vxlan_network_identifier()
self.assertEqual(vxlan_id, 6003)
# Created VN. Now Update it with a different vxlan_id
vn_obj_properties.set_vxlan_network_identifier(6004)
vn_obj.set_virtual_network_properties(vn_obj_properties)
self.api.virtual_network_update(vn_obj)
vn_obj_read = self.api.virtual_network_read(id=vn_obj.uuid)
vn_obj_properties_read = vn_obj_read.get_virtual_network_properties()
if not vn_obj_properties_read:
self.fail("VN properties are not set")
vxlan_id = vn_obj_properties_read.get_vxlan_network_identifier()
self.assertEqual(vxlan_id, 6004)
self.api.virtual_network_delete(id=vn_obj.uuid)
logger.debug('PASS - test_update_vxlan_id')
def test_cannot_update_vxlan_id(self):
# enable vxlan routing on project
proj = self._vnc_lib.project_read(
fq_name=["default-domain", "default-project"])
proj.set_vxlan_routing(True)
self._vnc_lib.project_update(proj)
vn1_obj = VirtualNetwork('%s-vn1' % self.id())
vn1_obj_properties = VirtualNetworkType(forwarding_mode='l3')
vn1_obj_properties.set_vxlan_network_identifier(6005)
vn1_obj_properties.set_forwarding_mode('l2_l3')
vn1_obj.set_virtual_network_properties(vn1_obj_properties)
self.api.virtual_network_create(vn1_obj)
# VN created, create second VN with different vxlan_id
vn2_obj = VirtualNetwork('%s-vn2' % self.id())
vn2_obj_properties = VirtualNetworkType(forwarding_mode='l3')
vn2_obj_properties.set_vxlan_network_identifier(6006)
vn2_obj_properties.set_forwarding_mode('l2_l3')
vn2_obj.set_virtual_network_properties(vn2_obj_properties)
self.api.virtual_network_create(vn2_obj)
# Created Two VNs. Now Update it second VN with 1st VNs VXLAN_ID
vn2_obj_properties.set_vxlan_network_identifier(6005)
vn2_obj.set_virtual_network_properties(vn2_obj_properties)
with ExpectedException(BadRequest):
self.api.virtual_network_update(vn2_obj)
vn_obj_read = self.api.virtual_network_read(id=vn2_obj.uuid)
vn_obj_properties_read = vn_obj_read.get_virtual_network_properties()
if not vn_obj_properties_read:
self.fail("VN properties are not set")
vxlan_id = vn_obj_properties_read.get_vxlan_network_identifier()
self.assertEqual(vxlan_id, 6006)
self.api.virtual_network_delete(id=vn2_obj.uuid)
self.api.virtual_network_delete(id=vn1_obj.uuid)
logger.debug('PASS - test_cannot_update_vxlan_id')
def test_update_auto_vxlan_id_with_the_same_value(self):
"""
Test case.
1. Set VxLAN identifier mode to 'automatic'.
2. Create new VirtualNetwork.
3. Set VxLAN identifier mode to 'configured'.
4. Update VirtualNetwork with vxlan network identifier equal to
network id.
"""
gvc_fq_name = ['default-global-system-config',
'default-global-vrouter-config']
vxlan_id_mode = {'auto': 'automatic', 'user': 'configured'}
# Set VxLAN identifier mode to 'automatic'
gvc = self.api.global_vrouter_config_read(fq_name=gvc_fq_name)
gvc.set_vxlan_network_identifier_mode(vxlan_id_mode['auto'])
self.api.global_vrouter_config_update(gvc)
gvc = self.api.global_vrouter_config_read(fq_name=gvc_fq_name)
# verify vxlan id mode has been set
self.assertEqual(gvc.vxlan_network_identifier_mode,
vxlan_id_mode['auto'])
# Create new VirtualNetwork
vn = VirtualNetwork('%s-vn' % self.id())
self.api.virtual_network_create(vn)
vn = self.api.virtual_network_read(fq_name=vn.fq_name)
# verify vn_network_id has been set
vn_network_id = vn.get_virtual_network_network_id()
self.assertTrue(vn_network_id > 0)
# Set VxLAN identifier mode to 'configured' (user defined)
gvc.set_vxlan_network_identifier_mode(vxlan_id_mode['user'])
self.api.global_vrouter_config_update(gvc)
gvc = self.api.global_vrouter_config_read(fq_name=gvc_fq_name)
# verify vxlan id mode has been set
self.assertEqual(gvc.vxlan_network_identifier_mode,
vxlan_id_mode['user'])
# Update VirtualNetwork with vxlan network identifier
# equal to network id
vn_properties = VirtualNetworkType()
vn_properties.set_vxlan_network_identifier(vn_network_id)
vn.set_virtual_network_properties(vn_properties)
self.api.virtual_network_update(vn)
# verify vn_network_id is the same as vxlan_network_id
vn = self.api.virtual_network_read(fq_name=vn.fq_name)
vxlan_id = vn.get_virtual_network_properties() \
.get_vxlan_network_identifier()
self.assertEqual(vn_network_id, vxlan_id)
def test_context_undo_fail_db_create(self):
mock_zk = self._api_server._db_conn._zk_db
vn_obj = VirtualNetwork('%s-vn' % self.id())
zk_alloc_count_start = mock_zk._vn_id_allocator.get_alloc_count()
def stub(*args, **kwargs):
return (False, (500, "Fake error"))
with ExpectedException(HttpError):
with test_common.flexmocks(
[(self._api_server._db_conn, 'dbe_create', stub)]):
self.api.virtual_network_create(vn_obj)
zk_alloc_count_current = mock_zk._vn_id_allocator.get_alloc_count()
self.assertEqual(zk_alloc_count_start, zk_alloc_count_current)
def test_context_undo_vxlan_id_fail_db_create(self):
# enable vxlan routing on project
proj = self._vnc_lib.project_read(
fq_name=["default-domain", "default-project"])
proj.set_vxlan_routing(True)
self._vnc_lib.project_update(proj)
mock_zk = self._api_server._db_conn._zk_db
vn_obj = VirtualNetwork('%s-vn' % self.id())
vn_obj_properties = VirtualNetworkType(forwarding_mode='l3')
vn_obj_properties.set_vxlan_network_identifier(6000)
vn_obj.set_virtual_network_properties(vn_obj_properties)
def stub(*args, **kwargs):
return (False, (500, "Fake error"))
zk_alloc_count_start = mock_zk._vn_id_allocator.get_alloc_count()
with ExpectedException(HttpError):
with test_common.flexmocks(
[(self._api_server._db_conn, 'dbe_create', stub)]):
self.api.virtual_network_create(vn_obj)
# make sure allocation counter stays the same
zk_alloc_count_current = mock_zk._vn_id_allocator.get_alloc_count()
self.assertEqual(zk_alloc_count_start, zk_alloc_count_current)
def test_context_undo_fail_db_delete(self):
vn_obj = self.create_virtual_network('vn-l2-%s' % self.id())
vn_ipam_refs = vn_obj.get_network_ipam_refs()
mock_zk = self._api_server._db_conn._zk_db
zk_alloc_count_start = mock_zk._vn_id_allocator.get_alloc_count()
def stub(*args, **kwargs):
return (False, (500, "Fake error"))
with ExpectedException(HttpError):
with test_common.flexmocks(
[(self._api_server._db_conn, 'dbe_delete', stub)]):
self.api.virtual_network_delete(id=vn_obj.uuid)
# Make sure ipam refs still present (undo action recreated it)
vn_obj = self.api.virtual_network_read(id=vn_obj.uuid)
vn_ipam_refs_after_delete_fail = vn_obj.get_network_ipam_refs()
self.assertEqual(vn_ipam_refs[0]['to'],
vn_ipam_refs_after_delete_fail[0]['to'])
self.assertEqual(vn_ipam_refs[0]['uuid'],
vn_ipam_refs_after_delete_fail[0]['uuid'])
self.assertEqual(vn_ipam_refs[0]['attr'].ipam_subnets[0].subnet_uuid,
vn_ipam_refs_after_delete_fail[0][
'attr'].ipam_subnets[0].subnet_uuid)
# Make sure allocation counter stays the same
zk_alloc_count_current = mock_zk._vn_id_allocator.get_alloc_count()
self.assertEqual(zk_alloc_count_start, zk_alloc_count_current)
def test_context_undo_vxlan_id_fail_db_update(self):
# enable vxlan routing on project
proj = self._vnc_lib.project_read(
fq_name=["default-domain", "default-project"])
proj.set_vxlan_routing(True)
self._vnc_lib.project_update(proj)
mock_zk = self._api_server._db_conn._zk_db
vn_obj = VirtualNetwork('%s-vn' % self.id())
# Create vxlan
vxlan_id = 6000
vn_obj_properties = VirtualNetworkType(forwarding_mode='l3')
vn_obj_properties.set_vxlan_network_identifier(vxlan_id)
vn_obj_properties.set_forwarding_mode('l2_l3')
vn_obj.set_virtual_network_properties(vn_obj_properties)
self.api.virtual_network_create(vn_obj)
vxlan_fqname = mock_zk.get_vn_from_id(vxlan_id)
# Update vxlan id (will fail)
new_vxlan_id = 6005
vn_obj_properties.set_vxlan_network_identifier(new_vxlan_id)
vn_obj.set_virtual_network_properties(vn_obj_properties)
def stub(*args, **kwargs):
return (False, (500, "Fake error"))
zk_alloc_count_start = mock_zk._vn_id_allocator.get_alloc_count()
with ExpectedException(HttpError):
with test_common.flexmocks(
[(self._api_server._db_conn, 'dbe_update', stub)]):
self.api.virtual_network_update(vn_obj)
# Make sure vxlan_id is still allocated with same name
new_vxlan_fqname = mock_zk.get_vn_from_id(vxlan_id)
self.assertEqual(new_vxlan_fqname, vxlan_fqname)
# Make sure new_vxlan_id is deallocated
update_vxlan_fqname = mock_zk.get_vn_from_id(new_vxlan_id)
self.assertEqual(update_vxlan_fqname, None)
# Make sure allocation counter stays the same
zk_alloc_count_current = mock_zk._vn_id_allocator.get_alloc_count()
self.assertEqual(zk_alloc_count_start, zk_alloc_count_current)
def test_context_undo_vn_to_vxlan_id_fail_db_update(self):
# Enable vxlan routing on project
proj = self._vnc_lib.project_read(
fq_name=["default-domain", "default-project"])
proj.set_vxlan_routing(True)
self._vnc_lib.project_update(proj)
mock_zk = self._api_server._db_conn._zk_db
vn_obj = VirtualNetwork('%s-vn' % self.id())
self.api.virtual_network_create(vn_obj)
vn_fqname = mock_zk.get_vn_from_id(vn_obj.virtual_network_network_id)
vn_id = vn_obj.virtual_network_network_id
# Change vn to vxlan type
vxlan_id = 6000
vn_obj_properties = VirtualNetworkType(forwarding_mode='l3')
vn_obj_properties.set_vxlan_network_identifier(vxlan_id)
vn_obj_properties.set_forwarding_mode('l2_l3')
vn_obj.set_virtual_network_properties(vn_obj_properties)
def stub(*args, **kwargs):
return (False, (500, "Fake error"))
zk_alloc_count_start = mock_zk._vn_id_allocator.get_alloc_count()
with ExpectedException(HttpError):
with test_common.flexmocks(
[(self._api_server._db_conn, 'dbe_update', stub)]):
self.api.virtual_network_update(vn_obj)
# Make sure vxlan_id was dealocated
new_vxlan_fqname = mock_zk.get_vn_from_id(vxlan_id)
self.assertEqual(new_vxlan_fqname, None)
# Make sure vn id is the same
new_vn_id = vn_obj.virtual_network_network_id
self.assertEqual(vn_id, new_vn_id)
# Make sure fqname is the same fot vn_id
update_vn_fqname = mock_zk.get_vn_from_id(
vn_obj.virtual_network_network_id)
self.assertEqual(vn_fqname, update_vn_fqname)
# Make sure allocation counter stays the same
zk_alloc_count_current = mock_zk._vn_id_allocator.get_alloc_count()
self.assertEqual(zk_alloc_count_start, zk_alloc_count_current)
def test_create_provider_vn(self):
project = Project('%s-project' % self.id())
project_uuid = self.api.project_create(project)
project = self.api.project_read(id=project_uuid)
vn = VirtualNetwork('%s-vn' % self.id(), parent_obj=project)
vn.set_is_provider_network(True)
vn.set_provider_properties(
ProviderDetails(
params_dict={"segmentation_id": 100,
"physical_network": "physnet1"}))
vn_uuid = self.api.virtual_network_create(vn)
is_provider_network = (self
.api.virtual_network_read(id=vn_uuid)
.get_is_provider_network())
self.assertTrue(is_provider_network)
# end test_create_provider_vn
def test_create_provider_vn_without_provider_details(self):
project = Project('%s-project' % self.id())
project_uuid = self.api.project_create(project)
project = self.api.project_read(id=project_uuid)
vn = VirtualNetwork('%s-vn' % self.id(), parent_obj=project)
vn.set_is_provider_network(True)
vn_uuid = self.api.virtual_network_create(vn)
is_provider_network = (self
.api.virtual_network_read(id=vn_uuid)
.get_is_provider_network())
self.assertTrue(is_provider_network)
# end test_create_provider_vn_without_provider_details
def test_update_not_in_use_non_provider_vn_to_provider(self):
project = Project('%s-project' % self.id())
project_uuid = self.api.project_create(project)
project = self.api.project_read(id=project_uuid)
vn = VirtualNetwork('%s-vn' % self.id(), parent_obj=project)
vn_uuid = self.api.virtual_network_create(vn)
vn = self.api.virtual_network_read(id=vn_uuid)
is_provider_network = vn.get_is_provider_network()
self.assertFalse(is_provider_network)
vn.set_is_provider_network(True)
vn.set_provider_properties(
ProviderDetails(
params_dict={"segmentation_id": 100,
"physical_network": "physnet1"}))
self.api.virtual_network_update(vn)
vn = self.api.virtual_network_read(id=vn_uuid)
is_provider_network = vn.get_is_provider_network()
self.assertTrue(is_provider_network)
updated_provider_properties = vn.get_provider_properties()
segmentation_id = updated_provider_properties.get_segmentation_id()
physical_network = updated_provider_properties.get_physical_network()
self.assertEqual((100, "physnet1"),
(segmentation_id, physical_network))
# end test_update_non_provider_vn_to_provider
def test_update_non_provider_vn_to_provider_without_provider_details(self):
project = Project('%s-project' % self.id())
project_uuid = self.api.project_create(project)
project = self.api.project_read(id=project_uuid)
vn = VirtualNetwork('%s-vn' % self.id(), parent_obj=project)
vn_uuid = self.api.virtual_network_create(vn)
vn = self.api.virtual_network_read(id=vn_uuid)
is_provider_network = vn.get_is_provider_network()
self.assertFalse(is_provider_network)
vn.set_is_provider_network(True)
self.api.virtual_network_update(vn)
vn = self.api.virtual_network_read(id=vn_uuid)
is_provider_network = vn.get_is_provider_network()
self.assertTrue(is_provider_network)
# end test_update_non_provider_vn_to_provider_without_provider_details
def test_update_in_use_vn_to_provider_vn(self):
project = Project('%s-project' % self.id())
project_uuid = self.api.project_create(project)
project = self.api.project_read(id=project_uuid)
vn = VirtualNetwork('%s-vn' % self.id(), parent_obj=project)
vn_uuid = self.api.virtual_network_create(vn)
vmi = VirtualMachineInterface('%s-vmi' % self.id(), parent_obj=project)
vmi.set_virtual_network(vn)
self.api.virtual_machine_interface_create(vmi)
vn = self.api.virtual_network_read(id=vn_uuid)
vn.set_is_provider_network(True)
vn.set_provider_properties(
ProviderDetails(
params_dict={"segmentation_id": 100,
"physical_network": "physnet1"}))
self.api.virtual_network_update(vn)
updated_provider_properties = (self
.api.virtual_network_read(id=vn.uuid)
.get_provider_properties())
segmentation_id = updated_provider_properties.get_segmentation_id()
physical_network = updated_provider_properties.get_physical_network()
self.assertEqual((100, "physnet1"),
(segmentation_id, physical_network))
# end test_update_in_use_vn_to_provider_vn
def test_update_in_use_vn_to_provider_vn_without_physnet_label(self):
project = Project('%s-project' % self.id())
project_uuid = self.api.project_create(project)
project = self.api.project_read(id=project_uuid)
vn = VirtualNetwork('%s-vn' % self.id(), parent_obj=project)
vn_uuid = self.api.virtual_network_create(vn)
vmi = VirtualMachineInterface('%s-vmi' % self.id(), parent_obj=project)
vmi.set_virtual_network(vn)
self.api.virtual_machine_interface_create(vmi)
vn = self.api.virtual_network_read(id=vn_uuid)
vn.set_is_provider_network(True)
vn.set_provider_properties(
ProviderDetails(
params_dict={"segmentation_id": 100}))
with ExpectedException(RefsExistError):
self.api.virtual_network_update(vn)
updated_provider_properties = (self
.api.virtual_network_read(id=vn.uuid)
.get_provider_properties())
self.assertEqual(None, updated_provider_properties)
# end test_update_in_use_vn_to_provider_vn_without_physnet_label
def test_update_in_use_vn_to_provider_vn_without_segmentation(self):
project = Project('%s-project' % self.id())
project_uuid = self.api.project_create(project)
project = self.api.project_read(id=project_uuid)
vn = VirtualNetwork('%s-vn' % self.id(), parent_obj=project)
vn_uuid = self.api.virtual_network_create(vn)
vmi = VirtualMachineInterface('%s-vmi' % self.id(), parent_obj=project)
vmi.set_virtual_network(vn)
self.api.virtual_machine_interface_create(vmi)
vn = self.api.virtual_network_read(id=vn_uuid)
vn.set_is_provider_network(True)
vn.set_provider_properties(
ProviderDetails(
params_dict={"physical_network": "physnet1"}))
with ExpectedException(RefsExistError):
self.api.virtual_network_update(vn)
updated_provider_properties = (self
.api.virtual_network_read(id=vn.uuid)
.get_provider_properties())
self.assertEqual(None, updated_provider_properties)
# end test_update_in_use_vn_to_provider_vn_without_segmentation
def test_update_in_use_provider_vn(self):
project = Project('%s-project' % self.id())
project_uuid = self.api.project_create(project)
project = self.api.project_read(id=project_uuid)
vn = VirtualNetwork('%s-vn' % self.id(), parent_obj=project)
vn.set_is_provider_network(True)
vn.set_provider_properties(
ProviderDetails(
params_dict={"segmentation_id": 100,
"physical_network": "physnet1"}))
vn_uuid = self.api.virtual_network_create(vn)
vmi = VirtualMachineInterface('%s-vmi' % self.id(), parent_obj=project)
vmi.set_virtual_network(vn)
self.api.virtual_machine_interface_create(vmi)
vn = self.api.virtual_network_read(id=vn_uuid)
vn.set_provider_properties(
ProviderDetails(
params_dict={"segmentation_id": 200,
"physical_network": "physnet2"}))
with ExpectedException(RefsExistError):
self.api.virtual_network_update(vn)
updated_provider_properties = (self
.api.virtual_network_read(id=vn.uuid)
.get_provider_properties())
segmentation_id = updated_provider_properties.get_segmentation_id()
physical_network = updated_provider_properties.get_physical_network()
self.assertEqual((100, "physnet1"),
(segmentation_id, physical_network))
# end test_update_in_use_provider_vn
def test_allocate_mtu_value_range(self):
# enable MTU value on project
proj = self._vnc_lib.project_read(
fq_name=["default-domain", "default-project"])
self._vnc_lib.project_update(proj)
vn_obj = VirtualNetwork('%s-vn' % self.id())
vn_obj_properties = VirtualNetworkType(forwarding_mode='l3')
vn_obj_properties.set_mtu(8000)
vn_obj.set_virtual_network_properties(vn_obj_properties)
self.api.virtual_network_create(vn_obj)
# VN created, now read back the VN data to check if MTU_id is set
vn_obj = self.api.virtual_network_read(id=vn_obj.uuid)
vn_obj_properties = vn_obj.get_virtual_network_properties()
if not vn_obj_properties:
self.fail("VN properties are not set")
mtu_id = vn_obj_properties.get_mtu()
if mtu_id in range(0, 9216):
self.assertEqual(mtu_id, 8000)
logger.debug('PASS - test_allocate_mtu_range_value')
else:
logger.debug('FAIL -test allocate_mtu_range_value')
self.api.virtual_network_delete(id=vn_obj.uuid)
def test_update_mtu(self):
# enable mtu on project
proj = self._vnc_lib.project_read(
fq_name=["default-domain", "default-project"])
self._vnc_lib.project_update(proj)
vn_obj = VirtualNetwork('%s-vn' % self.id())
vn_obj_properties = VirtualNetworkType(forwarding_mode='l3')
vn_obj_properties.set_mtu(3000)
vn_obj_properties.set_forwarding_mode('l2_l3')
vn_obj.set_virtual_network_properties(vn_obj_properties)
self.api.virtual_network_create(vn_obj)
# VN created, now read back the VN data to check if mtu_id is set
vn_obj_read = self.api.virtual_network_read(id=vn_obj.uuid)
vn_obj_properties_read = vn_obj_read.get_virtual_network_properties()
if not vn_obj_properties_read:
self.fail("VN properties are not set")
mtu_id = vn_obj_properties_read.get_mtu()
self.assertEqual(mtu_id, 3000)
# Created VN. Now Update it with a different mtu_id
vn_obj_properties.set_mtu(8000)
vn_obj.set_virtual_network_properties(vn_obj_properties)
self.api.virtual_network_update(vn_obj)
vn_obj_read = self.api.virtual_network_read(id=vn_obj.uuid)
vn_obj_properties_read = vn_obj_read.get_virtual_network_properties()
if not vn_obj_properties_read:
self.fail("VN properties are not set")
mtu_id = vn_obj_properties_read.get_mtu()
self.assertEqual(mtu_id, 8000)
self.api.virtual_network_delete(id=vn_obj.uuid)
logger.debug('PASS - test_update_mtu_value')
def test_deallocate_mtu_value_to_none_range(self):
# enable MTU value on project
proj = self._vnc_lib.project_read(
fq_name=["default-domain", "default-project"])
self._vnc_lib.project_update(proj)
vn_obj = VirtualNetwork('%s-vn' % self.id())
vn_obj_properties = VirtualNetworkType(forwarding_mode='l3')
vn_obj_properties.set_mtu(2000)
vn_obj.set_virtual_network_properties(vn_obj_properties)
self.api.virtual_network_create(vn_obj)
# VN created, now read back the VN data to check if MTU_id is set
vn_obj = self.api.virtual_network_read(id=vn_obj.uuid)
vn_obj_properties = vn_obj.get_virtual_network_properties()
if not vn_obj_properties:
self.fail("VN properties are not set")
mtu_id = vn_obj_properties.get_mtu()
self.assertEqual(mtu_id, 2000)
# Created VN. Now Update it with a different mtu_id
vn_obj_properties.set_mtu(None)
vn_obj.set_virtual_network_properties(vn_obj_properties)
self.api.virtual_network_update(vn_obj)
vn_obj_read = self.api.virtual_network_read(id=vn_obj.uuid)
vn_obj_properties_read = vn_obj_read.get_virtual_network_properties()
if not vn_obj_properties_read:
self.fail("VN properties are not set")
mtu_id = vn_obj_properties_read.get_mtu()
self.assertEqual(mtu_id, None)
self.api.virtual_network_delete(id=vn_obj.uuid)
logger.debug('PASS - test_update_mtu_none_value')
def test_default_mtu_value_range(self):
# enable MTU value on project
proj = self._vnc_lib.project_read(
fq_name=["default-domain", "default-project"])
self._vnc_lib.project_update(proj)
vn_obj = VirtualNetwork('%s-vn' % self.id())
vn_obj_properties = VirtualNetworkType(forwarding_mode='l3')
vn_obj_properties.set_mtu(None)
vn_obj.set_virtual_network_properties(vn_obj_properties)
self.api.virtual_network_create(vn_obj)
# VN created, now read back the VN data to check if MTU_id is set
vn_obj = self.api.virtual_network_read(id=vn_obj.uuid)
vn_obj_properties = vn_obj.get_virtual_network_properties()
if not vn_obj_properties:
self.fail("VN properties are not set")
mtu_id = vn_obj_properties.get_mtu()
if mtu_id in range(0, 9216) or None:
self.assertEqual(mtu_id, None)
logger.debug('PASS - test_allocate_mtu_range_value')
else:
logger.debug('FAIL -test allocate_mtu_range_value')
self.api.virtual_network_delete(id=vn_obj.uuid)
|
#!/usr/bin/python
import sys
def main():
    """Sum the gross/net columns of a ledger file named on the command line.

    Usage: ``sums.py FILENAME``. Only lines whose first token starts
    with '201' (a 201x date stamp) are counted; token 2 is the gross
    amount and token 3, when present, the net amount. Prints the gross
    total, net total and their difference (tax).
    """
    if len(sys.argv) != 2:
        print("Usage:\n sums.py FILENAME")
        return
    filename = sys.argv[1]
    # Gross and Net
    gross = 0
    net = 0
    # BUGFIX: the original opened the file without ever closing it;
    # 'with' guarantees the handle is released even on a parse error.
    with open(filename, 'r') as infile:
        for line in infile:
            tokens = line.split()
            # BUGFIX: require at least two tokens before reading
            # tokens[1]; a bare date line previously raised IndexError.
            if len(tokens) >= 2 and tokens[0].startswith('201'):
                gross += float(tokens[1])
                net += float(tokens[2]) if (len(tokens) > 2) else 0
    print("Gross: ", str(gross), "Net", str(net), "Tax", str(gross-net))


if __name__ == '__main__':
    main()
|
"""Synapse Challenge Services"""
import json
from typing import Union, Iterator
from synapseclient import Project, Synapse, Team
from synapseclient.core.utils import id_of
from .synapseservices.challenge import Challenge
class ChallengeApi:
"""Challenge services
https://docs.synapse.org/rest/index.html#org.sagebionetworks.repo.web.controller.ChallengeController
Args:
id: The ID of this Challenge object
project: synapseclient.Project or its id
team: synapseclient.Team or its id
"""
def __init__(self, syn: Synapse):
self.syn = syn
def create_challenge(self, teamid: str, projectid: str) -> Challenge:
"""Creates a challenge
Args:
teamid: A Synapse Team id
projectid: A Synapse Project id
Returns:
A synapseservices.Challenge
"""
challenge_object = {"participantTeamId": teamid, "projectId": projectid}
challenge = self.syn.restPOST("/challenge", json.dumps(challenge_object))
return Challenge(**challenge)
def get_registered_challenges(self, participantId: str) -> Iterator[Challenge]:
"""Gets a list of challenges a participant is registered to
Args:
participantId: A Synapse User Id
Yields:
A synapseservices.Challenge
"""
challenges = self.syn._GET_paginated(
f"/challenge?participantId={participantId}"
)
for challenge in challenges:
yield Challenge(**challenge)
def get_challenge(
self, challengeid: str = None, projectid: str = None
) -> Challenge:
"""Gets a challenge
Args:
challengeid: A Synapse Challenge id
projectId: A Synapse Project id
Returns:
A synapseservices.Challenge
"""
if challengeid is not None:
url = f"/challenge/{challengeid}"
elif projectid is not None:
url = f"/entity/{projectid}/challenge"
else:
raise ValueError("Must pass in `challengeid` or `projectid`")
return Challenge(**self.syn.restGET(url))
def update_challenge(
self, challengeid: str, teamid: str = None, projectid: str = None
) -> Challenge:
"""Updates a Synapse Challenge
Args:
challengeid: A Synapse Challenge id
teamid: A Synapse Team id
projectid: A Synapse Project id
Returns:
A synapseservices.Challenge
"""
challenge_object = {
"id": challengeid,
"participantTeamId": teamid,
"projectId": projectid,
}
challenge = self.syn.restPUT(
f"/challenge/{challengeid}", json.dumps(challenge_object)
)
return Challenge(**challenge)
def delete_challenge(self, challengeid: str):
"""Deletes a Synapse Challenge
Args:
challengeid: A Synapse Challenge id
"""
return self.syn.restDELETE(f"/challenge/{challengeid}")
def get_registered_participants(self, challengeid: str) -> list:
    """List participants registered for a challenge.

    Args:
        challengeid: A Synapse Challenge id

    Returns:
        Registered participants
    """
    return self.syn._GET_paginated(f"/challenge/{challengeid}/participant")
def get_registered_teams(self, challengeid: str) -> list:
    """Get teams registered for a challenge.

    Args:
        challengeid: A Synapse Challenge id

    Returns:
        Registered teams
    """
    # Return annotation added for consistency with
    # get_registered_participants, which wraps the same paginated
    # GET helper and declares "-> list".
    url = f"/challenge/{challengeid}/challengeTeam"
    return self.syn._GET_paginated(url)
def register_team(self, challengeid: str, teamid: str):
    """Register a team for a challenge.

    Args:
        challengeid: A Synapse challenge id
        teamid: A Synapse Team id

    Returns:
        A Synapse team
    """
    payload = json.dumps({"challengeId": challengeid, "teamId": teamid})
    return self.syn.restPOST(f"/challenge/{challengeid}/challengeTeam", payload)
def get_registered_challenges(syn: Synapse, userid: str = None) -> Iterator[Project]:
    """Yield the Synapse challenge projects a user is registered to.

    Defaults to the logged-in Synapse user.

    Args:
        syn: Synapse connection
        userid: Specify userid if you want to know the challenges
            another Synapse user is registered to.

    Yields:
        A synapseclient.Project
    """
    api = ChallengeApi(syn=syn)
    # getUserProfile returns the logged-in user's profile when None
    # is passed in.
    owner_id = syn.getUserProfile(userid).ownerId
    for registration in api.get_registered_challenges(participantId=owner_id):
        project = syn.get(registration.projectId)
        print(project.name)
        yield project
def get_challenge(syn: Synapse, project: Union[Project, str]) -> Challenge:
    """Get the Challenge associated with a Project.

    See the definition of a Challenge object here:
    https://docs.synapse.org/rest/org/sagebionetworks/repo/model/Challenge.html

    Args:
        syn: Synapse connection
        project: A synapseclient.Project or its id

    Returns:
        Challenge object
    """
    return ChallengeApi(syn=syn).get_challenge(projectid=id_of(project))
def create_challenge(
    syn: Synapse, project: Union[Project, str], team: Union[Team, str]
) -> Challenge:
    """Create a Challenge associated with a Project.

    Args:
        syn: Synapse connection
        project: A synapseclient.Project or its id
        team: A synapseclient.Team or its id

    Returns:
        Challenge object
    """
    api = ChallengeApi(syn=syn)
    return api.create_challenge(projectid=id_of(project), teamid=id_of(team))
|
import oci
from oci.config import from_file
import base64
# Load OCI SDK credentials from a local config file.
# NOTE(review): the file_location below is a truncated placeholder
# ("C:\\Users...config") — point it at a real OCI config path before use.
config = from_file(file_location="C:\\Users...config", profile_name='DEFAULT')
# Manages encryption/decryption of the data key.
def encryptdatakey(masterkeyocid, datakeyocid="<OCID of the data key>"):
    """Encrypt a data key under the given master key via OCI KMS.

    Called only when the data key needs to be (re-)encrypted by the
    master key — rarely used, typically during master-key rotation.

    Args:
        masterkeyocid: OCID of the master key used for encryption.
        datakeyocid: The data-key payload to encrypt. Generalized from the
            original hard-coded constant into a parameter; the default keeps
            existing callers' behavior. Convert into Base64/Bytes if required.

    Returns:
        The OCI ``encrypt`` response containing the ciphertext.
    """
    # NOTE(review): the crypto endpoint is a placeholder; each vault has its
    # own crypto endpoint — substitute the real one before use.
    key_management_client = oci.key_management.KmsCryptoClient(
        config, "https://your_crypto_head.oraclecloud.com"
    )
    encrypt_data_details = oci.key_management.models.EncryptDataDetails(
        key_id=masterkeyocid,
        plaintext=datakeyocid,
    )
    return key_management_client.encrypt(encrypt_data_details)
def decryptdatakey(masterkey, artifact):
    """Decrypt a data-key ciphertext under the given master key via OCI KMS.

    Called only when the data key is needed.
    Note: only the data key OCID is returned; the actual data key stays
    safe in the Vault.

    Args:
        masterkey: OCID of the master key that encrypted the artifact.
        artifact: The ciphertext to decrypt.

    Returns:
        The OCI ``decrypt`` response containing the plaintext.
    """
    key_management_client = oci.key_management.KmsCryptoClient(
        config, "https://your_crypto_head.oraclecloud.com"
    )
    decrypt_data_details = oci.key_management.models.DecryptDataDetails(
        key_id=masterkey,
        ciphertext=artifact,
    )
    # BUG FIX: the original called key_management_client.encrypt() with
    # DecryptDataDetails; decryption must go through decrypt().
    return key_management_client.decrypt(decrypt_data_details)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.